diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5b3a91b..8a67f7f 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -26,11 +26,16 @@ add_message_files(FILES
Appearance.msg
Audio.msg
BodyArray.msg
+ Body.msg
+ BoundingBox2D.msg
Expressions.msg
+ FaceArray.msg
+ Face.msg
+ FacePoints.msg
JointOrientationAndType.msg
JointPositionAndState.msg
Lean.msg
- Body.msg
+ Quaternion.msg
)
generate_messages(DEPENDENCIES
@@ -70,18 +75,22 @@ add_executable(startBody "${PROJECT_SOURCE_DIR}/src/startBody.cpp")
add_executable(startDepth "${PROJECT_SOURCE_DIR}/src/startDepth.cpp")
add_executable(startIR "${PROJECT_SOURCE_DIR}/src/startIR.cpp")
add_executable(startRGB "${PROJECT_SOURCE_DIR}/src/startRGB.cpp")
+add_executable(startFace "${PROJECT_SOURCE_DIR}/src/startFace.cpp")
add_dependencies(startAudio ${k2_client_EXPORTED_TARGETS})
add_dependencies(startBody ${k2_client_EXPORTED_TARGETS})
add_dependencies(startDepth ${k2_client_EXPORTED_TARGETS})
add_dependencies(startIR ${k2_client_EXPORTED_TARGETS})
add_dependencies(startRGB ${k2_client_EXPORTED_TARGETS})
+add_dependencies(startFace ${k2_client_EXPORTED_TARGETS})
target_link_libraries(startAudio ${JSONCPP_LIBRARY} ${catkin_LIBRARIES})
target_link_libraries(startBody ${JSONCPP_LIBRARY} ${catkin_LIBRARIES})
target_link_libraries(startDepth ${JSONCPP_LIBRARY} ${catkin_LIBRARIES})
target_link_libraries(startIR ${JSONCPP_LIBRARY} ${catkin_LIBRARIES})
target_link_libraries(startRGB ${JSONCPP_LIBRARY} ${catkin_LIBRARIES})
+target_link_libraries(startFace ${JSONCPP_LIBRARY} ${catkin_LIBRARIES})
+
diff --git a/launch/k2_client.launch b/launch/k2_client.launch
index 4b23c9d..2590f21 100644
--- a/launch/k2_client.launch
+++ b/launch/k2_client.launch
@@ -1,14 +1,16 @@
-
+
+
-
-
-
-
-
-
+
+
+
+
+
+
+
diff --git a/msg/Body.msg b/msg/Body.msg
index ad84f09..835f6b7 100644
--- a/msg/Body.msg
+++ b/msg/Body.msg
@@ -1,4 +1,8 @@
Header header
+int32 fromX
+int32 toX
+int32 fromY
+int32 toY
int32 leanTrackingState
Lean lean
bool isRestricted
@@ -14,4 +18,4 @@ Appearance appearance
Activities activities
Expressions expressions
JointOrientationAndType[] jointOrientations
-JointPositionAndState[] jointPositions
\ No newline at end of file
+JointPositionAndState[] jointPositions
diff --git a/msg/BoundingBox2D.msg b/msg/BoundingBox2D.msg
new file mode 100644
index 0000000..006ac1d
--- /dev/null
+++ b/msg/BoundingBox2D.msg
@@ -0,0 +1,4 @@
+int32 left
+int32 top
+int32 right
+int32 bottom
\ No newline at end of file
diff --git a/msg/Face.msg b/msg/Face.msg
new file mode 100644
index 0000000..92c4d5c
--- /dev/null
+++ b/msg/Face.msg
@@ -0,0 +1,16 @@
+Header header
+
+Appearance appearance
+Activities activities
+Expressions expressions
+
+FacePoints facePointsInInfraredSpace
+FacePoints facePointsInColorSpace
+
+BoundingBox2D faceBoundingBoxInInfraredSpace
+BoundingBox2D faceBoundingBoxInColorSpace
+
+Quaternion faceRotationQuaternion
+
+int64 trackingId
+int32 faceFrameFeatures
diff --git a/msg/FaceArray.msg b/msg/FaceArray.msg
new file mode 100644
index 0000000..459c992
--- /dev/null
+++ b/msg/FaceArray.msg
@@ -0,0 +1 @@
+Face[] faces
\ No newline at end of file
diff --git a/msg/FacePoints.msg b/msg/FacePoints.msg
new file mode 100644
index 0000000..81a29a7
--- /dev/null
+++ b/msg/FacePoints.msg
@@ -0,0 +1,10 @@
+float32 eyeLeftX
+float32 eyeLeftY
+float32 eyeRightX
+float32 eyeRightY
+float32 noseX
+float32 noseY
+float32 mouthCornerLeftX
+float32 mouthCornerLeftY
+float32 mouthCornerRightX
+float32 mouthCornerRightY
diff --git a/msg/Quaternion.msg b/msg/Quaternion.msg
new file mode 100644
index 0000000..f908ddc
--- /dev/null
+++ b/msg/Quaternion.msg
@@ -0,0 +1,4 @@
+float32 X
+float32 Y
+float32 Z
+float32 W
\ No newline at end of file
diff --git a/src/startAudio.cpp b/src/startAudio.cpp
index 86df3b4..4c2a92f 100644
--- a/src/startAudio.cpp
+++ b/src/startAudio.cpp
@@ -26,7 +26,10 @@ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***************************************************************************************/
#include "k2_client/k2_client.h"
-#include "k2_client/Audio.h"
+#include
+#include
+#include
+
std::string topicName = "audio";
int twiceStreamSize = 8200;
@@ -38,33 +41,15 @@ int main(int argC,char **argV)
ros::NodeHandle n;
std::string serverAddress;
n.getParam("/serverNameOrIP",serverAddress);
- Socket mySocket(serverAddress.c_str(),"9004",twiceStreamSize);
- ros::Publisher audioPub = n.advertise(topicName,1);
+ Socket mySocket(serverAddress.c_str(),"9009",twiceStreamSize);
+ ros::Publisher audioPub = n.advertise(topicName,1);
while(ros::ok())
{
mySocket.readData();
- std::string jsonString;
- for(int i=0;i
+#include
std::string topicName = "bodyArray";
-size_t streamSize = 56008;
-size_t readSkipSize = 56000;
-size_t stringSize = 28000;
+size_t streamSize = 60000;
+size_t readSkipSize = 60000;
+size_t stringSize = 30000;
-int main(int argC,char **argV)
+int main(int argc,char **argv)
{
- ros::init(argC,argV,"startBody");
+ ros::init(argc,argv,"startBody");
ros::NodeHandle n;
std::string serverAddress;
n.getParam("/serverNameOrIP",serverAddress);
Socket mySocket(serverAddress.c_str(),"9003",streamSize);
- iconv_t charConverter = iconv_open("UTF-8","UTF-16");
ros::Publisher bodyPub = n.advertise(topicName,1);
- char jsonCharArray[readSkipSize];
-
while(ros::ok())
{
mySocket.readData();
- char *jsonCharArrayPtr;
- char *socketCharArrayPtr;
- jsonCharArrayPtr = jsonCharArray;
- socketCharArrayPtr = mySocket.mBuffer;
- iconv(charConverter,&socketCharArrayPtr,&readSkipSize,&jsonCharArrayPtr,&stringSize);
- double utcTime;
- memcpy(&utcTime,&mySocket.mBuffer[readSkipSize],sizeof(double));
- std::string jsonString(jsonCharArray);
+ std::string jsonString(mySocket.mBuffer);
Json::Value jsonObject;
Json::Reader jsonReader;
bool parsingSuccessful = jsonReader.parse(jsonString,jsonObject,false);
@@ -70,27 +61,34 @@ int main(int argC,char **argV)
for(int i=0;i<6;i++)
{
k2_client::Body body;
- body.header.stamp = ros::Time(utcTime);
+ body.header.stamp = ros::Time::now();
body.header.frame_id = ros::this_node::getNamespace().substr(1,std::string::npos) + "/depthFrame";
- body.leanTrackingState = jsonObject[i]["LeanTrackingState"].asInt();
- body.lean.leanX = jsonObject[i]["Lean"]["X"].asDouble();
- body.lean.leanY = jsonObject[i]["Lean"]["Y"].asDouble();
- body.isTracked = jsonObject[i]["IsTracked"].asBool();
- body.trackingId = jsonObject[i]["TrackingId"].asUInt64();
- body.clippedEdges = jsonObject[i]["ClippedEdges"].asInt();
- body.engaged = jsonObject[i]["Engaged"].asBool();
- body.handRightConfidence = jsonObject[i]["HandRightConfidence"].asInt();
- body.handRightState = jsonObject[i]["HandRightState"].asInt();
- body.handLeftConfidence = jsonObject[i]["HandLeftConfidence"].asInt();
- body.handLeftState = jsonObject[i]["HandLeftState"].asInt();
- body.appearance.wearingGlasses = jsonObject[i]["Appearance"]["WearingGlasses"].asBool();
- body.activities.eyeLeftClosed = jsonObject[i]["Activities"]["EyeLeftClosed"].asBool();
- body.activities.eyeRightClosed = jsonObject[i]["Activities"]["EyeRightClosed"].asBool();
- body.activities.mouthOpen = jsonObject[i]["Activities"]["MouthOpen"].asBool();
- body.activities.mouthMoved = jsonObject[i]["Activities"]["MouthMoved"].asBool();
- body.activities.lookingAway = jsonObject[i]["Activities"]["LookingAway"].asBool();
- body.expressions.neutral = jsonObject[i]["Expressions"]["Neutral"].asBool();
- body.expressions.neutral = jsonObject[i]["Expressions"]["Happy"].asBool();
+ body.fromX = jsonObject[i]["FromX"].asInt();
+ body.fromY = jsonObject[i]["FromY"].asInt();
+ body.toX = jsonObject[i]["ToX"].asInt();
+ body.toY = jsonObject[i]["ToY"].asInt();
+ body.leanTrackingState = jsonObject[i]["Body"]["LeanTrackingState"].asInt();
+ body.lean.leanX = jsonObject[i]["Body"]["Lean"]["X"].asDouble();
+ body.lean.leanY = jsonObject[i]["Body"]["Lean"]["Y"].asDouble();
+ body.isTracked = jsonObject[i]["Body"]["IsTracked"].asBool();
+ if (!body.isTracked) {
+ continue;
+ }
+ body.trackingId = jsonObject[i]["Body"]["TrackingId"].asUInt64();
+ body.clippedEdges = jsonObject[i]["Body"]["ClippedEdges"].asInt();
+ body.engaged = jsonObject[i]["Body"]["Engaged"].asBool();
+ body.handRightConfidence = jsonObject[i]["Body"]["HandRightConfidence"].asInt();
+ body.handRightState = jsonObject[i]["Body"]["HandRightState"].asInt();
+ body.handLeftConfidence = jsonObject[i]["Body"]["HandLeftConfidence"].asInt();
+ body.handLeftState = jsonObject[i]["Body"]["HandLeftState"].asInt();
+ body.appearance.wearingGlasses = jsonObject[i]["Body"]["Appearance"]["WearingGlasses"].asBool();
+ body.activities.eyeLeftClosed = jsonObject[i]["Body"]["Activities"]["EyeLeftClosed"].asBool();
+ body.activities.eyeRightClosed = jsonObject[i]["Body"]["Activities"]["EyeRightClosed"].asBool();
+ body.activities.mouthOpen = jsonObject[i]["Body"]["Activities"]["MouthOpen"].asBool();
+ body.activities.mouthMoved = jsonObject[i]["Body"]["Activities"]["MouthMoved"].asBool();
+ body.activities.lookingAway = jsonObject[i]["Body"]["Activities"]["LookingAway"].asBool();
+ body.expressions.neutral = jsonObject[i]["Body"]["Expressions"]["Neutral"].asBool();
+            body.expressions.happy = jsonObject[i]["Body"]["Expressions"]["Happy"].asBool();
for(int j=0;j<25;j++)
{
k2_client::JointOrientationAndType JOAT;
@@ -125,17 +123,17 @@ int main(int argC,char **argV)
case 24: fieldName = "ThumbRight";break;
}
- JOAT.orientation.x = jsonObject[i]["JointOrientations"][fieldName]["Orientation"]["X"].asDouble();
- JOAT.orientation.y = jsonObject[i]["JointOrientations"][fieldName]["Orientation"]["Y"].asDouble();
- JOAT.orientation.z = jsonObject[i]["JointOrientations"][fieldName]["Orientation"]["Z"].asDouble();
- JOAT.orientation.w = jsonObject[i]["JointOrientations"][fieldName]["Orientation"]["W"].asDouble();
- JOAT.jointType = jsonObject[i]["JointOrientations"][fieldName]["JointType"].asInt();
+ JOAT.orientation.x = jsonObject[i]["Body"]["JointOrientations"][fieldName]["Orientation"]["X"].asDouble();
+ JOAT.orientation.y = jsonObject[i]["Body"]["JointOrientations"][fieldName]["Orientation"]["Y"].asDouble();
+ JOAT.orientation.z = jsonObject[i]["Body"]["JointOrientations"][fieldName]["Orientation"]["Z"].asDouble();
+ JOAT.orientation.w = jsonObject[i]["Body"]["JointOrientations"][fieldName]["Orientation"]["W"].asDouble();
+ JOAT.jointType = jsonObject[i]["Body"]["JointOrientations"][fieldName]["JointType"].asInt();
- JPAS.trackingState = jsonObject[i]["Joints"][fieldName]["TrackingState"].asBool();
- JPAS.position.x = jsonObject[i]["Joints"][fieldName]["Position"]["X"].asDouble();
- JPAS.position.y = jsonObject[i]["Joints"][fieldName]["Position"]["Y"].asDouble();
- JPAS.position.z = jsonObject[i]["Joints"][fieldName]["Position"]["Z"].asDouble();
- JPAS.jointType = jsonObject[i]["Joints"][fieldName]["JointType"].asInt();
+ JPAS.trackingState = jsonObject[i]["Body"]["Joints"][fieldName]["TrackingState"].asBool();
+ JPAS.position.x = jsonObject[i]["Body"]["Joints"][fieldName]["Position"]["X"].asDouble();
+ JPAS.position.y = jsonObject[i]["Body"]["Joints"][fieldName]["Position"]["Y"].asDouble();
+ JPAS.position.z = jsonObject[i]["Body"]["Joints"][fieldName]["Position"]["Z"].asDouble();
+ JPAS.jointType = jsonObject[i]["Body"]["Joints"][fieldName]["JointType"].asInt();
body.jointOrientations.push_back(JOAT);
body.jointPositions.push_back(JPAS);
@@ -145,10 +143,11 @@ int main(int argC,char **argV)
}
catch (...)
{
- ROS_ERROR("An exception occured");
+            ROS_ERROR("startBody - An exception occurred");
continue;
}
- bodyPub.publish(bodyArray);
+ if (bodyArray.bodies.size() > 0)
+ bodyPub.publish(bodyArray);
}
return 0;
}
diff --git a/src/startDepth.cpp b/src/startDepth.cpp
index 2248688..5990108 100644
--- a/src/startDepth.cpp
+++ b/src/startDepth.cpp
@@ -29,46 +29,42 @@ WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// this alternate resolution for an aligned depth image
//int imageSize = 639392;
-int imageSize = 434176;
-int streamSize = imageSize + sizeof(double);
+int imageSize = 1920 * 1080 * 6;
+int streamSize = imageSize;
std::string cameraName = "depth";
std::string imageTopicSubName = "image_depth";
std::string cameraFrame = "";
int main(int argC,char **argV)
{
- ros::init(argC,argV,"startDepth");
- ros::NodeHandle n(cameraName);
- image_transport::ImageTransport imT(n);
- std::string serverAddress;
- n.getParam("/serverNameOrIP",serverAddress);
+ ros::init(argC,argV,"startDepth");
+ ros::NodeHandle n(cameraName);
+ image_transport::ImageTransport imT(n);
+ std::string serverAddress;
+ n.getParam("/serverNameOrIP",serverAddress);
n.getParam(ros::this_node::getNamespace().substr(1,std::string::npos) +
"/depth_frame", cameraFrame);
- Socket mySocket(serverAddress.c_str(),"9001",streamSize);
- image_transport::CameraPublisher cameraPublisher = imT.advertiseCamera(
- imageTopicSubName, 1);
- camera_info_manager::CameraInfoManager camInfoMgr(n,cameraName);
- camInfoMgr.loadCameraInfo("");
- cv::Mat frame;
- cv_bridge::CvImage cvImage;
- sensor_msgs::Image rosImage;
- while(ros::ok())
- {
- mySocket.readData();
- // this alternate resolution was for an aligned depth image
- //frame = cv::Mat(cv::Size(754,424),CV_16UC1,mySocket.mBuffer);
- frame = cv::Mat(cv::Size(512,424), CV_16UC1,mySocket.mBuffer);
- cv::flip(frame,frame,1);
- double utcTime;
- memcpy(&utcTime,&mySocket.mBuffer[imageSize],sizeof(double));
+ Socket mySocket(serverAddress.c_str(), "18000", streamSize);
+ image_transport::Publisher imagePublisher = imT.advertise(imageTopicSubName, 1);
+ cv::Mat frame;
+ cv_bridge::CvImage cvImage;
+ sensor_msgs::Image rosImage;
+ while(ros::ok())
+ {
+ mySocket.readData();
+// for(int i=0; i < 6; i++) {
+// printf("%02x", (unsigned char)(mySocket.mBuffer[(1920 * 1079 + 1000) * 6 + i]));
+// }
+// printf("\n");
+ frame = cv::Mat(cv::Size(1920,1080), CV_16SC3,mySocket.mBuffer);
+ cv::flip(frame,frame,0);
+ cv::Vec3s test_vec = frame.at(500, 500);
cvImage.header.frame_id = cameraFrame.c_str();
- cvImage.encoding = "16UC1";
+ cvImage.encoding = "16SC3";
cvImage.image = frame;
cvImage.toImageMsg(rosImage);
- sensor_msgs::CameraInfo camInfo = camInfoMgr.getCameraInfo();
- camInfo.header.frame_id = cvImage.header.frame_id;
- cameraPublisher.publish(rosImage, camInfo, ros::Time(utcTime));
+ imagePublisher.publish(rosImage);
ros::spinOnce();
- }
- return 0;
+ }
+ return 0;
}
diff --git a/src/startFace.cpp b/src/startFace.cpp
new file mode 100644
index 0000000..d9da846
--- /dev/null
+++ b/src/startFace.cpp
@@ -0,0 +1,101 @@
+#include "k2_client/k2_client.h"
+#include "k2_client/FaceArray.h"
+#include
+#include
+
+std::string topicName = "faceArray";
+size_t streamSize = 60000;
+size_t readSkipSize = 60000;
+size_t stringSize = 30000;
+
+int main(int argC,char **argV)
+{
+ ros::init(argC,argV,"startFace");
+ ros::NodeHandle n;
+ std::string serverAddress;
+ n.getParam("/serverNameOrIP",serverAddress);
+ Socket mySocket(serverAddress.c_str(),"9006",streamSize);
+ ros::Publisher facePub = n.advertise(topicName,1);
+ while(ros::ok())
+ {
+ mySocket.readData();
+ std::string jsonString(mySocket.mBuffer);
+ Json::Value jsonObject;
+ Json::Reader jsonReader;
+ bool parsingSuccessful = jsonReader.parse(jsonString,jsonObject,false);
+ if(!parsingSuccessful)
+ {
+ ROS_ERROR("Failure to parse");
+ continue;
+ }
+ k2_client::FaceArray faceArray;
+ try
+ {
+ for(int i=0;i<6;i++)
+ {
+ k2_client::Face face;
+ face.header.stamp = ros::Time::now();
+ face.header.frame_id = ros::this_node::getNamespace().substr(1,std::string::npos) + "/depthFrame";
+
+ face.appearance.wearingGlasses = jsonObject[i]["FaceProperties"]["WearingGlasses"].asBool();
+ face.activities.eyeLeftClosed = jsonObject[i]["FaceProperties"]["EyeLeftClosed"].asBool();
+ face.activities.eyeRightClosed = jsonObject[i]["FaceProperties"]["EyeRightClosed"].asBool();
+ face.activities.mouthOpen = jsonObject[i]["FaceProperties"]["MouthOpen"].asBool();
+ face.activities.mouthMoved = jsonObject[i]["FaceProperties"]["MouthMoved"].asBool();
+ face.activities.lookingAway = jsonObject[i]["FaceProperties"]["LookingAway"].asBool();
+ face.expressions.neutral = jsonObject[i]["FaceProperties"]["Neutral"].asBool();
+                face.expressions.happy = jsonObject[i]["FaceProperties"]["Happy"].asBool();
+
+ face.facePointsInInfraredSpace.eyeLeftX = jsonObject[i]["FacePointsInInfraredSpace"]["EyeLeft"]["X"].asDouble();
+ face.facePointsInInfraredSpace.eyeLeftY = jsonObject[i]["FacePointsInInfraredSpace"]["EyeLeft"]["Y"].asDouble();
+ face.facePointsInInfraredSpace.eyeRightX = jsonObject[i]["FacePointsInInfraredSpace"]["EyeRight"]["X"].asDouble();
+ face.facePointsInInfraredSpace.eyeRightY = jsonObject[i]["FacePointsInInfraredSpace"]["EyeRight"]["Y"].asDouble();
+                face.facePointsInInfraredSpace.noseX = jsonObject[i]["FacePointsInInfraredSpace"]["Nose"]["X"].asDouble();
+                face.facePointsInInfraredSpace.noseY = jsonObject[i]["FacePointsInInfraredSpace"]["Nose"]["Y"].asDouble();
+ face.facePointsInInfraredSpace.mouthCornerLeftX = jsonObject[i]["FacePointsInInfraredSpace"]["MouthCornerLeft"]["X"].asDouble();
+ face.facePointsInInfraredSpace.mouthCornerLeftY = jsonObject[i]["FacePointsInInfraredSpace"]["MouthCornerLeft"]["Y"].asDouble();
+ face.facePointsInInfraredSpace.mouthCornerRightX = jsonObject[i]["FacePointsInInfraredSpace"]["MouthCornerRight"]["X"].asDouble();
+ face.facePointsInInfraredSpace.mouthCornerRightY = jsonObject[i]["FacePointsInInfraredSpace"]["MouthCornerRight"]["Y"].asDouble();
+
+ face.facePointsInColorSpace.eyeLeftX = jsonObject[i]["FacePointsInColorSpace"]["EyeLeft"]["X"].asDouble();
+ face.facePointsInColorSpace.eyeLeftY = jsonObject[i]["FacePointsInColorSpace"]["EyeLeft"]["Y"].asDouble();
+ face.facePointsInColorSpace.eyeRightX = jsonObject[i]["FacePointsInColorSpace"]["EyeRight"]["X"].asDouble();
+ face.facePointsInColorSpace.eyeRightY = jsonObject[i]["FacePointsInColorSpace"]["EyeRight"]["Y"].asDouble();
+                face.facePointsInColorSpace.noseX = jsonObject[i]["FacePointsInColorSpace"]["Nose"]["X"].asDouble();
+                face.facePointsInColorSpace.noseY = jsonObject[i]["FacePointsInColorSpace"]["Nose"]["Y"].asDouble();
+ face.facePointsInColorSpace.mouthCornerLeftX = jsonObject[i]["FacePointsInColorSpace"]["MouthCornerLeft"]["X"].asDouble();
+ face.facePointsInColorSpace.mouthCornerLeftY = jsonObject[i]["FacePointsInColorSpace"]["MouthCornerLeft"]["Y"].asDouble();
+ face.facePointsInColorSpace.mouthCornerRightX = jsonObject[i]["FacePointsInColorSpace"]["MouthCornerRight"]["X"].asDouble();
+ face.facePointsInColorSpace.mouthCornerRightY = jsonObject[i]["FacePointsInColorSpace"]["MouthCornerRight"]["Y"].asDouble();
+
+ face.faceBoundingBoxInInfraredSpace.left = jsonObject[i]["FaceBoundingBoxInInfraredSpace"]["Left"].asInt();
+ face.faceBoundingBoxInInfraredSpace.top = jsonObject[i]["FaceBoundingBoxInInfraredSpace"]["Top"].asInt();
+ face.faceBoundingBoxInInfraredSpace.right = jsonObject[i]["FaceBoundingBoxInInfraredSpace"]["Right"].asInt();
+ face.faceBoundingBoxInInfraredSpace.bottom = jsonObject[i]["FaceBoundingBoxInInfraredSpace"]["Bottom"].asInt();
+
+ face.faceBoundingBoxInColorSpace.left = jsonObject[i]["FaceBoundingBoxInColorSpace"]["Left"].asInt();
+ face.faceBoundingBoxInColorSpace.top = jsonObject[i]["FaceBoundingBoxInColorSpace"]["Top"].asInt();
+ face.faceBoundingBoxInColorSpace.right = jsonObject[i]["FaceBoundingBoxInColorSpace"]["Right"].asInt();
+ face.faceBoundingBoxInColorSpace.bottom = jsonObject[i]["FaceBoundingBoxInColorSpace"]["Bottom"].asInt();
+
+ face.faceRotationQuaternion.X = jsonObject[i]["FaceRotationQuaternion"]["X"].asDouble();
+ face.faceRotationQuaternion.Y = jsonObject[i]["FaceRotationQuaternion"]["Y"].asDouble();
+ face.faceRotationQuaternion.Z = jsonObject[i]["FaceRotationQuaternion"]["Z"].asDouble();
+ face.faceRotationQuaternion.W = jsonObject[i]["FaceRotationQuaternion"]["W"].asDouble();
+
+ //face.trackingId = jsonObject[i]["TrackingId"].asInt();
+ face.faceFrameFeatures = jsonObject[i]["FaceFrameFeatures"].asInt();
+
+ faceArray.faces.push_back(face);
+ }
+ }
+ catch (...)
+ {
+            ROS_ERROR("startFace - An exception occurred");
+ continue;
+ }
+ if (faceArray.faces.size() > 0)
+ facePub.publish(faceArray);
+ }
+ return 0;
+}
diff --git a/src/startRGB.cpp b/src/startRGB.cpp
index a66bf3e..a9c3e39 100644
--- a/src/startRGB.cpp
+++ b/src/startRGB.cpp
@@ -27,49 +27,38 @@ WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
***************************************************************************************/
#include "k2_client/k2_client.h"
-int imageSize = 6220800;
-int streamSize = imageSize + sizeof(double);
+int imageSize = 1920 * 1080 * 4;
+int streamSize = imageSize;
std::string cameraName = "rgb";
std::string imageTopicSubName = "image_color";
std::string cameraFrame = "";
int main(int argC,char **argV)
{
- ros::init(argC,argV,"startRGB");
- ros::NodeHandle n(cameraName);
- image_transport::ImageTransport imT(n);
- std::string serverAddress;
- n.getParam("/serverNameOrIP",serverAddress);
+ ros::init(argC,argV,"startRGB");
+ ros::NodeHandle n(cameraName);
+ image_transport::ImageTransport imT(n);
+ std::string serverAddress;
+ n.getParam("/serverNameOrIP",serverAddress);
n.getParam(ros::this_node::getNamespace().substr(1,std::string::npos) +
"/rgb_frame", cameraFrame);
- Socket mySocket(serverAddress.c_str(),"9000",streamSize);
- image_transport::CameraPublisher cameraPublisher = imT.advertiseCamera(
- imageTopicSubName, 1);
- camera_info_manager::CameraInfoManager camInfoMgr(n,cameraName);
- camInfoMgr.loadCameraInfo("");
- cv::Mat frame;
- cv_bridge::CvImage cvImage;
- sensor_msgs::Image rosImage;
- while(ros::ok())
- {
- printf("Got a frame.\n");
-
- mySocket.readData();
- printf("Creating mat.\n");
- frame = cv::Mat(cv::Size(1920,1080), CV_8UC3, mySocket.mBuffer);
- cv::flip(frame,frame,1);
- printf("Getting time.\n");
- double utcTime;
- memcpy(&utcTime,&mySocket.mBuffer[imageSize],sizeof(double));
+ Socket mySocket(serverAddress.c_str(),"9000",streamSize);
+ image_transport::Publisher imagePublisher = imT.advertise(imageTopicSubName, 1);
+ cv::Mat frame;
+ cv_bridge::CvImage cvImage;
+ sensor_msgs::Image rosImage;
+ while(ros::ok())
+ {
+ mySocket.readData();
+ frame = cv::Mat(cv::Size(1920, 1080), CV_8UC4, mySocket.mBuffer);
+ cv::cvtColor(frame, frame, CV_BGRA2BGR);
+ cv::flip(frame,frame,0);
cvImage.header.frame_id = cameraFrame.c_str();
cvImage.encoding = "bgr8";
cvImage.image = frame;
cvImage.toImageMsg(rosImage);
- sensor_msgs::CameraInfo camInfo = camInfoMgr.getCameraInfo();
- camInfo.header.frame_id = cvImage.header.frame_id;
- printf("Updating.\n");
- cameraPublisher.publish(rosImage, camInfo, ros::Time(utcTime));
+ imagePublisher.publish(rosImage);
ros::spinOnce();
- }
- return 0;
+ }
+ return 0;
}