diff --git a/cpp/client/src/video-source.cpp b/cpp/client/src/video-source.cpp index cf702b4f..fd7db1fc 100644 --- a/cpp/client/src/video-source.cpp +++ b/cpp/client/src/video-source.cpp @@ -16,7 +16,7 @@ VideoSource::VideoSource(io_service& io_service, const std::string& sourcePath, const boost::shared_ptr& frame): io_(io_service), frame_(frame), isRunning_(false), framesSourced_(0), -nRewinds_(0) +nRewinds_(0), userDataSize_(0), userData_(nullptr) { assert(frame.get()); @@ -107,7 +107,8 @@ void VideoSource::deliverFrame(const RawFrame& frame) { for (auto capturer:capturers_) capturer->incomingArgbFrame(frame.getWidth(), frame.getHeight(), - frame.getBuffer().get(), frame.getFrameSizeInBytes()); + frame.getBuffer().get(), frame.getFrameSizeInBytes(), + userDataSize_, userData_); // LogTrace("") << "delivered frame to " << capturers_.size() << " capturers" << endl; } diff --git a/cpp/client/src/video-source.hpp b/cpp/client/src/video-source.hpp index 016cf349..ba89714b 100644 --- a/cpp/client/src/video-source.hpp +++ b/cpp/client/src/video-source.hpp @@ -44,7 +44,8 @@ class VideoSource { private: bool isRunning_; - unsigned int framesSourced_, nRewinds_; + unsigned char* userData_; + unsigned int framesSourced_, nRewinds_, userDataSize_; boost::shared_ptr source_; boost::shared_ptr frame_; std::vector capturers_; @@ -55,4 +56,4 @@ class VideoSource { void deliverFrame(const RawFrame& frame); }; -#endif \ No newline at end of file +#endif diff --git a/cpp/include/interfaces.hpp b/cpp/include/interfaces.hpp index 15bfbf8f..8c28e20b 100644 --- a/cpp/include/interfaces.hpp +++ b/cpp/include/interfaces.hpp @@ -77,7 +77,9 @@ namespace ndnrtc virtual int incomingArgbFrame(const unsigned int width, const unsigned int height, unsigned char* argbFrameData, - unsigned int frameSize) = 0; + unsigned int frameSize, + unsigned int userDataSize = 0, + unsigned char* userData = nullptr) = 0; /** * Alternative method for delivering YUV frames (I420 or y420 or @@ -91,7 +93,9 @@ 
namespace ndnrtc const unsigned int strideV, const unsigned char* yBuffer, const unsigned char* uBuffer, - const unsigned char* vBuffer) = 0; + const unsigned char* vBuffer, + unsigned int userDataSize = 0, + unsigned char* userData = nullptr) = 0; }; } diff --git a/cpp/include/local-stream.hpp b/cpp/include/local-stream.hpp index b7a1b757..b9448065 100644 --- a/cpp/include/local-stream.hpp +++ b/cpp/include/local-stream.hpp @@ -175,7 +175,9 @@ namespace ndnrtc { int incomingArgbFrame(const unsigned int width, const unsigned int height, unsigned char* argbFrameData, - unsigned int frameSize); + unsigned int frameSize, + unsigned int userDataSize = 0, + unsigned char* userData = nullptr); /** * Encode and publish I420 frame data. @@ -191,7 +193,9 @@ namespace ndnrtc { const unsigned int strideV, const unsigned char* yBuffer, const unsigned char* uBuffer, - const unsigned char* vBuffer); + const unsigned char* vBuffer, + unsigned int userDataSize = 0, + unsigned char* userData = nullptr); /** * Returns full stream prefix used for publishing data @@ -226,4 +230,4 @@ namespace ndnrtc { }; } -#endif \ No newline at end of file +#endif diff --git a/cpp/src/frame-data.hpp b/cpp/src/frame-data.hpp index 95842bf3..e415ee6e 100755 --- a/cpp/src/frame-data.hpp +++ b/cpp/src/frame-data.hpp @@ -610,7 +610,7 @@ namespace ndnrtc { ENABLE_IF(T,Mutable) VideoFramePacketT(const webrtc::EncodedImage& frame): HeaderPacketT(frame._length, frame._buffer), - isSyncListSet_(false) + isSyncListSet_(false), isUserDataSet_(false) { assert(frame._encodedWidth); assert(frame._encodedHeight); @@ -652,13 +652,40 @@ namespace ndnrtc { typedef typename std::vector::Blob>::const_iterator BlobIterator; std::map syncList; - for (BlobIterator blob = this->blobs_.begin()+1; - blob+1 < this->blobs_.end(); blob+=2) - syncList[std::string((const char*)blob->data(), blob->size())] = *(PacketNumber*)(blob+1)->data(); + for (BlobIterator blob = this->blobs_.begin()+1;blob+1 < this->blobs_.end(); blob+=2) + { 
+ std::string mapKey((const char*)blob->data(), blob->size()); + std::map::iterator it = syncList.find(mapKey); + if(it == syncList.end()) + syncList[mapKey] = *(PacketNumber*)(blob+1)->data(); + } return boost::move(syncList); } + const std::map getUserData() const + { + typedef typename std::vector::Blob>::const_iterator BlobIterator; + std::map userData; + std::vector keyVector; + + for (BlobIterator blob = this->blobs_.begin()+1;blob+1 < this->blobs_.end(); blob+=2) + { + std::string mapKey((const char*)blob->data(), blob->size()); + std::vector::iterator it = std::find(keyVector.begin(), keyVector.end(), mapKey); + + if(it == keyVector.end()) + keyVector.push_back(mapKey); + else + { + std::string ud((const char*)(blob+1)->data()); + userData[mapKey] = ud; + } + } + + return boost::move(userData); + } + + ENABLE_IF(T,Mutable) + boost::shared_ptr getParityData(size_t segmentLength, double ratio) @@ -702,6 +729,28 @@ namespace ndnrtc { isSyncListSet_ = true; } + /** + * Provide user data along with video frames for client code. + * e.g. for unsigned char userData[] = "data\0", the length is given as 4 (the terminating '\0' is not counted). + * NOTE: User data should end with '\0'. 
+ */ + ENABLE_IF(T,Mutable) + void setUserData(const std::map>& userData) + { + if (this->isHeaderSet()) throw std::runtime_error("Can't add more data to this packet" + " as header has been set already"); + if (isUserDataSet_) throw std::runtime_error("User Data has been already set"); + + for (auto it:userData) + { + std::string ud(it.second.second, it.second.second+it.second.first+1); + this->addBlob(it.first.size(), (uint8_t*)it.first.c_str()); + this->addBlob(ud.length(), (uint8_t*)ud.c_str()); + } + + isUserDataSet_ = true; + } + typedef std::vector> ImmutableVideoSegmentsVector; typedef std::vector> ImmutableRecoverySegmentsVector; @@ -720,7 +769,7 @@ namespace ndnrtc { } __attribute__((packed)) Header; webrtc::EncodedImage frame_; - bool isSyncListSet_; + bool isSyncListSet_, isUserDataSet_; }; typedef VideoFramePacketT<> VideoFramePacket; diff --git a/cpp/src/local-stream.cpp b/cpp/src/local-stream.cpp index 9ae63ba7..46cc4b4e 100644 --- a/cpp/src/local-stream.cpp +++ b/cpp/src/local-stream.cpp @@ -135,9 +135,12 @@ void LocalVideoStream::removeThread(const string& threadName) int LocalVideoStream::incomingArgbFrame(const unsigned int width, const unsigned int height, unsigned char* argbFrameData, - unsigned int frameSize) + unsigned int frameSize, + unsigned int userDataSize, + unsigned char* userData) { - return pimpl_->incomingFrame(ArgbRawFrameWrapper({width, height, argbFrameData, frameSize})); + return pimpl_->incomingFrame(ArgbRawFrameWrapper({width, height, argbFrameData, frameSize}), + userDataSize, userData); } int LocalVideoStream::incomingI420Frame(const unsigned int width, @@ -147,10 +150,12 @@ int LocalVideoStream::incomingI420Frame(const unsigned int width, const unsigned int strideV, const unsigned char* yBuffer, const unsigned char* uBuffer, - const unsigned char* vBuffer) + const unsigned char* vBuffer, + unsigned int userDataSize, + unsigned char* userData) { return pimpl_->incomingFrame(I420RawFrameWrapper({width, height, strideY, 
strideU, - strideV, yBuffer, uBuffer, vBuffer})); + strideV, yBuffer, uBuffer, vBuffer}), userDataSize, userData); } string diff --git a/cpp/src/video-stream-impl.cpp b/cpp/src/video-stream-impl.cpp index 5c8b122b..0d21a0a7 100644 --- a/cpp/src/video-stream-impl.cpp +++ b/cpp/src/video-stream-impl.cpp @@ -84,19 +84,19 @@ vector VideoStreamImpl::getThreads() const } int -VideoStreamImpl::incomingFrame(const ArgbRawFrameWrapper& w) +VideoStreamImpl::incomingFrame(const ArgbRawFrameWrapper& w, unsigned int userDataSize, unsigned char* userData) { LogDebugC << "⤹ incoming ARGB frame " << w.width_ << "x" << w.height_ << std::endl; - if (feedFrame(conv_ << w)) + if (feedFrame(conv_ << w, userDataSize, userData)) return (playbackCounter_-1); return -1; } int -VideoStreamImpl::incomingFrame(const I420RawFrameWrapper& w) +VideoStreamImpl::incomingFrame(const I420RawFrameWrapper& w, unsigned int userDataSize, unsigned char* userData) { LogDebugC << "⤹ incoming I420 frame " << w.width_ << "x" << w.height_ << std::endl; - if (feedFrame(conv_ << w)) + if (feedFrame(conv_ << w, userDataSize, userData)) return (playbackCounter_-1); return -1; } @@ -155,7 +155,7 @@ void VideoStreamImpl::remove(const string& threadName) } } -bool VideoStreamImpl::feedFrame(const WebRtcVideoFrame& frame) +bool VideoStreamImpl::feedFrame(const WebRtcVideoFrame& frame, unsigned int userDataSize, unsigned char* userData) { (*statStorage_)[Indicator::CapturedNum]++; @@ -171,15 +171,18 @@ bool VideoStreamImpl::feedFrame(const WebRtcVideoFrame& frame) LogDebugC << "↓ feeding "<< playbackCounter_ << "p into encoders..." 
<< std::endl; map futureFrames; + map> allUserData; for (auto it:threads_) { FutureFramePtr ff = boost::make_shared(boost::move(boost::async(boost::launch::async, boost::bind(&VideoThread::encode, it.second.get(), (*scalers_[it.first])(frame))))); futureFrames[it.first] = ff; + allUserData[it.first] = make_pair(userDataSize, userData); } map frames; + map> filteredUserData; for (auto it:futureFrames) { FramePacketPtr f(it.second->get()); @@ -187,6 +190,7 @@ bool VideoStreamImpl::feedFrame(const WebRtcVideoFrame& frame) { (*statStorage_)[Indicator::EncodedNum]++; frames[it.first] = f; + filteredUserData[it.first] = allUserData[it.first]; } } @@ -195,7 +199,7 @@ bool VideoStreamImpl::feedFrame(const WebRtcVideoFrame& frame) if (frames.size()) { - publish(frames); + publish(frames, filteredUserData); playbackCounter_++; result = true; } @@ -213,7 +217,8 @@ bool VideoStreamImpl::feedFrame(const WebRtcVideoFrame& frame) LogWarnC << "incoming frame was given, but there are no threads" << std::endl; } -void VideoStreamImpl::publish(map& frames) +void +VideoStreamImpl::publish(map& frames, map> filteredUserData) { LogTraceC << "will publish " << frames.size() << " frames" << std::endl; @@ -227,6 +232,7 @@ void VideoStreamImpl::publish(map& frames) packetHdr.publishUnixTimestampMs_ = clock::unixTimestamp(); it.second->setSyncList(getCurrentSyncList(isKey)); + it.second->setUserData(filteredUserData); it.second->setHeader(packetHdr); LogTraceC << "thread " << it.first << " " << packetHdr.sampleRate_ diff --git a/cpp/src/video-stream-impl.hpp b/cpp/src/video-stream-impl.hpp index 6ae004ca..4dbad705 100644 --- a/cpp/src/video-stream-impl.hpp +++ b/cpp/src/video-stream-impl.hpp @@ -12,6 +12,7 @@ #include #include #include +#include #include "media-stream-base.hpp" #include "ndnrtc-object.hpp" @@ -46,8 +47,8 @@ namespace ndnrtc { std::vector getThreads() const; - int incomingFrame(const ArgbRawFrameWrapper&); - int incomingFrame(const I420RawFrameWrapper&); + int 
incomingFrame(const ArgbRawFrameWrapper&, unsigned int userDataSize=0, unsigned char* userData=nullptr); + int incomingFrame(const I420RawFrameWrapper&, unsigned int userDataSize=0, unsigned char* userData=nullptr); void setLogger(boost::shared_ptr); private: @@ -88,12 +89,14 @@ namespace ndnrtc { void remove(const std::string& threadName); bool updateMeta(); - bool feedFrame(const WebRtcVideoFrame& frame); - void publish(std::map>& frames); + bool feedFrame(const WebRtcVideoFrame& frame, unsigned int userDataSize=0, unsigned char* userData=nullptr); + void publish(std::map>& frames, + std::map> filteredUserData = + boost::assign::map_list_of("", std::make_pair(0, nullptr))); void publish(const std::string& thread, boost::shared_ptr& fp); void publishManifest(ndn::Name dataName, PublishedDataPtrVector& segments); std::map getCurrentSyncList(bool forKey = false); }; } -#endif \ No newline at end of file +#endif diff --git a/cpp/tests/mock-objects/external-capturer-mock.hpp b/cpp/tests/mock-objects/external-capturer-mock.hpp index 3a82217d..7abd7910 100644 --- a/cpp/tests/mock-objects/external-capturer-mock.hpp +++ b/cpp/tests/mock-objects/external-capturer-mock.hpp @@ -18,7 +18,9 @@ class MockExternalCapturer : public ndnrtc::IExternalCapturer MOCK_METHOD4(incomingArgbFrame, int(const unsigned int width, const unsigned int height, unsigned char* argbFrameData, - unsigned int frameSize)); + unsigned int frameSize, + unsigned int userDataSize, + unsigned char* userData)); MOCK_METHOD8(incomingI420Frame, int(const unsigned int width, const unsigned int height, const unsigned int strideY, @@ -26,7 +28,9 @@ class MockExternalCapturer : public ndnrtc::IExternalCapturer const unsigned int strideV, const unsigned char* yBuffer, const unsigned char* uBuffer, - const unsigned char* vBuffer)); + const unsigned char* vBuffer, + unsigned int userDataSize, + unsigned char* userData)); }; #endif diff --git a/cpp/tests/test-network-data.cc b/cpp/tests/test-network-data.cc index 
e36a0d6a..d593ba2d 100644 --- a/cpp/tests/test-network-data.cc +++ b/cpp/tests/test-network-data.cc @@ -857,6 +857,66 @@ TEST(TestVideoFramePacket, TestAddSyncList) for (int i = 0; i < frameLen; ++i) EXPECT_EQ(buffer[i], fp.getFrame()._buffer[i]); } +TEST(TestVideoFramePacket, TestAddUserData) +{ + size_t frameLen = 4300; + int32_t size = webrtc::CalcBufferSize(webrtc::kI420, 640, 480); + uint8_t *buffer = (uint8_t*)malloc(frameLen); + for (int i = 0; i < frameLen; ++i) buffer[i] = i%255; + + webrtc::EncodedImage frame(buffer, frameLen, size); + frame._encodedWidth = 640; + frame._encodedHeight = 480; + frame._timeStamp = 1460488589; + frame.capture_time_ms_ = 1460488569; + frame._frameType = webrtc::kVideoFrameKey; + frame._completeFrame = true; + + VideoFramePacket fp(frame); + std::map syncList = boost::assign::map_list_of ("hi", 341) ("mid", 433) ("low", 432); + + fp.setSyncList(syncList); + EXPECT_EQ(syncList, fp.getSyncList()); + + unsigned char c1[] = "data1\0"; + unsigned char c2[] = "data22\0"; + unsigned char c3[] = "data333\0"; + std::map> userData = boost::assign::map_list_of + ("hi", std::make_pair(5, c1)) ("mid", std::make_pair(6, c2)) ("low",std::make_pair(7, c3)); + std::map ud = boost::assign::map_list_of ("hi", "data1") ("mid", "data22") ("low", "data333"); + + fp.setUserData(userData); + EXPECT_EQ(ud, fp.getUserData()); + + CommonHeader hdr; + hdr.sampleRate_ = 24.7; + hdr.publishTimestampMs_ = 488589553; + hdr.publishUnixTimestampMs_ = 1460488589; + + fp.setHeader(hdr); + EXPECT_EQ(syncList, fp.getSyncList()); + EXPECT_EQ(ud, fp.getUserData()); + + EXPECT_EQ(hdr.sampleRate_, fp.getHeader().sampleRate_); + EXPECT_EQ(hdr.publishTimestampMs_, fp.getHeader().publishTimestampMs_); + EXPECT_EQ(hdr.publishUnixTimestampMs_, fp.getHeader().publishUnixTimestampMs_); + + int length = fp.getLength(); + boost::shared_ptr parityData = fp.getParityData(VideoFrameSegment::payloadLength(1000), 0.2); + EXPECT_TRUE(parityData.get()); + EXPECT_EQ(length, 
fp.getLength()); + EXPECT_EQ(hdr.sampleRate_, fp.getHeader().sampleRate_); + EXPECT_EQ(hdr.publishTimestampMs_, fp.getHeader().publishTimestampMs_); + EXPECT_EQ(hdr.publishUnixTimestampMs_, fp.getHeader().publishUnixTimestampMs_); + EXPECT_EQ(frame._encodedWidth , fp.getFrame()._encodedWidth ); + EXPECT_EQ(frame._encodedHeight , fp.getFrame()._encodedHeight ); + EXPECT_EQ(frame._timeStamp , fp.getFrame()._timeStamp ); + EXPECT_EQ(frame.capture_time_ms_ , fp.getFrame().capture_time_ms_); + EXPECT_EQ(frame._frameType , fp.getFrame()._frameType ); + EXPECT_EQ(frame._completeFrame , fp.getFrame()._completeFrame ); + for (int i = 0; i < frameLen; ++i) EXPECT_EQ(buffer[i], fp.getFrame()._buffer[i]); +} + TEST(TestVideoFramePacket, TestFromNetworkData) { size_t frameLen = 4300; @@ -879,8 +939,16 @@ TEST(TestVideoFramePacket, TestFromNetworkData) std::map syncList = boost::assign::map_list_of ("hi", 341) ("mid", 433) ("low", 432); + unsigned char c1[] = "data1\0"; + unsigned char c2[] = "data22\0"; + unsigned char c3[] = "data333\0"; + std::map> userData = boost::assign::map_list_of + ("hi", std::make_pair(5, c1)) ("mid", std::make_pair(6, c2)) ("low",std::make_pair(7, c3)); + std::map ud = boost::assign::map_list_of ("hi", "data1") ("mid", "data22") ("low", "data333"); + VideoFramePacket first(frame); first.setSyncList(syncList); + first.setUserData(userData); first.setHeader(hdr); VideoFramePacket fp(boost::move((NetworkData&)first)); @@ -888,6 +956,7 @@ TEST(TestVideoFramePacket, TestFromNetworkData) EXPECT_EQ(0, first.getLength()); EXPECT_FALSE(first.isValid()); EXPECT_EQ(syncList, fp.getSyncList()); + EXPECT_EQ(ud, fp.getUserData()); EXPECT_EQ(hdr.sampleRate_, fp.getHeader().sampleRate_); EXPECT_EQ(hdr.publishTimestampMs_, fp.getHeader().publishTimestampMs_); @@ -930,6 +999,39 @@ TEST(TestVideoFramePacket, TestAddSyncListThrow) EXPECT_ANY_THROW(fp.setSyncList(syncList)); } +TEST(TestVideoFramePacket, TestAddUserDataThrow) +{ + size_t frameLen = 4300; + int32_t size = 
webrtc::CalcBufferSize(webrtc::kI420, 640, 480); + uint8_t *buffer = (uint8_t*)malloc(frameLen); + for (int i = 0; i < frameLen; ++i) buffer[i] = i%255; + + webrtc::EncodedImage frame(buffer, frameLen, size); + frame._encodedWidth = 640; + frame._encodedHeight = 480; + frame._timeStamp = 1460488589; + frame.capture_time_ms_ = 1460488569; + frame._frameType = webrtc::kVideoFrameKey; + frame._completeFrame = true; + + VideoFramePacket fp(frame); + + CommonHeader hdr; + hdr.sampleRate_ = 24.7; + hdr.publishTimestampMs_ = 488589553; + hdr.publishUnixTimestampMs_ = 1460488589; + + fp.setHeader(hdr); + + unsigned char c1[] = "data1\0"; + unsigned char c2[] = "data22\0"; + unsigned char c3[] = "data333\0"; + std::map> userData = boost::assign::map_list_of + ("hi", std::make_pair(5, c1)) ("mid", std::make_pair(6, c2)) ("low",std::make_pair(7, c3)); + + EXPECT_ANY_THROW(fp.setUserData(userData)); +} + TEST(TestVideoFramePacket, TestSliceFrame) { CommonHeader hdr; @@ -952,8 +1054,14 @@ TEST(TestVideoFramePacket, TestSliceFrame) VideoFramePacket vp(frame); std::map syncList = boost::assign::map_list_of ("hi", 341) ("mid", 433) ("low", 432); + unsigned char c1[] = "data1\0"; + unsigned char c2[] = "data22\0"; + unsigned char c3[] = "data333\0"; + std::map> userData = boost::assign::map_list_of + ("hi", std::make_pair(5, c1)) ("mid", std::make_pair(6, c2)) ("low",std::make_pair(7, c3)); vp.setSyncList(syncList); + vp.setUserData(userData); vp.setHeader(hdr); boost::shared_ptr parity = vp.getParityData(VideoFrameSegment::payloadLength(1000), 0.2); @@ -993,8 +1101,14 @@ TEST(TestVideoFramePacket, TestGetParity) VideoFramePacket vp(frame); std::map syncList = boost::assign::map_list_of ("hi", 341) ("mid", 433) ("low", 432); - + unsigned char c1[] = "data1\0"; + unsigned char c2[] = "data22\0"; + unsigned char c3[] = "data333\0"; + std::map> userData = boost::assign::map_list_of + ("hi", std::make_pair(5, c1)) ("mid", std::make_pair(6, c2)) ("low",std::make_pair(7, c3)); + 
vp.setSyncList(syncList); + vp.setUserData(userData); vp.setHeader(hdr); boost::shared_ptr parity = vp.getParityData(VideoFrameSegment::payloadLength(8000), 0.2); @@ -1379,8 +1493,15 @@ TEST(TestWireData, TestMergeVideoFramePacket) VideoFramePacket vp(frame); std::map syncList = boost::assign::map_list_of ("hi", 341) ("mid", 433) ("low", 432); + unsigned char c1[] = "data1\0"; + unsigned char c2[] = "data22\0"; + unsigned char c3[] = "data333\0"; + std::map> userData = boost::assign::map_list_of + ("hi", std::make_pair(5, c1)) ("mid", std::make_pair(6, c2)) ("low",std::make_pair(7, c3)); + std::map ud = boost::assign::map_list_of ("hi", "data1") ("mid", "data22") ("low", "data333"); vp.setSyncList(syncList); + vp.setUserData(userData); vp.setHeader(hdr); boost::shared_ptr parity = vp.getParityData(VideoFrameSegment::payloadLength(1000), 0.2); @@ -1470,6 +1591,14 @@ TEST(TestWireData, TestMergeVideoFramePacket) ASSERT_NE(packet->getSyncList().end(), packet->getSyncList().find(t.first)); EXPECT_EQ(t.second, packet->getSyncList().at(t.first)); } + + EXPECT_EQ(ud.size(), packet->getUserData().size()); + idx = 0; + for (auto t:ud) + { + ASSERT_NE(packet->getUserData().end(), packet->getUserData().find(t.first)); + EXPECT_EQ(t.second, packet->getUserData().at(t.first)); + } } TEST(TestWireData, TestMergeAudioBundle)