more try with raw video

mpromonet 2019-08-27 14:19:13 +01:00
parent 874760a311
commit 6be9d9e625
6 changed files with 20 additions and 17 deletions

@@ -33,7 +33,7 @@ class BaseServerMediaSubsession
public:
static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, const std::string& format);
static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format, V4L2DeviceSource* source);
-char const* getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType);
+char const* getAuxLine(V4L2DeviceSource* source, RTPSink* rtpSink);
protected:
StreamReplicator* m_replicator;
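Passing the whole RTPSink instead of only a payload type gives getAuxLine() access to both pieces of information it needs: the dynamic payload type for the composed a=fmtp: fallback, and the sink's own auxSDPLine() when it can generate one (as RawVideoRTPSink does). A minimal sketch using the two live555 accessors this diff relies on (illustrative only, not a verbatim excerpt):

    // Sketch: what getAuxLine() can now ask the sink for.
    unsigned char pt = rtpSink->rtpPayloadType();   // used for the "a=fmtp:<pt> ..." fallback
    char const* sdp  = rtpSink->auxSDPLine();       // non-NULL when the sink builds its own SDP line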

@@ -59,7 +59,7 @@ std::list< std::pair<unsigned char*,size_t> > H264_V4L2DeviceSource::splitFrames
std::ostringstream os;
os << "profile-level-id=" << std::hex << std::setw(6) << std::setfill('0') << profile_level_id;
os << ";sprop-parameter-sets=" << sps_base64 <<"," << pps_base64 << "\r\n";
os << ";sprop-parameter-sets=" << sps_base64 <<"," << pps_base64;
m_auxLine.assign(os.str());
delete [] sps_base64;
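The trailing "\r\n" moves out of the codec-specific aux line because BaseServerMediaSubsession::getAuxLine() (further down) now appends one unconditionally when it wraps these parameters into an a=fmtp: attribute; keeping it here would have doubled the line ending. For a dynamic payload type the composed attribute ends up roughly like this, plus an a=x-dimensions: line when the capture size is known (payload type and parameter values are only illustrative):

    a=fmtp:96 profile-level-id=42c01f;sprop-parameter-sets=Z0LAH9oBQBbpUA==,aM4PyA==
    a=x-dimensions:1280,720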

@@ -57,6 +57,6 @@ char const* MulticastServerMediaSubsession::sdpLines()
char const* MulticastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
-return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
+return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink);
}

@@ -82,11 +82,9 @@ RTPSink* BaseServerMediaSubsession::createSink(UsageEnvironment& env, Groupsock
switch (source->getCaptureFormat()) {
case V4L2_PIX_FMT_YUV444: sampling = "YCbCr-4:4:4"; break;
case V4L2_PIX_FMT_YUYV: sampling = "YCbCr-4:2:2"; break;
+case V4L2_PIX_FMT_UYVY: sampling = "YCbCr-4:2:2"; break;
}
videoSink = RawVideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic, source->getHeight(), source->getWidth(), 8, sampling.c_str());
-if (videoSink) {
-source->setAuxLine(videoSink->auxSDPLine());
-}
}
#endif
else if (format.find("audio/L16") == 0)
@@ -104,21 +102,25 @@ RTPSink* BaseServerMediaSubsession::createSink(UsageEnvironment& env, Groupsock
return videoSink;
}
-char const* BaseServerMediaSubsession::getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType)
+char const* BaseServerMediaSubsession::getAuxLine(V4L2DeviceSource* source, RTPSink* rtpSink)
{
const char* auxLine = NULL;
-if (source)
-{
+if (rtpSink) {
std::ostringstream os;
os << "a=fmtp:" << int(rtpPayloadType) << " ";
os << source->getAuxLine();
int width = source->getWidth();
int height = source->getHeight();
if ( (width > 0) && (height>0) ) {
os << "a=x-dimensions:" << width << "," << height << "\r\n";
if (rtpSink->auxSDPLine()) {
os << rtpSink->auxSDPLine();
}
+else if (source) {
+unsigned char rtpPayloadType = rtpSink->rtpPayloadType();
+os << "a=fmtp:" << int(rtpPayloadType) << " " << source->getAuxLine() << "\r\n";
+int width = source->getWidth();
+int height = source->getHeight();
+if ( (width > 0) && (height>0) ) {
+os << "a=x-dimensions:" << width << "," << height << "\r\n";
+}
+}
auxLine = strdup(os.str().c_str());
-}
+}
return auxLine;
}
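The rewritten helper now prefers whatever SDP fragment the sink generates for itself and only falls back to composing the a=fmtp:/a=x-dimensions: pair from the V4L2 source, which is the path the H.264 source above still uses. For raw video the returned text is the sink's RFC 4175-style attribute, roughly of the form below (payload type and values are illustrative); for the fallback it is the source-composed pair shown after the H264_V4L2DeviceSource hunk earlier:

    a=fmtp:96 sampling=YCbCr-4:2:2; width=1280; height=720; depth=8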

@@ -32,6 +32,6 @@ RTPSink* UnicastServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,
char const* UnicastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
-return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
+return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink);
}

@@ -170,6 +170,7 @@ std::string getVideoRtpFormat(int format)
case V4L2_PIX_FMT_VP8 : rtpFormat = "video/VP8" ; break;
case V4L2_PIX_FMT_VP9 : rtpFormat = "video/VP9" ; break;
case V4L2_PIX_FMT_YUYV : rtpFormat = "video/RAW" ; break;
+case V4L2_PIX_FMT_UYVY : rtpFormat = "video/RAW" ; break;
}
return rtpFormat;
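Mapping V4L2_PIX_FMT_UYVY to "video/RAW" is what routes UYVY captures into the raw-video branch of createSink() above, since the format string returned here is what createSource()/createSink() dispatch on (compare the format.find(...) checks earlier in this diff). A trimmed sketch of the function after this change, with cases outside the visible hunk elided:

    std::string getVideoRtpFormat(int format)
    {
        std::string rtpFormat;
        switch (format) {
            // ... compressed-format cases (H.264, VP8 shown above, ...) precede these ...
            case V4L2_PIX_FMT_VP9  : rtpFormat = "video/VP9" ; break;
            case V4L2_PIX_FMT_YUYV : rtpFormat = "video/RAW" ; break;
            case V4L2_PIX_FMT_UYVY : rtpFormat = "video/RAW" ; break;  // added by this commit
        }
        return rtpFormat;
    }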