try to support video/RAW

pull/120/head
Michel Promonet 6 years ago
parent 5936006374
commit c960a44846

@ -51,6 +51,7 @@ class ALSACapture
// --- ALSACapture accessor surface (audio capture backend of DeviceInterface) ---
// Size in bytes of the capture buffer consumed per read().
virtual unsigned long getBufferSize() { return m_bufferSize; };
// Audio capture has no video geometry: width/height report -1.
virtual int getWidth() {return -1;}
virtual int getHeight() {return -1;}
// No video pixel format for an audio device; -1 = not applicable
// (video backends return a V4L2 fourcc here — see getVideoRtpFormat()).
virtual int getCaptureFormat() {return -1;}
// Audio parameters as configured in m_params.
unsigned long getSampleRate() { return m_params.m_sampleRate; }
unsigned long getChannels () { return m_params.m_channels; }

@ -23,6 +23,7 @@ class DeviceInterface
// Abstract capture-device contract implemented by the audio (ALSA) and video (V4L2) backends.
virtual unsigned long getBufferSize() = 0;
// Video geometry; implementations without video (e.g. ALSACapture) return -1.
virtual int getWidth() = 0;
virtual int getHeight() = 0;
// Capture pixel format (a V4L2 fourcc for video backends); -1 when not applicable.
// Added so RTP sink creation can choose a RawVideoRTPSink sampling string.
virtual int getCaptureFormat() = 0;
// Virtual destructor: owners delete concrete devices through this interface pointer.
virtual ~DeviceInterface() {};
};
@ -40,8 +41,9 @@ class DeviceCaptureAccess : public DeviceInterface
// DeviceCaptureAccess<T>: thin forwarding adapter that exposes a concrete capture
// device T through the DeviceInterface contract by delegating every call to m_device.
virtual size_t read(char* buffer, size_t bufferSize) { return m_device->read(buffer, bufferSize); }
virtual int getFd() { return m_device->getFd(); }
virtual unsigned long getBufferSize() { return m_device->getBufferSize(); }
virtual int getWidth() { return m_device->getWidth(); }
virtual int getHeight() { return m_device->getHeight(); }
// NOTE(review): getWidth/getHeight appear twice in this capture — almost certainly
// a diff-rendering artifact (old + new context lines); a real header defines each once.
virtual int getWidth() { return m_device->getWidth(); }
virtual int getHeight() { return m_device->getHeight(); }
// Maps the interface's getCaptureFormat() onto the device's getFormat() accessor.
virtual int getCaptureFormat() { return m_device->getFormat(); }
protected:
// Non-owning here as far as this fragment shows — TODO confirm ownership/deletion site.
T* m_device;

@ -62,8 +62,10 @@ class V4L2DeviceSource: public FramedSource
public:
// Factory: builds a V4L2DeviceSource wrapping `device`; frames are optionally drained
// by a dedicated thread and/or duplicated to outputFd.
static V4L2DeviceSource* createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread) ;
// Extra SDP attribute line associated with this source (e.g. produced by a RawVideoRTPSink).
std::string getAuxLine() { return m_auxLine; };
void setAuxLine(const std::string auxLine) { m_auxLine = auxLine; };
// Geometry/format forwarded from the underlying capture device, so sink
// factories can size and describe the RTP payload without touching the device.
int getWidth() { return m_device->getWidth(); };
int getHeight() { return m_device->getHeight(); };
int getCaptureFormat() { return m_device->getCaptureFormat(); };
protected:
V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread);

@ -32,7 +32,7 @@ class BaseServerMediaSubsession
public:
// Builds the FramedSource chain appropriate for `format` (e.g. adds framers/parsers).
static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, const std::string& format);
// Sink factory. NOTE(review): both the old 4-arg and the new 5-arg signature are
// visible in this capture — diff residue; the commit replaces the former with the
// latter, which takes the V4L2DeviceSource so "video/RAW" sinks can query
// width/height/pixel format when constructing a RawVideoRTPSink.
static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format);
static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format, V4L2DeviceSource* source);
// Returns the SDP aux line for the source at the given dynamic payload type.
char const* getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType);
protected:

@ -29,7 +29,7 @@ MulticastServerMediaSubsession* MulticastServerMediaSubsession::createNew(UsageE
Groupsock* rtcpGroupsock = new Groupsock(env, destinationAddress, rtcpPortNum, ttl);
// Create a RTP sink
RTPSink* videoSink = createSink(env, rtpGroupsock, 96, format);
RTPSink* videoSink = createSink(env, rtpGroupsock, 96, format, dynamic_cast<V4L2DeviceSource*>(replicator->inputSource()));
// Create 'RTCP instance'
const unsigned maxCNAMElen = 100;

@ -8,7 +8,7 @@
** -------------------------------------------------------------------------*/
#include <sstream>
#include <linux/videodev2.h>
// project
#include "ServerMediaSubsession.h"
@ -46,7 +46,7 @@ FramedSource* BaseServerMediaSubsession::createSource(UsageEnvironment& env, Fra
return source;
}
// Sink factory: selects an RTPSink subclass from the MIME-style `format` string.
// NOTE(review): both the old and new signatures are visible below (diff residue);
// the commit keeps only the 5-arg form with the V4L2DeviceSource parameter.
RTPSink* BaseServerMediaSubsession::createSink(UsageEnvironment& env, Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format)
RTPSink* BaseServerMediaSubsession::createSink(UsageEnvironment& env, Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format, V4L2DeviceSource* source)
{
RTPSink* videoSink = NULL;
if (format == "video/MP2T")
@ -74,7 +74,21 @@ RTPSink* BaseServerMediaSubsession::createSink(UsageEnvironment& env, Groupsock
else if (format == "video/JPEG")
{
videoSink = JPEGVideoRTPSink::createNew (env, rtpGroupsock);
}
}
// RawVideoRTPSink only exists in sufficiently recent live555 releases; the
// guard value is a date-coded version stamp — presumably the release that
// introduced RawVideoRTPSink. TODO confirm against live555 changelog.
#if LIVEMEDIA_LIBRARY_VERSION_INT >= 1536192000
else if (format =="video/RAW")
{
// Translate the device's V4L2 fourcc into the sampling string RawVideoRTPSink expects.
// NOTE(review): no default case — any other fourcc leaves `sampling` empty and
// passes "" to RawVideoRTPSink; consider rejecting unsupported formats explicitly.
std::string sampling;
switch (source->getCaptureFormat()) {
case V4L2_PIX_FMT_YUV444: sampling = "YCbCr-4:4:4"; break;
case V4L2_PIX_FMT_YUYV: sampling = "YCbCr-4:2:2"; break;
}
// NOTE(review): depth is hard-coded to 16 for both samplings — verify against
// the RawVideoRTPSink/RFC 4175 depth semantics for 4:4:4 vs 4:2:2.
videoSink = RawVideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic, source->getHeight(), source->getWidth(), 16, sampling.c_str());
if (videoSink) {
// Publish the sink's SDP line on the source so getAuxSDPLine() can serve it later.
source->setAuxLine(videoSink->auxSDPLine());
}
}
#endif
// "audio/L16;rate=...;channels=..." — parsed from the format string below.
else if (format.find("audio/L16") == 0)
{
std::istringstream is(format);
@ -27,7 +27,7 @@ FramedSource* UnicastServerMediaSubsession::createNewStreamSource(unsigned clien
// Creates the RTP sink for a unicast session by delegating to the shared
// BaseServerMediaSubsession::createSink() factory, forwarding the replicator's
// input source (downcast to V4L2DeviceSource, NULL for non-V4L2 sources) so that
// "video/RAW" sinks can query width/height/pixel format for the SDP description.
// Fix: the captured text contained BOTH the old 4-arg call and the new 5-arg call
// (diff residue) — the second return was unreachable; only the 5-arg call is kept.
RTPSink* UnicastServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
	return createSink(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, m_format, dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()));
}
char const* UnicastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)

@ -190,6 +190,7 @@ std::string getVideoRtpFormat(int format, bool muxTS)
// Maps a V4L2 capture fourcc to the RTP MIME-style format string used by createSink().
case V4L2_PIX_FMT_JPEG : rtpFormat = "video/JPEG"; break;
case V4L2_PIX_FMT_VP8  : rtpFormat = "video/VP8" ; break;
case V4L2_PIX_FMT_VP9  : rtpFormat = "video/VP9" ; break;
// New: uncompressed YUYV streams as "video/RAW" (served by RawVideoRTPSink).
// NOTE(review): createSink() also handles V4L2_PIX_FMT_YUV444 for "video/RAW",
// but no mapping for it is added here — YUV444 devices never select RAW; confirm.
case V4L2_PIX_FMT_YUYV : rtpFormat = "video/RAW" ; break;
}
}

Loading…
Cancel
Save