Add an option to send a Transport Stream instead of an Elementary Stream; rename option -T to -p so that -T can be used for TS.

pull/33/head
Michel Promonet 9 years ago
parent 871fed8aff
commit 5d0746f1f7

@ -24,10 +24,10 @@ const char H264shortmarker[] = {0,0,1};
class H264_V4L2DeviceSource : public V4L2DeviceSource
{
public:
static H264_V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig) ;
static H264_V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker) ;
protected:
H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig);
H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker);
virtual ~H264_V4L2DeviceSource();
unsigned char* extractFrame(unsigned char* frame, size_t& size, size_t& outsize);
@ -39,6 +39,8 @@ class H264_V4L2DeviceSource : public V4L2DeviceSource
std::string m_sps;
std::string m_pps;
bool m_repeatConfig;
bool m_keepMarker;
int m_frameType;
};
#endif

@ -12,6 +12,7 @@
#include <string>
#include <iomanip>
#include <iostream>
// live555
#include <liveMedia.hh>
@ -28,8 +29,8 @@ class BaseServerMediaSubsession
BaseServerMediaSubsession(StreamReplicator* replicator): m_replicator(replicator) {};
public:
static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, int format);
static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, int format);
static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, const std::string& format);
static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format);
char const* getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType);
protected:
@ -47,7 +48,7 @@ class MulticastServerMediaSubsession : public PassiveServerMediaSubsession , pub
, Port rtpPortNum, Port rtcpPortNum
, int ttl
, StreamReplicator* replicator
, int format);
, const std::string& format);
protected:
MulticastServerMediaSubsession(StreamReplicator* replicator, RTPSink* rtpSink, RTCPInstance* rtcpInstance)
@ -67,10 +68,10 @@ class MulticastServerMediaSubsession : public PassiveServerMediaSubsession , pub
class UnicastServerMediaSubsession : public OnDemandServerMediaSubsession , public BaseServerMediaSubsession
{
public:
static UnicastServerMediaSubsession* createNew(UsageEnvironment& env, StreamReplicator* replicator, int format);
static UnicastServerMediaSubsession* createNew(UsageEnvironment& env, StreamReplicator* replicator, const std::string& format);
protected:
UnicastServerMediaSubsession(UsageEnvironment& env, StreamReplicator* replicator, int format)
UnicastServerMediaSubsession(UsageEnvironment& env, StreamReplicator* replicator, const std::string& format)
: OnDemandServerMediaSubsession(env, False), BaseServerMediaSubsession(replicator), m_format(format) {};
virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
@ -78,7 +79,7 @@ class UnicastServerMediaSubsession : public OnDemandServerMediaSubsession , publ
virtual char const* getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource);
protected:
int m_format;
const std::string m_format;
};
#endif
#endif

@ -21,19 +21,19 @@
// ---------------------------------
// H264 V4L2 FramedSource
// ---------------------------------
// Factory for the H264 V4L2 framed source.
// @param keepMarker  when true, the start-code marker is kept in front of each
//                    extracted NAL unit (needed when frames feed a TS muxer).
// @return a new source, or NULL when no capture device is provided.
H264_V4L2DeviceSource* H264_V4L2DeviceSource::createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker)
{
	H264_V4L2DeviceSource* source = NULL;
	if (device)
	{
		source = new H264_V4L2DeviceSource(env, params, device, outputFd, queueSize, useThread, repeatConfig, keepMarker);
	}
	return source;
}
// Constructor: forwards capture parameters to the base device source and
// records the H264-specific options; m_frameType starts at 0 (no NAL seen yet).
H264_V4L2DeviceSource::H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker)
	: V4L2DeviceSource(env, params, device, outputFd, queueSize,useThread), m_repeatConfig(repeatConfig), m_keepMarker(keepMarker), m_frameType(0)
{
}
@ -52,7 +52,7 @@ std::list< std::pair<unsigned char*,size_t> > H264_V4L2DeviceSource::splitFrames
unsigned char* buffer = this->extractFrame(frame, bufSize, size);
while (buffer != NULL)
{
switch (buffer[0]&0x1F)
switch (m_frameType)
{
case 7: LOG(INFO) << "SPS size:" << size; m_sps.assign((char*)buffer,size); break;
case 8: LOG(INFO) << "PPS size:" << size; m_pps.assign((char*)buffer,size); break;
@ -96,6 +96,7 @@ unsigned char* H264_V4L2DeviceSource::extractFrame(unsigned char* frame, size_t
unsigned char * outFrame = NULL;
outsize = 0;
unsigned int markerlength = 0;
m_frameType = 0;
if ( (size>= sizeof(H264marker)) && (memcmp(frame,H264marker,sizeof(H264marker)) == 0) )
{
markerlength = sizeof(H264marker);
@ -107,16 +108,28 @@ unsigned char* H264_V4L2DeviceSource::extractFrame(unsigned char* frame, size_t
if (markerlength != 0)
{
size -= markerlength;
outFrame = &frame[markerlength];
outsize = size;
for (int i=0; i+sizeof(H264marker) < size; ++i)
m_frameType = (frame[markerlength]&0x1F);
unsigned char * ptr = (unsigned char*)memmem(&frame[markerlength], size-markerlength, H264marker, sizeof(H264marker));
if (ptr == NULL)
{
if ( (memcmp(&outFrame[i],H264marker,sizeof(H264marker)) == 0) || (memcmp(&outFrame[i],H264shortmarker,sizeof(H264shortmarker)) == 0) )
{
outsize = i;
break;
}
ptr = (unsigned char*)memmem(&frame[markerlength], size-markerlength, H264shortmarker, sizeof(H264shortmarker));
}
if (m_keepMarker)
{
outFrame = &frame[0];
}
else
{
size -= markerlength;
outFrame = &frame[markerlength];
}
if (ptr != NULL)
{
outsize = ptr - outFrame;
}
else
{
outsize = size;
}
size -= outsize;
}

@ -9,9 +9,6 @@
#include <sstream>
// libv4l2
#include <linux/videodev2.h>
// live555
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
@ -24,28 +21,38 @@
// ---------------------------------
// BaseServerMediaSubsession
// ---------------------------------
// Wrap the raw capture source in the framer matching the RTP format string.
// - "video/MP2T": TS packets need an MPEG2TransportStreamFramer for timing.
// - "video/H264": discrete NAL units need an H264VideoStreamDiscreteFramer.
// - anything else (e.g. VP8): the elementary stream is passed through as-is.
FramedSource* BaseServerMediaSubsession::createSource(UsageEnvironment& env, FramedSource* videoES, const std::string& format)
{
	FramedSource* source = NULL;
	if (format == "video/MP2T")
	{
		source = MPEG2TransportStreamFramer::createNew(env, videoES);
	}
	else if (format == "video/H264")
	{
		source = H264VideoStreamDiscreteFramer::createNew(env, videoES);
	}
	else
	{
		source = videoES;
	}
	return source;
}
// Build the RTP sink matching the RTP format string.
// - "video/MP2T": generic SimpleRTPSink at 90kHz (standard MPEG-2 TS over RTP).
// - "video/H264": H264VideoRTPSink (RFC 6184 packetization).
// - "video/VP8" : VP8VideoRTPSink.
// @return the sink, or NULL for an unknown format.
RTPSink* BaseServerMediaSubsession::createSink(UsageEnvironment& env, Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, const std::string& format)
{
	RTPSink* videoSink = NULL;
	if (format == "video/MP2T")
	{
		videoSink = SimpleRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic, 90000, "video", "MP2T", 1, True, False);
	}
	else if (format == "video/H264")
	{
		videoSink = H264VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
	}
	else if (format == "video/VP8")
	{
		videoSink = VP8VideoRTPSink::createNew(env, rtpGroupsock, rtpPayloadTypeIfDynamic);
	}
	return videoSink;
}
@ -72,7 +79,7 @@ MulticastServerMediaSubsession* MulticastServerMediaSubsession::createNew(UsageE
, Port rtpPortNum, Port rtcpPortNum
, int ttl
, StreamReplicator* replicator
, int format)
, const std::string& format)
{
// Create a source
FramedSource* source = replicator->createStreamReplica();
@ -117,7 +124,7 @@ char const* MulticastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,Frame
// -----------------------------------------
// ServerMediaSubsession for Unicast
// -----------------------------------------
// Factory for a unicast (on-demand) subsession serving replicas of the
// captured stream in the given RTP format (e.g. "video/H264", "video/MP2T").
UnicastServerMediaSubsession* UnicastServerMediaSubsession::createNew(UsageEnvironment& env, StreamReplicator* replicator, const std::string& format)
{
	return new UnicastServerMediaSubsession(env,replicator,format);
}

@ -56,7 +56,7 @@ void sighandler(int n)
RTSPServer* createRTSPServer(UsageEnvironment& env, unsigned short rtspPort, unsigned short rtspOverHTTPPort, int timeout)
{
UserAuthenticationDatabase* authDB = NULL;
RTSPServer* rtspServer = RTSPServer::createNew(env, rtspPort, authDB, timeout);
RTSPServer* rtspServer = RTSPServerSupportingHTTPStreaming::createNew(env, rtspPort, authDB, timeout);
if (rtspServer != NULL)
{
// set http tunneling
@ -89,6 +89,19 @@ void addSession(RTSPServer* rtspServer, const std::string & sessionName, ServerM
}
}
// Translate a V4L2 pixel format into its RTP payload MIME type.
// Returns an empty string when the format has no RTP mapping here.
std::string getRtpFormat(int format)
{
	if (format == V4L2_PIX_FMT_H264)
	{
		return "video/H264";
	}
#ifdef V4L2_PIX_FMT_VP8
	if (format == V4L2_PIX_FMT_VP8)
	{
		return "video/VP8";
	}
#endif
	return std::string();
}
// -----------------------------------------
// entry point
// -----------------------------------------
@ -113,32 +126,34 @@ int main(int argc, char** argv)
std::string maddr;
bool repeatConfig = true;
int timeout = 65;
bool muxTS = false;
// decode parameters
int c = 0;
while ((c = getopt (argc, argv, "v::Q:O:" "I:P:T:m:u:M:ct:" "rsfF:W:H:" "h")) != -1)
while ((c = getopt (argc, argv, "v::Q:O:" "I:P:p:m:u:M:ct:T" "rsfF:W:H:" "h")) != -1)
{
switch (c)
{
case 'v': verbose = 1; if (optarg && *optarg=='v') verbose++; break;
case 'Q': queueSize = atoi(optarg); break;
case 'v': verbose = 1; if (optarg && *optarg=='v') verbose++; break;
case 'Q': queueSize = atoi(optarg); break;
case 'O': outputFile = optarg; break;
// RTSP/RTP
case 'I': ReceivingInterfaceAddr = inet_addr(optarg); break;
case 'P': rtspPort = atoi(optarg); break;
case 'T': rtspOverHTTPPort = atoi(optarg); break;
case 'u': url = optarg; break;
case 'm': multicast = true; murl = optarg; break;
case 'I': ReceivingInterfaceAddr = inet_addr(optarg); break;
case 'P': rtspPort = atoi(optarg); break;
case 'p': rtspOverHTTPPort = atoi(optarg); break;
case 'u': url = optarg; break;
case 'm': multicast = true; murl = optarg; break;
case 'M': multicast = true; maddr = optarg; break;
case 'c': repeatConfig = false; break;
case 't': timeout = atoi(optarg); break;
case 'c': repeatConfig = false; break;
case 't': timeout = atoi(optarg); break;
case 'T': muxTS = true; break;
// V4L2
case 'r': useMmap = false; break;
case 'r': useMmap = false; break;
case 's': useThread = false; break;
case 'f': format = 0; break;
case 'F': fps = atoi(optarg); break;
case 'W': width = atoi(optarg); break;
case 'H': height = atoi(optarg); break;
case 'f': format = 0; break;
case 'F': fps = atoi(optarg); break;
case 'W': width = atoi(optarg); break;
case 'H': height = atoi(optarg); break;
case 'h':
default:
@ -150,15 +165,16 @@ int main(int argc, char** argv)
std::cout << "\t -vv : very verbose" << std::endl;
std::cout << "\t -Q length: Number of frame queue (default "<< queueSize << ")" << std::endl;
std::cout << "\t -O output: Copy captured frame to a file or a V4L2 device" << std::endl;
std::cout << "\t RTSP options :" << std::endl;
std::cout << "\t RTSP/RTP options :" << std::endl;
std::cout << "\t -I addr : RTSP interface (default autodetect)" << std::endl;
std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
std::cout << "\t -T port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
std::cout << "\t -p port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
std::cout << "\t -u url : unicast url (default " << url << ")" << std::endl;
std::cout << "\t -m url : multicast url (default " << murl << ")" << std::endl;
std::cout << "\t -M addr : multicast group:port (default is random_address:20000)" << std::endl;
std::cout << "\t -c : don't repeat config (default repeat config before IDR frame)" << std::endl;
std::cout << "\t -t secs : RTCP expiration timeout (default " << timeout << ")" << std::endl;
std::cout << "\t -T : send Transport Stream instead of elementary Stream" << std::endl;
std::cout << "\t V4L2 options :" << std::endl;
std::cout << "\t -r : V4L2 capture using read interface (default use memory mapped buffers)" << std::endl;
std::cout << "\t -s : V4L2 capture using live555 mainloop (default use a reader thread)" << std::endl;
@ -250,10 +266,18 @@ int main(int argc, char** argv)
{
LOG(NOTICE) << "Cannot start V4L2 Capture for:" << deviceName;
}
V4L2DeviceSource* videoES = NULL;
FramedSource* videoES = NULL;
std::string rtpFormat(getRtpFormat(format));
if (format == V4L2_PIX_FMT_H264)
{
videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread, repeatConfig);
videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outfd, queueSize, useThread, repeatConfig, muxTS);
if (muxTS)
{
MPEG2TransportStreamFromESSource* muxer = MPEG2TransportStreamFromESSource::createNew(*env);
muxer->addNewVideoSource(videoES, 5);
videoES = muxer;
rtpFormat = "video/MP2T";
}
}
else
{
@ -286,7 +310,7 @@ int main(int argc, char** argv)
{
LOG(NOTICE) << "RTP address " << inet_ntoa(destinationAddress) << ":" << rtpPortNum;
LOG(NOTICE) << "RTCP address " << inet_ntoa(destinationAddress) << ":" << rtcpPortNum;
addSession(rtspServer, baseUrl+murl, MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, replicator,format));
addSession(rtspServer, baseUrl+murl, MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, replicator,rtpFormat));
// increment ports for next sessions
rtpPortNum+=2;
@ -294,7 +318,7 @@ int main(int argc, char** argv)
}
// Create Unicast Session
addSession(rtspServer, baseUrl+url, UnicastServerMediaSubsession::createNew(*env,replicator,format));
addSession(rtspServer, baseUrl+url, UnicastServerMediaSubsession::createNew(*env,replicator,rtpFormat));
}
if (out)
{

Loading…
Cancel
Save