split concern

pull/33/head
MPR 10 years ago
parent a11f229dd5
commit 084b132f4b

@ -1,9 +1,9 @@
TARGETS=$(foreach file,$(basename $(wildcard *.c*)),$(file))
TARGET=$(notdir $(realpath .))
LIVE555_CFLAGS=-I/usr/include/UsageEnvironment -I /usr/include/groupsock -I /usr/include/liveMedia -I /usr/include/BasicUsageEnvironment
LIVE555_FDFLAGS=-lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment
all: /usr/include/libv4l2.h /usr/include/liveMedia/liveMedia.hh $(TARGETS)
all: /usr/include/libv4l2.h /usr/include/liveMedia/liveMedia.hh $(TARGET)
/usr/include/libv4l2.h:
$(info Cannot find /usr/include/libv4l2.h)
@ -13,11 +13,20 @@ all: /usr/include/libv4l2.h /usr/include/liveMedia/liveMedia.hh $(TARGETS)
$(info Cannot find /usr/include/liveMedia/liveMedia.hh)
sudo apt-get install liblivemedia-dev
%: %.cpp %.h
$(info $(OS))
g++ -g -o $@ $< $(LIVE555_CFLAGS) $(LIVE555_FDFLAGS) -lv4l2
ODIR=obj
$(ODIR):
mkdir -p $(ODIR)
$(ODIR)/%.o: %.cpp $(ODIR) $(DEPS)
g++ -g -c -o $@ $< $(LIVE555_CFLAGS)
OBJ = $(patsubst %,$(ODIR)/%,$(foreach src,$(wildcard *.cpp),$(src:.cpp=.o)))
$(TARGET): $(OBJ)
$(info $(OBJ))
g++ -g -o $@ $^ $(LIVE555_CFLAGS) $(LIVE555_FDFLAGS) -lv4l2
clean:
rm -f $(TARGETS)
rm -rf $(TARGET) $(ODIR)

@ -0,0 +1,91 @@
/* ---------------------------------------------------------------------------
** This software is in the public domain, furnished "as is", without technical
** support, and with no warranty, express or implied, as to its usefulness for
** any purpose.
**
** ServerMediaSubsession.cpp
**
** -------------------------------------------------------------------------*/
// live555
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <Base64.hh>
// project
#include "ServerMediaSubsession.h"
// -----------------------------------------
// ServerMediaSubsession for Multicast
// -----------------------------------------
// Build a multicast subsession: replicate the shared capture source, wrap it
// in the framer matching 'format', create RTP/RTCP groupsocks towards
// 'destinationAddress', and start streaming immediately (PASSIVE mode).
// Returns NULL when no framer or sink exists for the requested format
// (the original dereferenced the NULL pointers and crashed).
MulticastServerMediaSubsession* MulticastServerMediaSubsession::createNew(UsageEnvironment& env
					, struct in_addr destinationAddress
					, Port rtpPortNum, Port rtcpPortNum
					, int ttl
					, unsigned char rtpPayloadType
					, StreamReplicator* replicator
					, int format)
{
	// Create a source replica and wrap it in the proper framer
	FramedSource* source = replicator->createStreamReplica();
	FramedSource* videoSource = createSource(env, source, format);
	if (videoSource == NULL)
	{
		env << "Unsupported format, no framer available\n";
		Medium::close(source);
		return NULL;
	}
	// Create RTP/RTCP groupsock
	Groupsock* rtpGroupsock = new Groupsock(env, destinationAddress, rtpPortNum, ttl);
	Groupsock* rtcpGroupsock = new Groupsock(env, destinationAddress, rtcpPortNum, ttl);
	// Create a RTP sink
	RTPSink* videoSink = createSink(env, rtpGroupsock, rtpPayloadType, format);
	if (videoSink == NULL)
	{
		env << "Unsupported format, no RTP sink available\n";
		Medium::close(videoSource);
		return NULL;
	}
	// Create 'RTCP instance' (CNAME = host name, per live555 convention)
	const unsigned maxCNAMElen = 100;
	unsigned char CNAME[maxCNAMElen+1];
	gethostname((char*)CNAME, maxCNAMElen);
	CNAME[maxCNAMElen] = '\0';
	RTCPInstance* rtcpInstance = RTCPInstance::createNew(env, rtcpGroupsock, 500, CNAME, videoSink, NULL);
	// Start Playing the Sink right away: multicast streams regardless of clients
	videoSink->startPlaying(*videoSource, NULL, NULL);
	return new MulticastServerMediaSubsession(replicator, videoSink, rtcpInstance);
}
// Return the SDP description for this subsession, computed once and cached.
// The PassiveServerMediaSubsession base does not include the H264
// "a=fmtp:" line carrying SPS/PPS, so it is appended here.
char const* MulticastServerMediaSubsession::sdpLines()
{
	if (m_SDPLines.empty())
	{
		// Ugly workaround to give SPS/PPS that are get from the RTPSink
		m_SDPLines.assign(PassiveServerMediaSubsession::sdpLines());
		// getAuxSDPLine() may return NULL (e.g. when the replicator's input
		// is not a V4L2DeviceSource); calling std::string::append(NULL) is
		// undefined behaviour, so guard it. The strdup()'ed buffer is a
		// one-shot allocation cached via m_SDPLines for the subsession's
		// lifetime.
		char const* auxLine = getAuxSDPLine(m_rtpSink,NULL);
		if (auxLine != NULL)
		{
			m_SDPLines.append(auxLine);
		}
	}
	return m_SDPLines.c_str();
}
// Build the "a=fmtp:<payload> <auxdata>" SDP attribute from the original
// (un-replicated) capture source. 'inputSource' is unused; the signature
// matches the live555 virtual. Returns NULL when the replicator's input is
// not a V4L2DeviceSource (the dynamic_cast fails).
char const* MulticastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
	return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
}
// -----------------------------------------
// ServerMediaSubsession for Unicast
// -----------------------------------------
// Factory for the unicast (on-demand) subsession. Ownership of the returned
// object passes to live555 once it is added to a ServerMediaSession.
UnicastServerMediaSubsession* UnicastServerMediaSubsession::createNew(UsageEnvironment& env, StreamReplicator* replicator, int format)
{
	return new UnicastServerMediaSubsession(env,replicator,format);
}
// Called by OnDemandServerMediaSubsession for each new client session:
// replicate the shared capture source and wrap it in the framer for
// m_format. 'estBitrate' (kbps) is an out-parameter live555 uses for RTCP
// bandwidth computation; the original left it uninitialized, which fed
// garbage into RTCP. 500 kbps is the conventional live555 demo estimate.
FramedSource* UnicastServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
	estBitrate = 500; // kbps, rough estimate
	FramedSource* source = m_replicator->createStreamReplica();
	return createSource(envir(), source, m_format);
}
// Called by live555 to build the RTP sink for a client session; delegates to
// the shared createSink() helper, which maps m_format to a sink type and
// returns NULL for unsupported formats.
RTPSink* UnicastServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
	return createSink(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, m_format);
}
// Build the "a=fmtp:<payload> <auxdata>" SDP attribute from the original
// (un-replicated) capture source. 'inputSource' is unused; the signature
// matches the live555 virtual. Returns NULL when the replicator's input is
// not a V4L2DeviceSource (the dynamic_cast fails).
char const* UnicastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
	return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
}

@ -0,0 +1,118 @@
/* ---------------------------------------------------------------------------
** This software is in the public domain, furnished "as is", without technical
** support, and with no warranty, express or implied, as to its usefulness for
** any purpose.
**
** ServerMediaSubsession.h
**
** -------------------------------------------------------------------------*/
#ifndef SERVER_MEDIA_SUBSESSION
#define SERVER_MEDIA_SUBSESSION
#include <string>
#include <sstream>
// V4L2
#include <linux/videodev2.h>
// live555
#include <liveMedia.hh>
// project
#include "V4l2DeviceSource.h"
// ---------------------------------
// BaseServerMediaSubsession
// ---------------------------------
// Mixin shared by the multicast and unicast subsessions: holds the
// StreamReplicator and provides the format->framer / format->sink factories
// plus the SDP "a=fmtp:" auxiliary-line builder.
class BaseServerMediaSubsession
{
	public:
		// 'replicator' is borrowed, not owned; it must outlive the subsession.
		BaseServerMediaSubsession(StreamReplicator* replicator): m_replicator(replicator) {};
	public:
		// Wrap 'videoES' in the discrete framer matching the V4L2 pixel
		// format. Returns NULL for unsupported formats (only H264 handled).
		static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, int format)
		{
			FramedSource* source = NULL;
			switch (format)
			{
				case V4L2_PIX_FMT_H264 : source = H264VideoStreamDiscreteFramer::createNew(env, videoES); break;
			}
			return source;
		}
		// Build the RTP sink matching the V4L2 pixel format.
		// Returns NULL for unsupported formats (only H264 handled).
		static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, int format)
		{
			RTPSink* videoSink = NULL;
			switch (format)
			{
				case V4L2_PIX_FMT_H264 : videoSink = H264VideoRTPSink::createNew(env, rtpGroupsock,rtpPayloadTypeIfDynamic); break;
			}
			return videoSink;
		}
		// Format "a=fmtp:<payload> <device aux line>\r\n" for the SDP.
		// Returns NULL when 'source' is NULL; otherwise a strdup()'ed buffer
		// that the CALLER owns (must free() it, or deliberately cache it).
		// NOTE(review): strdup() is used without a visible <cstring>/<string.h>
		// include here — presumably pulled in transitively; confirm.
		char const* getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType)
		{
			const char* auxLine = NULL;
			if (source)
			{
				std::ostringstream os;
				os << "a=fmtp:" << int(rtpPayloadType) << " ";
				os << source->getAuxLine();
				os << "\r\n";
				auxLine = strdup(os.str().c_str());
			}
			return auxLine;
		}
	protected:
		StreamReplicator* m_replicator; // borrowed; shared capture source fan-out
};
// -----------------------------------------
// ServerMediaSubsession for Multicast
// -----------------------------------------
// Multicast subsession: streams continuously (PASSIVE mode) to a fixed
// multicast address; clients join the existing stream.
class MulticastServerMediaSubsession : public PassiveServerMediaSubsession , public BaseServerMediaSubsession
{
	public:
		// Builds the full multicast pipeline (source, groupsocks, sink, RTCP)
		// and starts playing. Implemented in ServerMediaSubsession.cpp.
		static MulticastServerMediaSubsession* createNew(UsageEnvironment& env
								, struct in_addr destinationAddress
								, Port rtpPortNum, Port rtcpPortNum
								, int ttl
								, unsigned char rtpPayloadType
								, StreamReplicator* replicator
								, int format);

	protected:
		// 'rtpSink' is kept so sdpLines() can query it for the aux SDP line.
		MulticastServerMediaSubsession(StreamReplicator* replicator, RTPSink* rtpSink, RTCPInstance* rtcpInstance)
				: PassiveServerMediaSubsession(*rtpSink, rtcpInstance), BaseServerMediaSubsession(replicator), m_rtpSink(rtpSink) {};

		// Overrides append the SPS/PPS "a=fmtp:" line to the base SDP.
		virtual char const* sdpLines() ;
		virtual char const* getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource);

	protected:
		RTPSink* m_rtpSink;       // borrowed; owned by the live555 session
		std::string m_SDPLines;   // cache computed by sdpLines()
};
// -----------------------------------------
// ServerMediaSubsession for Unicast
// -----------------------------------------
// Unicast subsession: on-demand — a fresh source replica and RTP sink are
// created per connecting RTSP client.
class UnicastServerMediaSubsession : public OnDemandServerMediaSubsession , public BaseServerMediaSubsession
{
	public:
		static UnicastServerMediaSubsession* createNew(UsageEnvironment& env, StreamReplicator* replicator, int format);

	protected:
		// reuseFirstSource=False: each client gets its own replica.
		UnicastServerMediaSubsession(UsageEnvironment& env, StreamReplicator* replicator, int format)
				: OnDemandServerMediaSubsession(env, False), BaseServerMediaSubsession(replicator), m_format(format) {};

		// live555 per-client factory hooks.
		virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
		virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource);
		virtual char const* getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource);

	protected:
		int m_format;   // V4L2 pixel format (e.g. V4L2_PIX_FMT_H264)
};
#endif

@ -1,24 +1,21 @@
/***************************************************************************************/
/* V4L2 RTSP streamer */
/* */
/* H264 capture using V4L2 */
/* RTSP using live555 */
/* */
/* NOTE : Configuration SPS/PPS need to be captured in one single frame */
/***************************************************************************************/
/* ---------------------------------------------------------------------------
** This software is in the public domain, furnished "as is", without technical
** support, and with no warranty, express or implied, as to its usefulness for
** any purpose.
**
** v4l2DeviceSource.cpp
**
** V4L2 source
**
** -------------------------------------------------------------------------*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <errno.h>
#include <signal.h>
#include <linux/videodev2.h>
#include <iomanip>
#include <sstream>
// libv4l2
#include <linux/videodev2.h>
#include <libv4l2.h>
// live555
@ -27,7 +24,7 @@
#include <Base64.hh>
// project
#include "h264_v4l2_rtspserver.h"
#include "V4l2DeviceSource.h"
// ---------------------------------
// V4L2 FramedSource
@ -370,218 +367,3 @@ void V4L2DeviceSource::queueFrame(char * frame, int frameSize, const timeval &tv
envir().taskScheduler().triggerEvent(m_eventTriggerId, this);
}
// -----------------------------------------
// ServerMediaSubsession for Multicast
// -----------------------------------------
MulticastServerMediaSubsession* MulticastServerMediaSubsession::createNew(UsageEnvironment& env
, struct in_addr destinationAddress
, Port rtpPortNum, Port rtcpPortNum
, int ttl
, unsigned char rtpPayloadType
, StreamReplicator* replicator
, int format)
{
// Create a source
FramedSource* source = replicator->createStreamReplica();
FramedSource* videoSource = createSource(env, source, format);
// Create RTP/RTCP groupsock
Groupsock* rtpGroupsock = new Groupsock(env, destinationAddress, rtpPortNum, ttl);
Groupsock* rtcpGroupsock = new Groupsock(env, destinationAddress, rtcpPortNum, ttl);
// Create a RTP sink
RTPSink* videoSink = createSink(env, rtpGroupsock, rtpPayloadType, format);
// Create 'RTCP instance'
const unsigned maxCNAMElen = 100;
unsigned char CNAME[maxCNAMElen+1];
gethostname((char*)CNAME, maxCNAMElen);
CNAME[maxCNAMElen] = '\0';
RTCPInstance* rtcpInstance = RTCPInstance::createNew(env, rtcpGroupsock, 500, CNAME, videoSink, NULL);
// Start Playing the Sink
videoSink->startPlaying(*videoSource, NULL, NULL);
return new MulticastServerMediaSubsession(replicator, videoSink, rtcpInstance);
}
char const* MulticastServerMediaSubsession::sdpLines()
{
if (m_SDPLines.empty())
{
// Ugly workaround to give SPS/PPS that are get from the RTPSink
m_SDPLines.assign(PassiveServerMediaSubsession::sdpLines());
m_SDPLines.append(getAuxSDPLine(m_rtpSink,NULL));
}
return m_SDPLines.c_str();
}
char const* MulticastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
}
// -----------------------------------------
// ServerMediaSubsession for Unicast
// -----------------------------------------
UnicastServerMediaSubsession* UnicastServerMediaSubsession::createNew(UsageEnvironment& env, StreamReplicator* replicator, int format)
{
return new UnicastServerMediaSubsession(env,replicator,format);
}
FramedSource* UnicastServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
{
FramedSource* source = m_replicator->createStreamReplica();
return createSource(envir(), source, m_format);
}
RTPSink* UnicastServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
return createSink(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, m_format);
}
char const* UnicastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
}
// -----------------------------------------
// signal handler
// -----------------------------------------
char quit = 0;
void sighandler(int n)
{
printf("SIGINT\n");
quit =1;
}
// -----------------------------------------
// signal handler
// -----------------------------------------
void addSession(RTSPServer* rtspServer, const char* sessionName, ServerMediaSubsession *subSession)
{
UsageEnvironment& env(rtspServer->envir());
ServerMediaSession* sms = ServerMediaSession::createNew(env, sessionName);
sms->addSubsession(subSession);
rtspServer->addServerMediaSession(sms);
char* url = rtspServer->rtspURL(sms);
env << "Play this stream using the URL \"" << url << "\"\n";
delete[] url;
}
// -----------------------------------------
// entry point
// -----------------------------------------
int main(int argc, char** argv)
{
// default parameters
const char *dev_name = "/dev/video0";
int format = V4L2_PIX_FMT_H264;
int width = 640;
int height = 480;
int queueSize = 10;
int fps = 25;
unsigned short rtpPortNum = 20000;
unsigned short rtcpPortNum = rtpPortNum+1;
unsigned char ttl = 5;
struct in_addr destinationAddress;
unsigned short rtspPort = 8554;
unsigned short rtspOverHTTPPort = 8080;
bool multicast = false;
bool verbose = false;
std::string outputFile;
// decode parameters
int c = 0;
while ((c = getopt (argc, argv, "hW:H:Q:P:F:vO:mT:")) != -1)
{
switch (c)
{
case 'O': outputFile = optarg; break;
case 'v': verbose = true; break;
case 'm': multicast = true; break;
case 'W': width = atoi(optarg); break;
case 'H': height = atoi(optarg); break;
case 'Q': queueSize = atoi(optarg); break;
case 'P': rtspPort = atoi(optarg); break;
case 'T': rtspOverHTTPPort = atoi(optarg); break;
case 'F': fps = atoi(optarg); break;
case 'h':
{
std::cout << argv[0] << " [-v][-m][-P RTSP port][-P RTSP/HTTP port][-Q queueSize] [-W width] [-H height] [-F fps] [-O file] [device]" << std::endl;
std::cout << "\t -v : Verbose " << std::endl;
std::cout << "\t -Q length: Number of frame queue (default "<< queueSize << ")" << std::endl;
std::cout << "\t -O file : Dump capture to a file" << std::endl;
std::cout << "\t RTSP options :" << std::endl;
std::cout << "\t -m : Enable multicast output" << std::endl;
std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
std::cout << "\t -H port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
std::cout << "\t V4L2 options :" << std::endl;
std::cout << "\t -F fps : V4L2 capture framerate (default "<< fps << ")" << std::endl;
std::cout << "\t -W width : V4L2 capture width (default "<< width << ")" << std::endl;
std::cout << "\t -H height: V4L2 capture height (default "<< height << ")" << std::endl;
std::cout << "\t device : V4L2 capture device (default "<< dev_name << ")" << std::endl;
exit(0);
}
}
}
if (optind<argc)
{
dev_name = argv[optind];
}
//
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspPort);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
}
else
{
// set http tunneling
rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);
// Init capture
*env << "Create V4L2 Source..." << dev_name << "\n";
V4L2DeviceSource::V4L2DeviceParameters param(dev_name,format,queueSize,width,height,fps,verbose,outputFile);
V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param);
if (videoES == NULL)
{
*env << "Unable to create source \n";
}
else
{
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
OutPacketBuffer::maxSize = videoES->getBufferSize();
StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);
// Create Server Multicast Session
if (multicast)
{
addSession(rtspServer, "multicast", MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, 96, replicator,format));
}
// Create Server Unicast Session
addSession(rtspServer, "unicast", UnicastServerMediaSubsession::createNew(*env,replicator,format));
// main loop
signal(SIGINT,sighandler);
env->taskScheduler().doEventLoop(&quit);
*env << "Exiting..\n";
}
Medium::close(videoES);
Medium::close(rtspServer);
}
env->reclaim();
delete scheduler;
return 0;
}

@ -0,0 +1,127 @@
/* ---------------------------------------------------------------------------
** This software is in the public domain, furnished "as is", without technical
** support, and with no warranty, express or implied, as to its usefulness for
** any purpose.
**
** v4l2DeviceSource.h
**
** V4L2 source
**
** -------------------------------------------------------------------------*/
#ifndef V4L2_DEVICE_SOURCE
#define V4L2_DEVICE_SOURCE
#include <string>
#include <list>
#include <iostream>
// live555
#include <liveMedia.hh>
// ---------------------------------
// V4L2 FramedSource
// ---------------------------------
// H264 NAL start code used to split/parse captured buffers.
const char marker[] = {0,0,0,1};

// live555 FramedSource that captures frames from a V4L2 device and queues
// them for delivery to the streaming pipeline (delivery is triggered from
// the capture handler via an event trigger).
class V4L2DeviceSource: public FramedSource
{
	public:
		// ---------------------------------
		// V4L2 Capture parameters
		// ---------------------------------
		struct V4L2DeviceParameters
		{
			V4L2DeviceParameters(const char* devname, int format, int queueSize, int width, int height, int fps, bool verbose,const std::string & outputFile) :
				m_devName(devname), m_format(format), m_queueSize(queueSize), m_width(width), m_height(height), m_fps(fps), m_verbose(verbose), m_outputFIle(outputFile) {};

			std::string m_devName;    // V4L2 device node (e.g. /dev/video0)
			int m_width;
			int m_height;
			int m_format;             // V4L2 pixel format (e.g. V4L2_PIX_FMT_H264)
			int m_queueSize;          // max frames kept in m_captureQueue
			int m_fps;
			bool m_verbose;
			// optional dump file ("" = none). NOTE: misspelled field name kept
			// unchanged for compatibility with the .cpp that references it.
			std::string m_outputFIle;
		};
		// ---------------------------------
		// Captured frame
		// ---------------------------------
		// Owns its buffer. NOTE: no copy-control — copying would double-free;
		// keep instances behind pointers as m_captureQueue does.
		struct Frame
		{
			Frame(char* buffer, int size, timeval timestamp) : m_buffer(buffer), m_size(size), m_timestamp(timestamp) {};
			// The buffer is a char array, so array delete is required (the
			// original used scalar `delete`, which is undefined behaviour for
			// memory from new[]) -- assumes the allocation site uses
			// `new char[...]`; TODO confirm in V4l2DeviceSource.cpp.
			~Frame() { delete [] m_buffer; };

			char* m_buffer;
			int m_size;
			timeval m_timestamp;
		};
		// ---------------------------------
		// compute FPS
		// ---------------------------------
		// Counts notify() calls per second and prints the rate each time the
		// second changes.
		class Fps
		{
			public:
				Fps(const std::string & msg) : m_fps(0), m_fps_sec(0), m_msg(msg) {};

			public:
				// Record one event stamped with 'tv_sec'; logs and resets the
				// counter when a new second starts. Returns the running count.
				int notify(int tv_sec)
				{
					m_fps++;
					if (tv_sec != m_fps_sec)
					{
						std::cout << m_msg << "tv_sec:" << tv_sec << " fps:" << m_fps <<"\n";
						m_fps_sec = tv_sec;
						m_fps = 0;
					}
					return m_fps;
				}

			protected:
				int m_fps;                // events seen in the current second
				int m_fps_sec;            // second currently being counted
				const std::string m_msg;  // log prefix
		};

	public:
		static V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params);
		int getBufferSize() { return m_bufferSize; };
		// SDP auxiliary data (e.g. profile-level-id/sprop) captured from the stream.
		std::string getAuxLine() { return m_auxLine; };

	protected:
		V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params);
		virtual ~V4L2DeviceSource();

	protected:
		// Device setup helpers (implemented in the .cpp).
		bool init();
		int initdevice(const char *dev_name);
		int checkCapabilities(int fd);
		int configureFormat(int fd);
		int configureParam(int fd);
		int xioctl(int fd, int request, void *arg);
		// live555 delivery path: doGetNextFrame() -> (event trigger) -> deliverFrame().
		virtual void doGetNextFrame();
		static void deliverFrameStub(void* clientData) {((V4L2DeviceSource*) clientData)->deliverFrame();};
		void deliverFrame();
		// Capture path: socket/background handler pulls data from the device.
		static void incomingPacketHandlerStub(void* clientData, int mask) { ((V4L2DeviceSource*) clientData)->getNextFrame(); };
		void getNextFrame();
		// (sic) name kept — parses the SPS/PPS configuration frame.
		bool processConfigrationFrame(char * frame, int frameSize);
		void processFrame(char * frame, int &frameSize, const timeval &ref);
		void queueFrame(char * frame, int frameSize, const timeval &tv);

	private:
		V4L2DeviceParameters m_params;
		int m_fd;                          // V4L2 device file descriptor
		int m_bufferSize;                  // device buffer size, used for OutPacketBuffer::maxSize
		std::list<Frame*> m_captureQueue;  // owned frames awaiting delivery
		Fps m_in;                          // capture-side rate counter
		Fps m_out;                         // delivery-side rate counter
		EventTriggerId m_eventTriggerId;   // wakes deliverFrame() on the scheduler thread
		FILE* m_outfile;                   // optional dump file (may be NULL)
		std::string m_auxLine;             // cached SDP aux data
};
#endif

@ -1,217 +0,0 @@
/***************************************************************************************/
/* V4L2 RTSP streamer */
/* */
/* H264 capture using V4L2 */
/* RTSP using live555 */
/* */
/* NOTE : Configuration SPS/PPS need to be captured in one single frame */
/***************************************************************************************/
#ifndef H264_V4L2_RTSP_SERVER
#define H264_V4L2_RTSP_SERVER
#include <string>
#include <list>
#include <iostream>
// live555
#include <liveMedia.hh>
// ---------------------------------
// V4L2 FramedSource
// ---------------------------------
const char marker[] = {0,0,0,1};
class V4L2DeviceSource: public FramedSource
{
public:
// ---------------------------------
// V4L2 Capture parameters
// ---------------------------------
struct V4L2DeviceParameters
{
V4L2DeviceParameters(const char* devname, int format, int queueSize, int width, int height, int fps, bool verbose,const std::string & outputFile) :
m_devName(devname), m_format(format), m_queueSize(queueSize), m_width(width), m_height(height), m_fps(fps), m_verbose(verbose), m_outputFIle(outputFile) {};
std::string m_devName;
int m_width;
int m_height;
int m_format;
int m_queueSize;
int m_fps;
bool m_verbose;
std::string m_outputFIle;
};
// ---------------------------------
// Captured frame
// ---------------------------------
struct Frame
{
Frame(char* buffer, int size, timeval timestamp) : m_buffer(buffer), m_size(size), m_timestamp(timestamp) {};
~Frame() { delete m_buffer; };
char* m_buffer;
int m_size;
timeval m_timestamp;
};
// ---------------------------------
// compute FPS
// ---------------------------------
class Fps
{
public:
Fps(const std::string & msg) : m_fps(0), m_fps_sec(0), m_msg(msg) {};
public:
int notify(int tv_sec)
{
m_fps++;
if (tv_sec != m_fps_sec)
{
std::cout << m_msg << "tv_sec:" << tv_sec << " fps:" << m_fps <<"\n";
m_fps_sec = tv_sec;
m_fps = 0;
}
return m_fps;
}
protected:
int m_fps;
int m_fps_sec;
const std::string m_msg;
};
public:
static V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params);
int getBufferSize() { return m_bufferSize; };
std::string getAuxLine() { return m_auxLine; };
protected:
V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params);
virtual ~V4L2DeviceSource();
protected:
bool init();
int initdevice(const char *dev_name);
int checkCapabilities(int fd);
int configureFormat(int fd);
int configureParam(int fd);
int xioctl(int fd, int request, void *arg);
virtual void doGetNextFrame();
static void deliverFrameStub(void* clientData) {((V4L2DeviceSource*) clientData)->deliverFrame();};
void deliverFrame();
static void incomingPacketHandlerStub(void* clientData, int mask) { ((V4L2DeviceSource*) clientData)->getNextFrame(); };
void getNextFrame();
bool processConfigrationFrame(char * frame, int frameSize);
void processFrame(char * frame, int &frameSize, const timeval &ref);
void queueFrame(char * frame, int frameSize, const timeval &tv);
private:
V4L2DeviceParameters m_params;
int m_fd;
int m_bufferSize;
std::list<Frame*> m_captureQueue;
Fps m_in;
Fps m_out;
EventTriggerId m_eventTriggerId;
FILE* m_outfile;
std::string m_auxLine;
};
// ---------------------------------
// BaseServerMediaSubsession
// ---------------------------------
class BaseServerMediaSubsession
{
public:
BaseServerMediaSubsession(StreamReplicator* replicator): m_replicator(replicator) {};
public:
static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, int format)
{
FramedSource* source = NULL;
switch (format)
{
case V4L2_PIX_FMT_H264 : source = H264VideoStreamDiscreteFramer::createNew(env, videoES); break;
}
return source;
}
static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, int format)
{
RTPSink* videoSink = NULL;
switch (format)
{
case V4L2_PIX_FMT_H264 : videoSink = H264VideoRTPSink::createNew(env, rtpGroupsock,rtpPayloadTypeIfDynamic); break;
}
return videoSink;
}
char const* getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType)
{
const char* auxLine = NULL;
if (source)
{
std::ostringstream os;
os << "a=fmtp:" << int(rtpPayloadType) << " ";
os << source->getAuxLine();
os << "\r\n";
auxLine = strdup(os.str().c_str());
}
return auxLine;
}
protected:
StreamReplicator* m_replicator;
};
// -----------------------------------------
// ServerMediaSubsession for Multicast
// -----------------------------------------
class MulticastServerMediaSubsession : public PassiveServerMediaSubsession , public BaseServerMediaSubsession
{
public:
static MulticastServerMediaSubsession* createNew(UsageEnvironment& env
, struct in_addr destinationAddress
, Port rtpPortNum, Port rtcpPortNum
, int ttl
, unsigned char rtpPayloadType
, StreamReplicator* replicator
, int format);
protected:
MulticastServerMediaSubsession(StreamReplicator* replicator, RTPSink* rtpSink, RTCPInstance* rtcpInstance)
: PassiveServerMediaSubsession(*rtpSink, rtcpInstance), BaseServerMediaSubsession(replicator), m_rtpSink(rtpSink) {};
virtual char const* sdpLines() ;
virtual char const* getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource);
protected:
RTPSink* m_rtpSink;
std::string m_SDPLines;
};
// -----------------------------------------
// ServerMediaSubsession for Unicast
// -----------------------------------------
class UnicastServerMediaSubsession : public OnDemandServerMediaSubsession , public BaseServerMediaSubsession
{
public:
static UnicastServerMediaSubsession* createNew(UsageEnvironment& env, StreamReplicator* replicator, int format);
protected:
UnicastServerMediaSubsession(UsageEnvironment& env, StreamReplicator* replicator, int format)
: OnDemandServerMediaSubsession(env, False), BaseServerMediaSubsession(replicator), m_format(format) {};
virtual FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource);
virtual char const* getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource);
protected:
int m_format;
};
#endif

@ -0,0 +1,175 @@
/* ---------------------------------------------------------------------------
** This software is in the public domain, furnished "as is", without technical
** support, and with no warranty, express or implied, as to its usefulness for
** any purpose.
**
** main.cpp
**
** V4L2 RTSP streamer
**
** H264 capture using V4L2
** RTSP using live555
**
** NOTE : Configuration SPS/PPS need to be captured in one single frame
** -------------------------------------------------------------------------*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <signal.h>
#include <unistd.h>
#include <sstream>
// libv4l2
#include <linux/videodev2.h>
#include <libv4l2.h>
// live555
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <Base64.hh>
// project
#include "ServerMediaSubsession.h"
// -----------------------------------------
// signal handler
// -----------------------------------------
// Global flag polled by live555's doEventLoop(); non-zero makes the loop return.
char quit = 0;

// SIGINT handler. printf() is not async-signal-safe (POSIX), so report via
// write(), which is, then flag the event loop to stop.
void sighandler(int n)
{
	static const char msg[] = "SIGINT\n";
	ssize_t ignored = write(STDOUT_FILENO, msg, sizeof(msg)-1);
	(void)ignored; // best-effort notification; nothing to do on failure
	quit = 1;
}
// -----------------------------------------
// signal handler
// -----------------------------------------
// Register 'subSession' on the RTSP server under 'sessionName' and log the
// URL clients can use to play it.
void addSession(RTSPServer* rtspServer, const char* sessionName, ServerMediaSubsession *subSession)
{
	UsageEnvironment& env(rtspServer->envir());
	ServerMediaSession* session = ServerMediaSession::createNew(env, sessionName);
	session->addSubsession(subSession);
	rtspServer->addServerMediaSession(session);
	// rtspURL() allocates; release it once logged.
	char* streamUrl = rtspServer->rtspURL(session);
	env << "Play this stream using the URL \"" << streamUrl << "\"\n";
	delete[] streamUrl;
}
// -----------------------------------------
// entry point
// -----------------------------------------
// Parse command-line options, open the V4L2 capture device, and serve it over
// RTSP: a unicast (on-demand) session always, plus a multicast session with -m.
// Returns 0 in all cases (errors are logged, not propagated).
int main(int argc, char** argv)
{
	// default parameters
	const char *dev_name = "/dev/video0";
	int format = V4L2_PIX_FMT_H264;
	int width = 640;
	int height = 480;
	int queueSize = 10;
	int fps = 25;
	unsigned short rtpPortNum = 20000;
	unsigned short rtcpPortNum = rtpPortNum+1; // RTCP is RTP+1 by convention
	unsigned char ttl = 5;
	struct in_addr destinationAddress;
	unsigned short rtspPort = 8554;
	unsigned short rtspOverHTTPPort = 8080;
	bool multicast = false;
	bool verbose = false;
	std::string outputFile;

	// decode parameters
	int c = 0;
	while ((c = getopt (argc, argv, "hW:H:Q:P:F:vO:mT:")) != -1)
	{
		switch (c)
		{
			case 'O':	outputFile = optarg; break;
			case 'v':	verbose = true; break;
			case 'm':	multicast = true; break;
			case 'W':	width = atoi(optarg); break;
			case 'H':	height = atoi(optarg); break;
			case 'Q':	queueSize = atoi(optarg); break;
			case 'P':	rtspPort = atoi(optarg); break;
			case 'T':	rtspOverHTTPPort = atoi(optarg); break;
			case 'F':	fps = atoi(optarg); break;
			case 'h':
			{
				// usage text fixed: RTSP-over-HTTP is -T (was wrongly documented
				// as a second -P and as -H, which is the capture height)
				std::cout << argv[0] << " [-v][-m][-P RTSP port][-T RTSP/HTTP port][-Q queueSize] [-W width] [-H height] [-F fps] [-O file] [device]" << std::endl;
				std::cout << "\t -v : Verbose " << std::endl;
				std::cout << "\t -Q length: Number of frame queue (default "<< queueSize << ")" << std::endl;
				std::cout << "\t -O file : Dump capture to a file" << std::endl;
				std::cout << "\t RTSP options :" << std::endl;
				std::cout << "\t -m : Enable multicast output" << std::endl;
				std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
				std::cout << "\t -T port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
				std::cout << "\t V4L2 options :" << std::endl;
				std::cout << "\t -F fps : V4L2 capture framerate (default "<< fps << ")" << std::endl;
				std::cout << "\t -W width : V4L2 capture width (default "<< width << ")" << std::endl;
				std::cout << "\t -H height: V4L2 capture height (default "<< height << ")" << std::endl;
				std::cout << "\t device : V4L2 capture device (default "<< dev_name << ")" << std::endl;
				exit(0);
			}
		}
	}
	// remaining non-option argument overrides the capture device
	if (optind<argc)
	{
		dev_name = argv[optind];
	}

	// live555 scheduler + environment
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
	RTSPServer* rtspServer = RTSPServer::createNew(*env, rtspPort);
	if (rtspServer == NULL)
	{
		*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
	}
	else
	{
		// set http tunneling
		rtspServer->setUpTunnelingOverHTTP(rtspOverHTTPPort);

		// Init capture
		*env << "Create V4L2 Source..." << dev_name << "\n";
		V4L2DeviceSource::V4L2DeviceParameters param(dev_name,format,queueSize,width,height,fps,verbose,outputFile);
		V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param);
		if (videoES == NULL)
		{
			*env << "Unable to create source for device " << dev_name << "\n";
		}
		else
		{
			// SSM multicast destination; size the output buffer from the device
			destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
			OutPacketBuffer::maxSize = videoES->getBufferSize();
			StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);

			// Create Server Multicast Session (payload type 96 = dynamic)
			if (multicast)
			{
				addSession(rtspServer, "multicast", MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, 96, replicator,format));
			}
			// Create Server Unicast Session
			addSession(rtspServer, "unicast", UnicastServerMediaSubsession::createNew(*env,replicator,format));

			// main loop: runs until SIGINT sets 'quit' (see sighandler)
			signal(SIGINT,sighandler);
			env->taskScheduler().doEventLoop(&quit);
			*env << "Exiting..\n";
		}
		Medium::close(videoES);
		Medium::close(rtspServer);
	}
	env->reclaim();
	delete scheduler;
	return 0;
}
Loading…
Cancel
Save