extract H264 stuff from V4L2DeviceSource

pull/33/head
Michel Promonet 10 years ago
parent 1c2aaec031
commit 2c4c64c0bb

@@ -0,0 +1,42 @@
/* ---------------------------------------------------------------------------
** This software is in the public domain, furnished "as is", without technical
** support, and with no warranty, express or implied, as to its usefulness for
** any purpose.
**
** H264_V4l2DeviceSource.h
**
** H264 V4L2 live555 source
**
** -------------------------------------------------------------------------*/
#ifndef H264_V4L2_DEVICE_SOURCE
#define H264_V4L2_DEVICE_SOURCE
// project
#include "V4l2DeviceSource.h"
// ---------------------------------
// H264 V4L2 FramedSource
// ---------------------------------
const char H264marker[] = {0,0,0,1};
class H264_V4L2DeviceSource : public V4L2DeviceSource
{
public:
static H264_V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, int verbose, bool useThread) ;
protected:
H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, int verbose, bool useThread);
virtual ~H264_V4L2DeviceSource();
unsigned char* extractFrame(unsigned char* frame, size_t size, size_t& outsize);
// override V4L2DeviceSource
virtual std::list< std::pair<unsigned char*,size_t> > splitFrames(unsigned char* frame, unsigned frameSize);
private:
std::string m_sps;
std::string m_pps;
};
#endif
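
The subclass keys on the H.264 NAL unit header: the low five bits of the first byte after the Annex B start code give the nal_unit_type, and types 7 and 8 are the SPS and PPS that the SDP description needs. A minimal standalone sketch of that check (illustration only, not part of the patch):

#include <cstdio>
// The low 5 bits of the first NAL byte are the nal_unit_type (7 = SPS, 8 = PPS, 5 = IDR slice).
static int nalUnitType(unsigned char firstByte) { return firstByte & 0x1F; }
int main()
{
    unsigned char sps = 0x67, pps = 0x68, idr = 0x65; // typical first bytes of these NAL units
    std::printf("%d %d %d\n", nalUnitType(sps), nalUnitType(pps), nalUnitType(idr)); // prints "7 8 5"
    return 0;
}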

@@ -16,7 +16,8 @@
// live555
#include <liveMedia.hh>
class H264Filter;
// forward declaration
class V4L2DeviceSource;
// ---------------------------------
// BaseServerMediaSubsession
@@ -29,7 +30,7 @@ class BaseServerMediaSubsession
public:
static FramedSource* createSource(UsageEnvironment& env, FramedSource * videoES, int format);
static RTPSink* createSink(UsageEnvironment& env, Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, int format);
char const* getAuxLine(H264Filter* source,unsigned char rtpPayloadType);
char const* getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType);
protected:
StreamReplicator* m_replicator;

@@ -24,90 +24,10 @@
// project
#include "V4l2Capture.h"
// ---------------------------------
// H264 parsing
// ---------------------------------
const char H264marker[] = {0,0,0,1};
class H264Filter
{
public:
H264Filter() {};
virtual ~H264Filter() {};
std::string getAuxLine() { return m_auxLine; };
std::list< std::pair<unsigned char*,size_t> > splitFrames(unsigned char* frame, unsigned frameSize)
{
std::list< std::pair<unsigned char*,size_t> > frameList;
size_t size = 0;
unsigned char* buffer = this->extractFrame(frame, frameSize, size);
while (buffer != NULL)
{
frameList.push_back(std::make_pair<unsigned char*,size_t>(buffer, size));
switch (buffer[0]&0x1F)
{
case 7: std::cout << "SPS\n"; m_sps.assign((char*)buffer,size); break;
case 8: std::cout << "PPS\n"; m_pps.assign((char*)buffer,size); break;
default: break;
}
if (m_auxLine.empty() && !m_sps.empty() && !m_pps.empty())
{
u_int32_t profile_level_id = 0;
if (m_sps.size() >= 4) profile_level_id = (m_sps[1]<<16)|(m_sps[2]<<8)|m_sps[3];
char* sps_base64 = base64Encode(m_sps.c_str(), m_sps.size());
char* pps_base64 = base64Encode(m_pps.c_str(), m_pps.size());
std::ostringstream os;
os << "profile-level-id=" << std::hex << std::setw(6) << profile_level_id;
os << ";sprop-parameter-sets=" << sps_base64 <<"," << pps_base64;
m_auxLine.assign(os.str());
free(sps_base64);
free(pps_base64);
std::cout << m_auxLine.c_str() << "\n";
}
frameSize -= size;
buffer = this->extractFrame(&buffer[size], frameSize, size);
}
return frameList;
}
private:
unsigned char* extractFrame(unsigned char* frame, size_t size, size_t& outsize)
{
unsigned char * outFrame = NULL;
outsize = 0;
if ( (size>= sizeof(H264marker)) && (memcmp(frame,H264marker,sizeof(H264marker)) == 0) )
{
outFrame = &frame[sizeof(H264marker)];
outsize = size - sizeof(H264marker);
for (int i=0; i+sizeof(H264marker) < size; ++i)
{
if (memcmp(&outFrame[i],H264marker,sizeof(H264marker)) == 0)
{
outsize = i;
break;
}
}
}
return outFrame;
}
private:
std::string m_auxLine;
std::string m_sps;
std::string m_pps;
};
// ---------------------------------
// V4L2 FramedSource
// ---------------------------------
class V4L2DeviceSource: public FramedSource, public H264Filter
class V4L2DeviceSource: public FramedSource
{
public:
// ---------------------------------
@@ -145,6 +65,7 @@ class V4L2DeviceSource: public FramedSource, public H264Filter
public:
static V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, int verbose, bool useThread) ;
std::string getAuxLine() { return m_auxLine; };
protected:
V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, int verbose, bool useThread);
@@ -160,6 +81,9 @@ class V4L2DeviceSource: public FramedSource, public H264Filter
void processFrame(char * frame, int frameSize, const timeval &ref);
void queueFrame(char * frame, int frameSize, const timeval &tv);
// split packet into frames
virtual std::list< std::pair<unsigned char*,size_t> > splitFrames(unsigned char* frame, unsigned frameSize);
// override FramedSource
virtual void doGetNextFrame();
virtual void doStopGettingFrames();
@@ -175,6 +99,7 @@ class V4L2DeviceSource: public FramedSource, public H264Filter
unsigned int m_queueSize;
int m_verbose;
pthread_t m_thid;
std::string m_auxLine;
};
#endif

@@ -0,0 +1,105 @@
/* ---------------------------------------------------------------------------
** This software is in the public domain, furnished "as is", without technical
** support, and with no warranty, express or implied, as to its usefulness for
** any purpose.
**
** H264_V4l2DeviceSource.cpp
**
** H264 V4L2 Live555 source
**
** -------------------------------------------------------------------------*/
#include <sstream>
// live555
#include <Base64.hh>
// project
#include "H264_V4l2DeviceSource.h"
// ---------------------------------
// H264 V4L2 FramedSource
// ---------------------------------
H264_V4L2DeviceSource* H264_V4L2DeviceSource::createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, int verbose, bool useThread)
{
H264_V4L2DeviceSource* source = NULL;
if (device)
{
source = new H264_V4L2DeviceSource(env, params, device, outputFd, queueSize, verbose, useThread);
}
return source;
}
// Constructor
H264_V4L2DeviceSource::H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, int verbose, bool useThread)
: V4L2DeviceSource(env, params, device, outputFd, queueSize, verbose,useThread)
{
}
// Destructor
H264_V4L2DeviceSource::~H264_V4L2DeviceSource()
{
}
// split packet into frames
std::list< std::pair<unsigned char*,size_t> > H264_V4L2DeviceSource::splitFrames(unsigned char* frame, unsigned frameSize)
{
std::list< std::pair<unsigned char*,size_t> > frameList;
size_t size = 0;
unsigned char* buffer = this->extractFrame(frame, frameSize, size);
while (buffer != NULL)
{
frameList.push_back(std::make_pair<unsigned char*,size_t>(buffer, size));
switch (buffer[0]&0x1F)
{
case 7: std::cout << "SPS\n"; m_sps.assign((char*)buffer,size); break;
case 8: std::cout << "PPS\n"; m_pps.assign((char*)buffer,size); break;
default: break;
}
if (m_auxLine.empty() && !m_sps.empty() && !m_pps.empty())
{
u_int32_t profile_level_id = 0;
if (m_sps.size() >= 4) profile_level_id = (m_sps[1]<<16)|(m_sps[2]<<8)|m_sps[3];
char* sps_base64 = base64Encode(m_sps.c_str(), m_sps.size());
char* pps_base64 = base64Encode(m_pps.c_str(), m_pps.size());
std::ostringstream os;
os << "profile-level-id=" << std::hex << std::setw(6) << profile_level_id;
os << ";sprop-parameter-sets=" << sps_base64 <<"," << pps_base64;
m_auxLine.assign(os.str());
free(sps_base64);
free(pps_base64);
std::cout << m_auxLine.c_str() << "\n";
}
frameSize -= size;
buffer = this->extractFrame(&buffer[size], frameSize, size);
}
return frameList;
}
// extract a frame
unsigned char* H264_V4L2DeviceSource::extractFrame(unsigned char* frame, size_t size, size_t& outsize)
{
unsigned char * outFrame = NULL;
outsize = 0;
if ( (size>= sizeof(H264marker)) && (memcmp(frame,H264marker,sizeof(H264marker)) == 0) )
{
outFrame = &frame[sizeof(H264marker)];
outsize = size - sizeof(H264marker);
for (int i=0; i+sizeof(H264marker) < size; ++i)
{
if (memcmp(&outFrame[i],H264marker,sizeof(H264marker)) == 0)
{
outsize = i;
break;
}
}
}
return outFrame;
}
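
The aux line built above follows RFC 6184: profile-level-id is the hexadecimal value of the three bytes that follow the SPS NAL header (profile_idc, constraint flags, level_idc), and sprop-parameter-sets carries the base64-encoded SPS and PPS. A small standalone sketch of the profile-level-id computation on a hypothetical SPS prefix (the byte values are illustrative only):

#include <cstdio>
int main()
{
    // Hypothetical first bytes of an SPS NAL unit: NAL header, profile_idc,
    // constraint flags, level_idc (Baseline profile, level 3.1 in this example).
    unsigned char sps[] = {0x67, 0x42, 0xC0, 0x1F};
    unsigned int profile_level_id = (sps[1] << 16) | (sps[2] << 8) | sps[3];
    std::printf("profile-level-id=%06x\n", profile_level_id); // prints profile-level-id=42c01f
    return 0;
}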

@@ -44,7 +44,7 @@ RTPSink* BaseServerMediaSubsession::createSink(UsageEnvironment& env, Groupsock
return videoSink;
}
char const* BaseServerMediaSubsession::getAuxLine(H264Filter* source,unsigned char rtpPayloadType)
char const* BaseServerMediaSubsession::getAuxLine(V4L2DeviceSource* source,unsigned char rtpPayloadType)
{
const char* auxLine = NULL;
if (source)
@@ -105,7 +105,7 @@ char const* MulticastServerMediaSubsession::sdpLines()
char const* MulticastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
return this->getAuxLine(dynamic_cast<H264Filter*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
}
// -----------------------------------------
@@ -129,5 +129,5 @@ RTPSink* UnicastServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,
char const* UnicastServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink,FramedSource* inputSource)
{
return this->getAuxLine(dynamic_cast<H264Filter*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
return this->getAuxLine(dynamic_cast<V4L2DeviceSource*>(m_replicator->inputSource()), rtpSink->rtpPayloadType());
}
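
Since getAuxLine() now takes the base V4L2DeviceSource*, the subsession code no longer needs any H.264-specific type; it only consumes the opaque aux string exposed by the source. A hedged sketch of what such a helper typically assembles (the real body is not shown in this diff; the function name and exact formatting below are assumptions):

#include <sstream>
#include <string.h>
#include "V4l2DeviceSource.h"
// Hypothetical illustration: turn the source's aux string into an SDP fmtp attribute.
char const* buildAuxSDPLine(V4L2DeviceSource* source, unsigned char rtpPayloadType)
{
    if (source == NULL) return NULL;
    std::ostringstream os;
    os << "a=fmtp:" << (int)rtpPayloadType << " " << source->getAuxLine() << "\r\n";
    return strdup(os.str().c_str()); // caller takes ownership of the duplicated string
}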

@@ -17,11 +17,6 @@
#include <linux/videodev2.h>
#include <libv4l2.h>
// live555
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <Base64.hh>
// project
#include "V4l2DeviceSource.h"
@@ -249,7 +244,8 @@ void V4L2DeviceSource::processFrame(char * frame, int frameSize, const timeval &
frameList.pop_front();
}
}
// post a frame to fifo
void V4L2DeviceSource::queueFrame(char * frame, int frameSize, const timeval &tv)
{
while (m_captureQueue.size() >= m_queueSize)
@@ -267,4 +263,16 @@ void V4L2DeviceSource::queueFrame(char * frame, int frameSize, const timeval &tv
envir().taskScheduler().triggerEvent(m_eventTriggerId, this);
}
// split packet into frames
std::list< std::pair<unsigned char*,size_t> > V4L2DeviceSource::splitFrames(unsigned char* frame, unsigned frameSize)
{
std::list< std::pair<unsigned char*,size_t> > frameList;
if (frame != NULL)
{
frameList.push_back(std::make_pair<unsigned char*,size_t>(frame, frameSize));
}
return frameList;
}
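
With splitFrames() now a virtual hook called from processFrame(), the base class simply forwards the captured buffer as a single frame, while codec-aware subclasses such as H264_V4L2DeviceSource slice it into NAL units. A hypothetical sketch of how another codec-specific source could plug into the same path (the class name and behavior below are illustrative, not part of the patch):

#include "V4l2DeviceSource.h"
// Hypothetical subclass: override splitFrames() to apply codec-specific slicing,
// falling back to the base-class pass-through behavior.
class MyCodecDeviceSource : public V4L2DeviceSource
{
    protected:
        MyCodecDeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture* device, int outputFd, unsigned int queueSize, int verbose, bool useThread)
            : V4L2DeviceSource(env, params, device, outputFd, queueSize, verbose, useThread) {}
        virtual std::list< std::pair<unsigned char*,size_t> > splitFrames(unsigned char* frame, unsigned frameSize)
        {
            // codec-specific parsing would go here
            return V4L2DeviceSource::splitFrames(frame, frameSize);
        }
};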

@@ -37,7 +37,7 @@
#include "V4l2ReadCapture.h"
#include "V4l2MmapCapture.h"
#include "V4l2DeviceSource.h"
#include "H264_V4l2DeviceSource.h"
#include "ServerMediaSubsession.h"
// -----------------------------------------
@@ -244,20 +244,21 @@ int main(int argc, char** argv)
int outputFd = createOutput(outputFile, videoCapture->getFd());
LOG(NOTICE) << "Start V4L2 Capture..." << dev_name;
videoCapture->captureStart();
V4L2DeviceSource* videoES = V4L2DeviceSource::createNew(*env, param, videoCapture, outputFd, queueSize, verbose, useThread);
V4L2DeviceSource* videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outputFd, queueSize, verbose, useThread);
if (videoES == NULL)
{
LOG(FATAL) << "Unable to create source for device " << dev_name;
}
else
{
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
OutPacketBuffer::maxSize = videoCapture->getBufferSize();
StreamReplicator* replicator = StreamReplicator::createNew(*env, videoES, false);
// Create Server Multicast Session
if (multicast)
{
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
LOG(NOTICE) << "Mutlicast address " << inet_ntoa(destinationAddress);
addSession(rtspServer, murl.c_str(), MulticastServerMediaSubsession::createNew(*env,destinationAddress, Port(rtpPortNum), Port(rtcpPortNum), ttl, replicator,format));
}
