mirror of https://github.com/mpromonet/v4l2rtspserver (synced 2024-11-02 03:40:13 +00:00)
Repeat config before each IDR frame, and add an option (-c) to disable this. Set presentationTime to the capture time, so that the SPS/PPS/IDR units split from the same capture share the same timestamp.
This commit is contained in: parent 1f89a9d1a5, commit 3c83669169
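The repeat-config logic in the hunks below keys off the H.264 NAL unit type, i.e. the low five bits of the first byte after the Annex-B start code (7 = SPS, 8 = PPS, 5 = IDR slice). A minimal standalone sketch of that classification, not part of the commit:

    #include <cstddef>
    #include <cstdio>

    // Sketch: classify an H.264 NAL unit by the type field in its first byte
    // (the start code 00 00 00 01 is assumed to be already stripped).
    void classifyNal(const unsigned char* nal, size_t size)
    {
        if (size == 0) return;
        switch (nal[0] & 0x1F)
        {
            case 7: printf("SPS size:%zu\n", size); break; // sequence parameter set
            case 8: printf("PPS size:%zu\n", size); break; // picture parameter set
            case 5: printf("IDR size:%zu\n", size); break; // IDR slice (key frame)
            default: break;
        }
    }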
@@ -57,11 +57,12 @@ Usage
  -Q length: Number of frame queue (default 10)
  -O output: Copy captured frame to a file or a V4L2 device
  RTSP options :
- -u url : unicast url (default unicast)
- -m url : multicast url (default multicast)
- -M addr : multicast group (default is a random address)
  -P port : RTSP port (default 8554)
- -H port : RTSP over HTTP port (default 0)
+ -T port : RTSP over HTTP port (default 0)
+ -u url : unicast url (default unicast)
+ -m url : multicast url (default multicast)
+ -M addr : multicast group (default is a random address)
+ -c : don't repeat config (default repeat config before IDR frame)
  V4L2 options :
  -r : V4L2 capture using read interface (default use memory mapped buffers)
  -s : V4L2 capture using live555 mainloop (default use a separated reading thread)
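With the new flag, a launch could look like the line below; the device path and port are illustrative defaults, not taken from this diff:

    v4l2rtspserver -c -P 8554 /dev/video0

This would stream the capture device over RTSP on port 8554 without re-sending SPS/PPS before each IDR frame.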
@@ -23,10 +23,10 @@ const char H264marker[] = {0,0,0,1};
 class H264_V4L2DeviceSource : public V4L2DeviceSource
 {
     public:
-        static H264_V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread) ;
+        static H264_V4L2DeviceSource* createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig) ;

     protected:
-        H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread);
+        H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig);
         virtual ~H264_V4L2DeviceSource();

         unsigned char* extractFrame(unsigned char* frame, size_t& size, size_t& outsize);

@@ -37,6 +37,7 @@ class H264_V4L2DeviceSource : public V4L2DeviceSource
     private:
         std::string m_sps;
         std::string m_pps;
+        bool m_repeatConfig;
 };

 #endif
@@ -21,19 +21,19 @@
 // ---------------------------------
 // H264 V4L2 FramedSource
 // ---------------------------------
-H264_V4L2DeviceSource* H264_V4L2DeviceSource::createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread)
+H264_V4L2DeviceSource* H264_V4L2DeviceSource::createNew(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig)
 {
     H264_V4L2DeviceSource* source = NULL;
     if (device)
     {
-        source = new H264_V4L2DeviceSource(env, params, device, outputFd, queueSize, useThread);
+        source = new H264_V4L2DeviceSource(env, params, device, outputFd, queueSize, useThread, repeatConfig);
     }
     return source;
 }

 // Constructor
-H264_V4L2DeviceSource::H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread)
-    : V4L2DeviceSource(env, params, device, outputFd, queueSize,useThread)
+H264_V4L2DeviceSource::H264_V4L2DeviceSource(UsageEnvironment& env, V4L2DeviceParameters params, V4l2Capture * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig)
+    : V4L2DeviceSource(env, params, device, outputFd, queueSize,useThread), m_repeatConfig(repeatConfig)
 {
 }
@@ -52,11 +52,17 @@ std::list< std::pair<unsigned char*,size_t> > H264_V4L2DeviceSource::splitFrames
     unsigned char* buffer = this->extractFrame(frame, bufSize, size);
     while (buffer != NULL)
     {
-        frameList.push_back(std::make_pair<unsigned char*,size_t>(buffer, size));
         switch (buffer[0]&0x1F)
         {
             case 7: LOG(INFO) << "SPS size:" << size; m_sps.assign((char*)buffer,size); break;
             case 8: LOG(INFO) << "PPS size:" << size; m_pps.assign((char*)buffer,size); break;
             case 5: LOG(INFO) << "IDR size:" << size;
+                if (m_repeatConfig && !m_sps.empty() && !m_pps.empty())
+                {
+                    frameList.push_back(std::make_pair<unsigned char*,size_t>((unsigned char*)m_sps.c_str(), m_sps.size()));
+                    frameList.push_back(std::make_pair<unsigned char*,size_t>((unsigned char*)m_pps.c_str(), m_pps.size()));
+                }
+            break;
             default: break;
         }

@@ -77,6 +83,7 @@ std::list< std::pair<unsigned char*,size_t> > H264_V4L2DeviceSource::splitFrames
             free(pps_base64);
             LOG(NOTICE) << m_auxLine;
         }
+        frameList.push_back(std::make_pair<unsigned char*,size_t>(buffer, size));

         buffer = this->extractFrame(&buffer[size], bufSize, size);
     }
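To make the new splitFrames() behaviour easier to follow, here is a simplified, self-contained sketch (not the project's code) of queueing one NAL unit while repeating the cached SPS/PPS ahead of an IDR slice:

    #include <list>
    #include <string>
    #include <utility>

    // Sketch: append one NAL unit (start code already stripped) to frameList.
    // When the NAL is an IDR slice and repeatConfig is set, the cached SPS and
    // PPS are queued first so a late-joining decoder can configure itself.
    void queueNal(std::list< std::pair<unsigned char*, size_t> >& frameList,
                  unsigned char* nal, size_t size,
                  std::string& sps, std::string& pps, bool repeatConfig)
    {
        switch (nal[0] & 0x1F)
        {
            case 7: sps.assign((char*)nal, size); break;   // cache latest SPS
            case 8: pps.assign((char*)nal, size); break;   // cache latest PPS
            case 5:                                        // IDR slice
                if (repeatConfig && !sps.empty() && !pps.empty())
                {
                    frameList.push_back(std::make_pair((unsigned char*)sps.c_str(), sps.size()));
                    frameList.push_back(std::make_pair((unsigned char*)pps.c_str(), pps.size()));
                }
                break;
            default: break;
        }
        frameList.push_back(std::make_pair(nal, size));    // the NAL itself comes last
    }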
@@ -151,11 +151,12 @@ void V4L2DeviceSource::deliverFrame()
     }
     else
     {
-        gettimeofday(&fPresentationTime, NULL);
+        timeval curTime;
+        gettimeofday(&curTime, NULL);
         Frame * frame = m_captureQueue.front();
         m_captureQueue.pop_front();

-        m_out.notify(fPresentationTime.tv_sec, frame->m_size);
+        m_out.notify(curTime.tv_sec, frame->m_size);
         if (frame->m_size > fMaxSize)
         {
             fFrameSize = fMaxSize;

@@ -166,10 +167,11 @@ void V4L2DeviceSource::deliverFrame()
             fFrameSize = frame->m_size;
         }
         timeval diff;
-        timersub(&fPresentationTime,&(frame->m_timestamp),&diff);
+        timersub(&curTime,&(frame->m_timestamp),&diff);

-        LOG(DEBUG) << "deliverFrame\ttimestamp:" << fPresentationTime.tv_sec << "." << fPresentationTime.tv_usec << "\tsize:" << fFrameSize <<"\tdiff:" << (diff.tv_sec*1000+diff.tv_usec/1000) << "ms\tqueue:" << m_captureQueue.size();
+        LOG(DEBUG) << "deliverFrame\ttimestamp:" << curTime.tv_sec << "." << curTime.tv_usec << "\tsize:" << fFrameSize <<"\tdiff:" << (diff.tv_sec*1000+diff.tv_usec/1000) << "ms\tqueue:" << m_captureQueue.size();

+        fPresentationTime = frame->m_timestamp;
         memcpy(fTo, frame->m_buffer, fFrameSize);
         delete frame;
     }
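The net effect of this hunk: fPresentationTime now carries the capture timestamp (so the SPS, PPS and IDR units split from one captured buffer share it), while the wall clock is only used to log how long the frame sat in the queue. A standalone sketch of that measurement, not the project's code:

    #include <sys/time.h>
    #include <cstdio>

    // Sketch: log the capture-to-delivery delay and reuse the capture time
    // as the presentation time handed to the RTP packetizer.
    void stampFrame(const timeval& captureTime, timeval& presentationTime)
    {
        timeval curTime;
        gettimeofday(&curTime, NULL);                 // wall clock, for logging only

        timeval diff;
        timersub(&curTime, &captureTime, &diff);      // how long the frame waited in the queue
        printf("queue delay: %ld ms\n", (long)(diff.tv_sec * 1000 + diff.tv_usec / 1000));

        presentationTime = captureTime;               // presentation time = capture time
    }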
src/main.cpp
@@ -165,26 +165,30 @@ int main(int argc, char** argv)
     std::string murl = "multicast";
     bool useThread = true;
     in_addr_t maddr = INADDR_NONE;
+    bool repeatConfig = true;

     // decode parameters
     int c = 0;
-    while ((c = getopt (argc, argv, "hW:H:Q:P:F:v::O:T:m:u:rsM:")) != -1)
+    while ((c = getopt (argc, argv, "v::Q:O:" "P:T:m:u:M:c" "rsF:W:H:" "h")) != -1)
     {
         switch (c)
         {
-            case 'O': outputFile = optarg; break;
             case 'v': verbose = 1; if (optarg && *optarg=='v') verbose++; break;
-            case 'm': multicast = true; if (optarg) murl = optarg; break;
-            case 'M': multicast = true; if (optarg) maddr = inet_addr(optarg); break;
-            case 'W': width = atoi(optarg); break;
-            case 'H': height = atoi(optarg); break;
             case 'Q': queueSize = atoi(optarg); break;
+            case 'O': outputFile = optarg; break;
+            // RTSP/RTP
             case 'P': rtspPort = atoi(optarg); break;
             case 'T': rtspOverHTTPPort = atoi(optarg); break;
-            case 'F': fps = atoi(optarg); break;
+            case 'u': url = optarg; break;
+            case 'm': multicast = true; murl = optarg; break;
+            case 'M': multicast = true; maddr = inet_addr(optarg); break;
+            case 'c': repeatConfig = false; break;
+            // V4L2
             case 'r': useMmap = false; break;
             case 's': useThread = false; break;
-            case 'u': url = optarg; break;
+            case 'F': fps = atoi(optarg); break;
+            case 'W': width = atoi(optarg); break;
+            case 'H': height = atoi(optarg); break;

             case 'h':
             default:
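As a reading aid, the new -c handling is a plain getopt boolean toggle; a reduced standalone sketch (option string trimmed to the flag in question, not the full string used above):

    #include <unistd.h>
    #include <cstdio>

    int main(int argc, char** argv)
    {
        bool repeatConfig = true;                      // default: repeat SPS/PPS before each IDR
        int c = 0;
        while ((c = getopt(argc, argv, "c")) != -1)    // only -c in this sketch
        {
            switch (c)
            {
                case 'c': repeatConfig = false; break; // -c disables config repetition
                default: break;
            }
        }
        printf("repeatConfig=%d\n", repeatConfig ? 1 : 0);
        return 0;
    }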
@@ -195,11 +199,12 @@ int main(int argc, char** argv)
         std::cout << "\t -Q length: Number of frame queue (default "<< queueSize << ")" << std::endl;
         std::cout << "\t -O output: Copy captured frame to a file or a V4L2 device" << std::endl;
         std::cout << "\t RTSP options :" << std::endl;
+        std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
+        std::cout << "\t -T port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
         std::cout << "\t -u url : unicast url (default " << url << ")" << std::endl;
         std::cout << "\t -m url : multicast url (default " << murl << ")" << std::endl;
         std::cout << "\t -M addr : multicast group (default is a random address)" << std::endl;
-        std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
-        std::cout << "\t -H port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
+        std::cout << "\t -c : don't repeat config (default repeat config before IDR frame)" << std::endl;
         std::cout << "\t V4L2 options :" << std::endl;
         std::cout << "\t -r : V4L2 capture using read interface (default use memory mapped buffers)" << std::endl;
         std::cout << "\t -s : V4L2 capture using live555 mainloop (default use a reader thread)" << std::endl;
@@ -245,7 +250,7 @@ int main(int argc, char** argv)
     int outputFd = createOutput(outputFile, videoCapture->getFd());
     LOG(NOTICE) << "Start V4L2 Capture..." << dev_name;
     videoCapture->captureStart();
-    V4L2DeviceSource* videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outputFd, queueSize, useThread);
+    V4L2DeviceSource* videoES = H264_V4L2DeviceSource::createNew(*env, param, videoCapture, outputFd, queueSize, useThread, repeatConfig);
     if (videoES == NULL)
     {
         LOG(FATAL) << "Unable to create source for device " << dev_name;