pull/33/head
mpromonet 8 years ago
commit 85c7fd3f6a

@ -1 +1 @@
repo_token: hZ360i2wZGvYbjQwy6d8iXgS9hd4mhcHr
repo_token: JHXnPIUwlIxL9EWjL7zypPbTStK0teBOu

@ -31,6 +31,15 @@ Download
--------
[Latest build](https://github.com/mpromonet/h264_v4l2_rtspserver/releases/latest/)
Before build
-------
The build tries to install the live555 package using apt-get; however, in order to install live555 with the check of port reuse disabled, you can proceed like this:
wget http://www.live555.com/liveMedia/public/live555-latest.tar.gz -O - | tar xvzf -
cd live
./genMakefiles linux
sudo make CPPFLAGS=-DALLOW_RTSP_SERVER_PORT_REUSE=1 install
Build
-------
cmake . && make
@ -38,13 +47,6 @@ Build
If it fails, you will need to install liblivemedia-dev and liblog4cpp5-dev.
If it still does not work, you will need to read the Makefile.
In order to build live555 with the check of port reuse disabled, you can proceed like this:
wget http://www.live555.com/liveMedia/public/live555-latest.tar.gz -O - | tar xvzf -
cd live
./genMakefiles linux
sudo make CPPFLAGS=-DALLOW_RTSP_SERVER_PORT_REUSE=1 install
Install
---------
make install
@ -52,7 +54,7 @@ Install
Build Package
-------------
cpack .
dpkg -i h264_v4l2_rtspserver*.deb
dpkg -i v4l2rtspserver*.deb
Using Raspberry Pi Camera
-------------------------
@ -68,18 +70,18 @@ Using with v4l2loopback
-----------------------
For a camera providing an uncompressed format, [v4l2tools](https://github.com/mpromonet/v4l2tools) can compress the video to an intermediate virtual V4L2 device [v4l2loopback](https://github.com/umlaeute/v4l2loopback):
/dev/video0 (camera device)-> v4l2compress_h264 -> /dev/video10 (v4l2loopback device) -> h264_v4l2_rtspserver
/dev/video0 (camera device)-> v4l2compress_h264 -> /dev/video10 (v4l2loopback device) -> v4l2rtspserver
This workflow can be set up using:
modprobe v4l2loopback video_nr=10
v4l2compress_h264 /dev/video0 /dev/video10 &
h264_v4l2_rtspserver /dev/video10 &
v4l2rtspserver /dev/video10 &
Usage
-----
./h264_v4l2_rtspserver [-v[v]] [-Q queueSize] [-O file] \
[-I interface] [-P RTSP port] [-T RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout] \
./v4l2rtspserver [-v[v]] [-Q queueSize] [-O file] \
[-I interface] [-P RTSP port] [-p RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout] \
[-r] [-s] [-W width] [-H height] [-F fps] [device1] [device2]
-v : verbose
-vv : very verbose
@ -88,18 +90,31 @@ Usage
RTSP options :
-I addr : RTSP interface (default autodetect)
-P port : RTSP port (default 8554)
-T port : RTSP over HTTP port (default 0)
-p port : RTSP over HTTP port (default 0)
-u url : unicast url (default unicast)
-m url : multicast url (default multicast)
-M addr : multicast group:port (default is random_address:20000)
-c : don't repeat config (default repeat config before IDR frame)
-t secs : RTCP expiration timeout (default 65)
-T : send Transport Stream instead of elementary Stream
-S secs : HTTP segment duration (enable HLS & MPEG-DASH)
V4L2 options :
-r : V4L2 capture using read interface (default use memory mapped buffers)
-w : V4L2 capture using write interface (default use memory mapped buffers)
-s : V4L2 capture using live555 mainloop (default use a separated reading thread)
-f : V4L2 capture using current format (-W,-H,-F are ignored)
-f : V4L2 capture using current capture format (-W,-H,-F are ignored)
-fformat : V4L2 capture using format (-W,-H,-F are used)
-W width : V4L2 capture width (default 640)
-H height: V4L2 capture height (default 480)
-F fps : V4L2 capture framerate (default 25)
device : V4L2 capture device (default /dev/video0)
Receiving HTTP streams
-----------------------
When v4l2rtspserver is started with the '-S' argument, it gives access to the streams over HTTP. These streams can be received:
* for MPEG-DASH with:
MP4Client http://..../unicast.mpd
* for HLS with:
vlc http://..../unicast.m3u8
gst-launch-1.0 playbin uri=http://.../unicast.m3u8

@ -27,15 +27,16 @@ class HTTPServer : public RTSPServer
HTTPClientConnection(RTSPServer& ourServer, int clientSocket, struct sockaddr_in clientAddr)
: RTSPServer::RTSPClientConnection(ourServer, clientSocket, clientAddr), fClientSessionId(0), fTCPSink(NULL) {
}
virtual ~HTTPClientConnection();
private:
void sendHeader(const char* contentType, unsigned int contentLength);
void streamSource(FramedSource* source);
ServerMediaSubsession* getSubsesion(const char* urlSuffix);
void sendM3u8PlayList(char const* urlSuffix);
void sendMpdPlayList(char const* urlSuffix);
void handleHTTPCmd_StreamingGET(char const* urlSuffix, char const* fullRequestStr);
bool sendM3u8PlayList(char const* urlSuffix);
bool sendMpdPlayList(char const* urlSuffix);
virtual void handleHTTPCmd_StreamingGET(char const* urlSuffix, char const* fullRequestStr);
static void afterStreaming(void* clientData);
private:

@ -40,7 +40,7 @@ class MJPEGVideoSource : public JPEGVideoSource
int headerSize = 0;
bool headerOk = false;
fFrameSize = 0;
for (unsigned int i = 0; i < frameSize ; ++i)
{
// SOF
@ -53,13 +53,13 @@ class MJPEGVideoSource : public JPEGVideoSource
// DQT
if ( (i+5+64) < frameSize && (fTo[i] == 0xFF) && (fTo[i+1] == 0xDB))
{
int quantSize = fTo[i+3];
int quantIdx = fTo[i+4];
if (quantIdx < 3)
unsigned int quantSize = fTo[i+3]-4;
unsigned int quantIdx = fTo[i+4];
if (quantSize*quantIdx+quantSize <= sizeof(m_qTable))
{
if ( quantIdx+1 > m_qTableCount )
m_qTableCount = quantIdx+1;
memcpy(m_qTable + quantIdx*64, fTo + i + 5, 64);
memcpy(m_qTable + quantSize*quantIdx, fTo + i + 5, quantSize);
if (quantSize*quantIdx+quantSize > m_qTableSize)
m_qTableSize = quantSize*quantIdx+quantSize;
}
}
// End of header
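For reference, a minimal standalone sketch of the DQT layout this loop walks (the helper name and signature are assumptions, not part of this commit): a DQT segment is FF DB, a 2-byte big-endian length that includes itself, then one or more tables, each a precision/ID byte followed by 64 bytes (8-bit precision) or 128 bytes (16-bit precision) of coefficients — which is why m_qTable is sized 128*2 further down.

```cpp
// Standalone illustration of standard JPEG DQT parsing (not the project's code).
#include <cstring>
#include <stdint.h>

static size_t parseDQT(const uint8_t* data, size_t size, uint8_t qTable[128 * 2], unsigned int& qTableSize)
{
    if (size < 4 || data[0] != 0xFF || data[1] != 0xDB)
        return 0;
    size_t len = (data[2] << 8) | data[3];        // counts bytes from data[2] onward
    size_t end = 2 + len;
    size_t pos = 4;
    while (pos < end && pos < size)               // a segment may hold several tables
    {
        uint8_t precision = data[pos] >> 4;       // 0: 8-bit entries, 1: 16-bit entries
        uint8_t tableId   = data[pos] & 0x0F;
        size_t  tableLen  = (precision == 0) ? 64 : 128;
        pos++;
        if (pos + tableLen > size)
            break;
        if ((tableId + 1) * tableLen <= 128 * 2)  // same bound as the m_qTable member
        {
            memcpy(qTable + tableId * tableLen, data + pos, tableLen);
            if ((tableId + 1) * tableLen > qTableSize)
                qTableSize = (unsigned int)((tableId + 1) * tableLen);
        }
        pos += tableLen;
    }
    return pos;                                   // offset just past the parsed tables
}
```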
@ -90,9 +90,9 @@ class MJPEGVideoSource : public JPEGVideoSource
{
length = 0;
precision = 0;
if (m_qTableCount > 0)
if (m_qTableSize > 0)
{
length = 64*m_qTableCount;
length = m_qTableSize;
}
return m_qTable;
}
@ -100,7 +100,7 @@ class MJPEGVideoSource : public JPEGVideoSource
protected:
MJPEGVideoSource(UsageEnvironment& env, FramedSource* source) : JPEGVideoSource(env),
m_inputSource(source),
m_width(0), m_height(0), m_qTableCount(0),
m_width(0), m_height(0), m_qTableSize(0),
m_type(0)
{
memset(&m_qTable,0,sizeof(m_qTable));
@ -114,7 +114,7 @@ class MJPEGVideoSource : public JPEGVideoSource
FramedSource* m_inputSource;
u_int8_t m_width;
u_int8_t m_height;
u_int8_t m_qTable[64*3];
int m_qTableCount;
u_int8_t m_qTable[128*2];
unsigned int m_qTableSize;
u_int8_t m_type;
};

@ -155,7 +155,7 @@ class HLSServerMediaSubsession : public UnicastServerMediaSubsession
outputBuffer.append((const char*)m_buffer, frameSize);
// remove old buffers
while (m_outputBuffers.size()>3)
while (m_outputBuffers.size()>5)
{
m_outputBuffers.erase(m_outputBuffers.begin());
}
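A minimal standalone sketch of the retention pattern above (class and member names are assumptions, not the project's code): segments are kept in a map keyed by start time, and the oldest entries are dropped once more than a handful are stored, so memory stays bounded while clients can still fetch the segments listed in the playlist.

```cpp
#include <cstddef>
#include <map>
#include <string>

class SegmentStore
{
    public:
        explicit SegmentStore(size_t maxSegments = 5) : m_maxSegments(maxSegments) {}

        void append(unsigned int startTime, const char* frame, size_t frameSize)
        {
            m_outputBuffers[startTime].append(frame, frameSize);
            // remove old buffers once more than m_maxSegments are kept
            while (m_outputBuffers.size() > m_maxSegments)
            {
                m_outputBuffers.erase(m_outputBuffers.begin());
            }
        }

        const std::string* get(unsigned int startTime) const
        {
            std::map<unsigned int, std::string>::const_iterator it = m_outputBuffers.find(startTime);
            return (it != m_outputBuffers.end()) ? &it->second : NULL;
        }

    private:
        size_t m_maxSegments;
        std::map<unsigned int, std::string> m_outputBuffers; // startTime -> segment bytes
};
```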

@ -0,0 +1,17 @@
<html>
<link rel="shortcut icon" href="about:blank"/>
<body>
<video controls id="video"></video>
<script src="https://cdn.jsdelivr.net/hls.js/latest/hls.js" ></script>
<script>
if (Hls.isSupported()) {
var video = document.getElementById("video");
var hls = new Hls();
hls.loadSource("unicast.m3u8");
hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED, function() { video.play(); });
}
</script>
</body>
</html>

@ -13,6 +13,8 @@
#include <sstream>
#include <fstream>
#include <algorithm>
#include "RTSPServer.hh"
#include "RTSPCommon.hh"
@ -71,20 +73,18 @@ ServerMediaSubsession* HTTPServer::HTTPClientConnection::getSubsesion(const char
return subsession;
}
void HTTPServer::HTTPClientConnection::sendM3u8PlayList(char const* urlSuffix)
bool HTTPServer::HTTPClientConnection::sendM3u8PlayList(char const* urlSuffix)
{
ServerMediaSubsession* subsession = this->getSubsesion(urlSuffix);
if (subsession == NULL)
{
handleHTTPCmd_notSupported();
return;
return false;
}
float duration = subsession->duration();
if (duration <= 0.0)
{
handleHTTPCmd_notSupported();
return;
return false;
}
unsigned int startTime = subsession->getCurrentNPT(NULL);
@ -92,7 +92,7 @@ void HTTPServer::HTTPClientConnection::sendM3u8PlayList(char const* urlSuffix)
unsigned sliceDuration = httpServer->m_hlsSegment;
std::ostringstream os;
os << "#EXTM3U\r\n"
<< "#EXT-X-ALLOW-CACHE:YES\r\n"
<< "#EXT-X-ALLOW-CACHE:NO\r\n"
<< "#EXT-X-MEDIA-SEQUENCE:" << startTime << "\r\n"
<< "#EXT-X-TARGETDURATION:" << sliceDuration << "\r\n";
@ -102,6 +102,7 @@ void HTTPServer::HTTPClientConnection::sendM3u8PlayList(char const* urlSuffix)
os << urlSuffix << "?segment=" << (startTime+slice*sliceDuration) << "\r\n";
}
envir() << "send M3u8 playlist:" << urlSuffix <<"\n";
const std::string& playList(os.str());
// send response header
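For illustration only, with assumed values (urlSuffix "unicast", startTime 10, a 5-second segment duration and a 15-second subsession duration), the playlist assembled above would look roughly like:

```
#EXTM3U
#EXT-X-ALLOW-CACHE:NO
#EXT-X-MEDIA-SEQUENCE:10
#EXT-X-TARGETDURATION:5
unicast?segment=10
unicast?segment=15
unicast?segment=20
```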
@ -111,22 +112,22 @@ void HTTPServer::HTTPClientConnection::sendM3u8PlayList(char const* urlSuffix)
u_int8_t* playListBuffer = new u_int8_t[playList.size()];
memcpy(playListBuffer, playList.c_str(), playList.size());
this->streamSource(ByteStreamMemoryBufferSource::createNew(envir(), playListBuffer, playList.size()));
return true;
}
void HTTPServer::HTTPClientConnection::sendMpdPlayList(char const* urlSuffix)
bool HTTPServer::HTTPClientConnection::sendMpdPlayList(char const* urlSuffix)
{
ServerMediaSubsession* subsession = this->getSubsesion(urlSuffix);
if (subsession == NULL)
{
handleHTTPCmd_notSupported();
return;
return false;
}
float duration = subsession->duration();
if (duration <= 0.0)
{
handleHTTPCmd_notSupported();
return;
return false;
}
unsigned int startTime = subsession->getCurrentNPT(NULL);
@ -136,16 +137,13 @@ void HTTPServer::HTTPClientConnection::sendMpdPlayList(char const* urlSuffix)
os << "<?xml version='1.0' encoding='UTF-8'?>\r\n"
<< "<MPD type='dynamic' xmlns='urn:mpeg:DASH:schema:MPD:2011' profiles='urn:mpeg:dash:profile:full:2011' minimumUpdatePeriod='PT"<< sliceDuration <<"S' minBufferTime='" << sliceDuration << "'>\r\n"
<< "<Period start='PT0S'><AdaptationSet segmentAlignment='true'><Representation mimeType='video/mp2t' codecs='' >"
<< "<SegmentList duration='" << sliceDuration << "' startNumber='" << startTime << "' >\r\n";
<< "<Period start='PT0S'><AdaptationSet segmentAlignment='true'><Representation mimeType='video/mp2t' codecs='' >\r\n";
for (unsigned int slice=0; slice*sliceDuration<duration; slice++)
{
os << "<SegmentURL media='" << urlSuffix << "?segment=" << (startTime+slice*sliceDuration) << "' />\r\n";
}
os << "</SegmentList></Representation></AdaptationSet></Period>\r\n";
os << "<SegmentTemplate duration='" << sliceDuration << "' media='" << urlSuffix << "?segment=$Number$' startNumber='" << startTime << "' />\r\n";
os << "</Representation></AdaptationSet></Period>\r\n";
os << "</MPD>\r\n";
envir() << "send MPEG-DASH playlist:" << urlSuffix <<"\n";
const std::string& playList(os.str());
// send response header
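With the same assumed values (urlSuffix "unicast", startTime 10, 5-second segments), the SegmentTemplate-based manifest built above would look roughly like:

```
<?xml version='1.0' encoding='UTF-8'?>
<MPD type='dynamic' xmlns='urn:mpeg:DASH:schema:MPD:2011' profiles='urn:mpeg:dash:profile:full:2011' minimumUpdatePeriod='PT5S' minBufferTime='5'>
<Period start='PT0S'><AdaptationSet segmentAlignment='true'><Representation mimeType='video/mp2t' codecs='' >
<SegmentTemplate duration='5' media='unicast?segment=$Number$' startNumber='10' />
</Representation></AdaptationSet></Period>
</MPD>
```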
@ -155,6 +153,8 @@ void HTTPServer::HTTPClientConnection::sendMpdPlayList(char const* urlSuffix)
u_int8_t* playListBuffer = new u_int8_t[playList.size()];
memcpy(playListBuffer, playList.c_str(), playList.size());
this->streamSource(ByteStreamMemoryBufferSource::createNew(envir(), playListBuffer, playList.size()));
return true;
}
@ -173,15 +173,48 @@ void HTTPServer::HTTPClientConnection::handleHTTPCmd_StreamingGET(char const* ur
streamName.assign(url.substr(0,pos));
ext.assign(url.substr(pos+1));
}
bool ok;
if (ext == "mpd")
{
// MPEG-DASH Playlist
this->sendMpdPlayList(streamName.c_str());
ok = this->sendMpdPlayList(streamName.c_str());
}
else
{
// HLS Playlist
this->sendM3u8PlayList(streamName.c_str());
ok = this->sendM3u8PlayList(streamName.c_str());
}
if (!ok)
{
// send local files
size_t pos = url.find_last_of("/");
if (pos != std::string::npos)
{
url.erase(pos);
}
if (url.empty())
{
url = "index.html";
ext = "html";
}
if (ext=="js") ext ="javascript";
std::ifstream file(url.c_str());
if (file.is_open())
{
envir() << "send file:" << url.c_str() <<"\n";
std::string content((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
std::string mime("text/");
mime.append(ext);
this->sendHeader(mime.c_str(), content.size());
this->streamSource(ByteStreamMemoryBufferSource::createNew(envir(), (u_int8_t*)content.c_str(), content.size()));
ok = true;
}
}
if (!ok)
{
handleHTTPCmd_notSupported();
}
}
else
@ -220,15 +253,15 @@ void HTTPServer::HTTPClientConnection::handleHTTPCmd_StreamingGET(char const* ur
{
// For some reason, we do not know the size of the requested range. We can't handle this request:
handleHTTPCmd_notSupported();
return;
}
else
{
// send response header
this->sendHeader("video/mp2t", numBytes);
// send response header
this->sendHeader("video/mp2t", numBytes);
// stream body
this->streamSource(subsession->getStreamSource(streamToken));
// stream body
this->streamSource(subsession->getStreamSource(streamToken));
}
}
}
@ -241,6 +274,17 @@ void HTTPServer::HTTPClientConnection::afterStreaming(void* clientData)
clientConnection->fIsActive = False; // will cause the object to get deleted at the end of handling the request
} else {
// We're no longer handling a request; delete the object now:
// delete clientConnection;
// delete clientConnection;
}
}
HTTPServer::HTTPClientConnection::~HTTPClientConnection()
{
if (fTCPSink != NULL)
{
FramedSource* oldSource = fTCPSink->source();
fTCPSink->stopPlaying();
Medium::close(fTCPSink);
Medium::close(oldSource);
}
}

@ -173,11 +173,11 @@ int main(int argc, char** argv)
bool repeatConfig = true;
int timeout = 65;
bool muxTS = false;
unsigned int hlsSegment = 10;
unsigned int hlsSegment = 0;
// decode parameters
int c = 0;
while ((c = getopt (argc, argv, "v::Q:O:" "I:P:p:m:u:M:ct:TS:" "rwsf::F:W:H:" "h")) != -1)
while ((c = getopt (argc, argv, "v::Q:O:" "I:P:p:m:u:M:ct:TS::" "rwsf::F:W:H:" "h")) != -1)
{
switch (c)
{
@ -194,7 +194,7 @@ int main(int argc, char** argv)
case 'c': repeatConfig = false; break;
case 't': timeout = atoi(optarg); break;
case 'T': muxTS = true; break;
case 'S': hlsSegment = atoi(optarg); muxTS=true; break;
case 'S': hlsSegment = optarg ? atoi(optarg) : 5; muxTS=true; break;
// V4L2
case 'r': ioTypeIn = V4l2DeviceFactory::IOTYPE_READ; break;
case 'w': ioTypeOut = V4l2DeviceFactory::IOTYPE_READ; break;
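Side note on the "S::" optstring (GNU getopt behaviour, minimal standalone sketch, not part of this commit): with a double colon the optional value must be glued to the flag, e.g. -S10; with a detached value ("-S 10") optarg stays NULL and the 5-second default applies.

```cpp
// Minimal sketch of optional-argument parsing with GNU getopt
// (assumption: glibc semantics; flag and output are illustrative only).
#include <unistd.h>
#include <cstdio>
#include <cstdlib>

int main(int argc, char** argv)
{
    unsigned int hlsSegment = 0;
    int c = 0;
    while ((c = getopt(argc, argv, "S::")) != -1)
    {
        if (c == 'S')
        {
            // optarg is set only when the value is attached ("-S10"),
            // otherwise fall back to a 5 second segment duration
            hlsSegment = optarg ? atoi(optarg) : 5;
        }
    }
    printf("hlsSegment=%u\n", hlsSegment);
    return 0;
}
```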
@ -208,13 +208,13 @@ int main(int argc, char** argv)
default:
{
std::cout << argv[0] << " [-v[v]] [-Q queueSize] [-O file]" << std::endl;
std::cout << "\t [-I interface] [-P RTSP port] [-T RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout]" << std::endl;
std::cout << "\t [-r] [-w] [-s] [-W width] [-H height] [-F fps] [device] [device]" << std::endl;
std::cout << "\t [-I interface] [-P RTSP port] [-p RTSP/HTTP port] [-m multicast url] [-u unicast url] [-M multicast addr] [-c] [-t timeout] [-T] [-S[duration]]" << std::endl;
std::cout << "\t [-r] [-w] [-s] [-f[format] [-W width] [-H height] [-F fps] [device] [device]" << std::endl;
std::cout << "\t -v : verbose" << std::endl;
std::cout << "\t -vv : very verbose" << std::endl;
std::cout << "\t -Q length : Number of frame queue (default "<< queueSize << ")" << std::endl;
std::cout << "\t -O output : Copy captured frame to a file or a V4L2 device" << std::endl;
std::cout << "\t RTSP/RTP options :" << std::endl;
std::cout << "\t RTSP/RTP options :" << std::endl;
std::cout << "\t -I addr : RTSP interface (default autodetect)" << std::endl;
std::cout << "\t -P port : RTSP port (default "<< rtspPort << ")" << std::endl;
std::cout << "\t -p port : RTSP over HTTP port (default "<< rtspOverHTTPPort << ")" << std::endl;
@ -222,19 +222,19 @@ int main(int argc, char** argv)
std::cout << "\t -m url : multicast url (default " << murl << ")" << std::endl;
std::cout << "\t -M addr : multicast group:port (default is random_address:20000)" << std::endl;
std::cout << "\t -c : don't repeat config (default repeat config before IDR frame)" << std::endl;
std::cout << "\t -t secs : RTCP expiration timeout (default " << timeout << ")" << std::endl;
std::cout << "\t -t timeout: RTCP expiration timeout in seconds (default " << timeout << ")" << std::endl;
std::cout << "\t -T : send Transport Stream instead of elementary Stream" << std::endl;
std::cout << "\t -S secs : HTTP segment duration (enable HLS & MPEG-DASH)" << std::endl;
std::cout << "\t V4L2 options :" << std::endl;
std::cout << "\t -S[duration]: enable HLS & MPEG-DASH with segment duration in seconds (default 5)" << std::endl;
std::cout << "\t V4L2 options :" << std::endl;
std::cout << "\t -r : V4L2 capture using read interface (default use memory mapped buffers)" << std::endl;
std::cout << "\t -w : V4L2 capture using write interface (default use memory mapped buffers)"<< std::endl;
std::cout << "\t -s : V4L2 capture using live555 mainloop (default use a reader thread)" << std::endl;
std::cout << "\t -f : V4L2 capture using current capture format (-W,-H,-F are ignored)" << std::endl;
std::cout << "\t -f format : V4L2 capture using format (-W,-H,-F are used)" << std::endl;
std::cout << "\t -fformat : V4L2 capture using format (-W,-H,-F are used)" << std::endl;
std::cout << "\t -W width : V4L2 capture width (default "<< width << ")" << std::endl;
std::cout << "\t -H height : V4L2 capture height (default "<< height << ")" << std::endl;
std::cout << "\t -F fps : V4L2 capture framerate (default "<< fps << ")" << std::endl;
std::cout << "\t device : V4L2 capture device (default "<< dev_name << ")" << std::endl;
std::cout << "\t device : V4L2 capture device (default "<< dev_name << ")" << std::endl;
exit(0);
}
}
@ -351,9 +351,13 @@ int main(int argc, char** argv)
}
// Create Unicast Session
if (muxTS)
if (hlsSegment > 0)
{
addSession(rtspServer, baseUrl+url, HLSServerMediaSubsession::createNew(*env,replicator,rtpFormat, hlsSegment));
struct in_addr ip;
ip.s_addr = ourIPAddress(*env);
LOG(NOTICE) << "HLS http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+url << ".m3u8";
LOG(NOTICE) << "MPEG-DASH http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+url << ".mpd";
}
else
{
