for #117: allow streaming over HTTP without muxing the unicast/multicast streams

pull/120/head
mpromonet 6 years ago
parent 252d613c9d
commit df5215a75f

@@ -43,7 +43,7 @@ class HLSServerMediaSubsession : public UnicastServerMediaSubsession
 	public:
 		unsigned int getHLSBufferSize(unsigned int slice);
-		const char* getHLSBuffer(unsigned int slice);
+		std::string getHLSBuffer(unsigned int slice);
 		unsigned int firstTime();
 		unsigned int duration();
 		unsigned int getSliceDuration() { return m_sliceDuration; }

@@ -192,17 +192,19 @@ void HTTPServer::HTTPClientConnection::handleHTTPCmd_StreamingGET(char const* ur
 	os << "[\n";
 	bool first = true;
 	while ( (serverSession = it.next()) != NULL) {
-		if (first)
-		{
-			first = false;
-			os << " ";
-		}
-		else
-		{
-			os << ",";
-		}
-		os << "\"" << serverSession->streamName() << "\"";
-		os << "\n";
+		if (serverSession->duration() > 0) {
+			if (first)
+			{
+				first = false;
+				os << " ";
+			}
+			else
+			{
+				os << ",";
+			}
+			os << "\"" << serverSession->streamName() << "\"";
+			os << "\n";
+		}
 	}
 	os << "]\n";
 	std::string content(os.str());
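With this change the JSON stream list only includes sessions that report a positive duration, i.e. the HLS sessions that actually have buffered slices, and the comma bookkeeping moves inside the filter so a skipped session can no longer leave a stray separator in the array. A standalone sketch of the same emit pattern (the `names` vector and `toJsonArray` helper are hypothetical, not from the patch):

    #include <sstream>
    #include <string>
    #include <vector>

    // Emits a ["a","b"]-style JSON array; filtered entries are skipped
    // before any separator is written, mirroring the loop above.
    std::string toJsonArray(const std::vector<std::string>& names) {
        std::ostringstream os;
        os << "[\n";
        bool first = true;
        for (const std::string& name : names) {
            if (name.empty()) continue;      // filter first, separator second
            os << (first ? " " : ",");
            first = false;
            os << "\"" << name << "\"\n";
        }
        os << "]\n";
        return os.str();
    }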

@@ -81,13 +81,14 @@ unsigned int HLSServerMediaSubsession::HLSSink::getHLSBufferSize(unsigned int sl
 	return size;
 }
 
-const char* HLSServerMediaSubsession::HLSSink::getHLSBuffer(unsigned int slice)
+std::string HLSServerMediaSubsession::HLSSink::getHLSBuffer(unsigned int slice)
 {
-	const char* content = NULL;
+	std::string content;
 	std::map<unsigned int,std::string>::iterator it = m_outputBuffers.find(slice);
 	if (it != m_outputBuffers.end())
 	{
-		content = it->second.c_str();
+		content = it->second;
 	}
 	return content;
 }
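Returning `const char*` here handed out a pointer into `m_outputBuffers`; that pointer dangles as soon as the map drops or overwrites the slice, and a missing slice produced NULL, which the old `getStreamSource` further down passed straight to `memcpy`. Returning `std::string` by value copies the slice out and turns "not found" into an empty string. A standalone illustration of the hazard, with a plain map standing in for the sink's slice buffer:

    #include <cstdio>
    #include <map>
    #include <string>

    int main() {
        std::map<unsigned, std::string> slices;
        slices[0] = "slice-0-data";

        const char* raw = slices[0].c_str(); // borrowed pointer into the map
        std::string copy = slices[0];        // independent copy, stays valid

        slices.erase(0);                     // what the sink does when a slice expires

        // printf("%s\n", raw);              // undefined behavior: dangling pointer
        printf("%s\n", copy.c_str());        // safe: prints "slice-0-data"
        return 0;
    }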
@@ -112,13 +113,90 @@ unsigned int HLSServerMediaSubsession::HLSSink::duration()
 	return (duration)*m_sliceDuration;
 }
 
+char marker[] = {0,0,0,1};
+class AddMarker : public FramedFilter {
+	public:
+		AddMarker (UsageEnvironment& env, FramedSource* inputSource): FramedFilter(env, inputSource) {
+			m_bufferSize = OutPacketBuffer::maxSize;
+			m_buffer = new unsigned char[m_bufferSize];
+		}
+
+		virtual ~AddMarker () {
+			delete [] m_buffer;
+		}
+
+	private:
+		static void afterGettingFrame(void* clientData, unsigned frameSize,
+						unsigned numTruncatedBytes,
+						struct timeval presentationTime,
+						unsigned durationInMicroseconds) {
+			AddMarker* sink = (AddMarker*)clientData;
+			sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime);
+		}
+
+		void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime)
+		{
+			fPresentationTime = presentationTime;
+			fDurationInMicroseconds = 0;
+			if (numTruncatedBytes > 0)
+			{
+				envir() << "AddMarker::afterGettingFrame(): The input frame data was too large for our buffer size truncated:" << numTruncatedBytes << " bufferSize:" << m_bufferSize << "\n";
+				m_bufferSize += numTruncatedBytes;
+				delete[] m_buffer;
+				m_buffer = new unsigned char[m_bufferSize];
+				fFrameSize = 0;
+			} else {
+				fFrameSize = frameSize + sizeof(marker);
+				if (fFrameSize > fMaxSize) {
+					fNumTruncatedBytes = fFrameSize - fMaxSize;
+					envir() << "AddMarker::afterGettingFrame(): buffer too small truncated:" << fNumTruncatedBytes << " bufferSize:" << fFrameSize << "\n";
+				} else {
+					fNumTruncatedBytes = 0;
+					memcpy(fTo, marker, sizeof(marker));
+					memcpy(fTo+sizeof(marker), m_buffer, frameSize);
+				}
+			}
+			afterGetting(this);
+		}
+
+		virtual void doGetNextFrame() {
+			if (fInputSource != NULL)
+			{
+				fInputSource->getNextFrame(m_buffer, m_bufferSize,
+						afterGettingFrame, this,
+						handleClosure, this);
+			}
+		}
+
+		unsigned char* m_buffer;
+		unsigned int m_bufferSize;
+};
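Context for this new filter: live555's `MPEG2TransportStreamFromESSource` parses its video input as an Annex B byte stream, where every NAL unit is preceded by the start code 00 00 00 01, while the replicated device source delivers bare frames; `AddMarker` re-inserts the start code in front of each frame, growing its staging buffer whenever a frame arrives truncated. A standalone sketch of just the framing step, independent of live555:

    #include <cstring>
    #include <vector>

    // Annex B framing as AddMarker performs it: prepend the 4-byte
    // start code 00 00 00 01 to a bare NAL unit.
    static const unsigned char kStartCode[4] = {0, 0, 0, 1};

    std::vector<unsigned char> toAnnexB(const unsigned char* nal, size_t nalSize) {
        std::vector<unsigned char> framed(sizeof(kStartCode) + nalSize);
        memcpy(framed.data(), kStartCode, sizeof(kStartCode));
        memcpy(framed.data() + sizeof(kStartCode), nal, nalSize);
        return framed;
    }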
 HLSServerMediaSubsession::HLSServerMediaSubsession(UsageEnvironment& env, StreamReplicator* replicator, const std::string& format, unsigned int sliceDuration)
-	: UnicastServerMediaSubsession(env, replicator, format), m_slice(0)
+	: UnicastServerMediaSubsession(env, replicator, "video/MP2T"), m_slice(0)
 {
 	// Create a source
-	FramedSource* source = replicator->createStreamReplica();
-	FramedSource* videoSource = createSource(env, source, format);
+	FramedSource* source = replicator->createStreamReplica();
+	if (format == "video/H264") {
+		// add marker
+		FramedSource* filter = new AddMarker(env, source);
+		// mux to TS
+		MPEG2TransportStreamFromESSource* muxer = MPEG2TransportStreamFromESSource::createNew(env);
+		muxer->addNewVideoSource(filter, 5);
+		source = muxer;
+	} else if (format == "video/H265") {
+		// add marker
+		FramedSource* filter = new AddMarker(env, source);
+		// mux to TS
+		MPEG2TransportStreamFromESSource* muxer = MPEG2TransportStreamFromESSource::createNew(env);
+		muxer->addNewVideoSource(filter, 6);
+		source = muxer;
+	}
+	FramedSource* videoSource = createSource(env, source, m_format);
 
 	// Start Playing the HLS Sink
 	m_hlsSink = HLSSink::createNew(env, OutPacketBuffer::maxSize, sliceDuration);
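The TS muxing thus moves from the capture pipeline into the HLS subsession itself: the replica is framed by `AddMarker`, wrapped in a fresh `MPEG2TransportStreamFromESSource` (whose `addNewVideoSource` takes an `mpegVersion` argument: 5 means H.264, 6 means H.265), and the base class is told the resulting format is `video/MP2T` whatever the input codec was. The wiring could be factored as below; `muxElementaryStreamToTS` is an illustrative helper, not part of the patch:

    #include "liveMedia.hh"

    // Wraps an Annex B elementary-stream source in a TS muxer; the muxer
    // is itself a FramedSource that delivers transport-stream packets.
    FramedSource* muxElementaryStreamToTS(UsageEnvironment& env,
                                          FramedSource* annexBSource,
                                          int mpegVersion /* 5 = H.264, 6 = H.265 */) {
        MPEG2TransportStreamFromESSource* muxer =
            MPEG2TransportStreamFromESSource::createNew(env);
        muxer->addNewVideoSource(annexBSource, mpegVersion);
        return muxer;
    }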
@@ -145,14 +223,19 @@ void HLSServerMediaSubsession::seekStream(unsigned clientSessionId, void* stream
 	m_slice = seekNPT / m_hlsSink->getSliceDuration();
 	seekNPT = m_slice * m_hlsSink->getSliceDuration();
 	numBytes = m_hlsSink->getHLSBufferSize(m_slice);
-	std::cout << "seek seekNPT:" << seekNPT << " slice:" << m_slice << " numBytes:" << numBytes << std::endl;
+	std::cout << "seek seekNPT:" << seekNPT << " slice:" << m_slice << " numBytes:" << numBytes << std::endl;
 }
 
 FramedSource* HLSServerMediaSubsession::getStreamSource(void* streamToken)
 {
-	unsigned int size = m_hlsSink->getHLSBufferSize(m_slice);
-	u_int8_t* content = new u_int8_t[size];
-	memcpy(content, m_hlsSink->getHLSBuffer(m_slice), size);
-	return ByteStreamMemoryBufferSource::createNew(envir(), content, size);
+	FramedSource* source = NULL;
+	std::string buffer = m_hlsSink->getHLSBuffer(m_slice);
+	unsigned int size = buffer.size();
+	if ( size != 0 ) {
+		u_int8_t* content = new u_int8_t[size];
+		memcpy(content, buffer.c_str(), size);
+		source = ByteStreamMemoryBufferSource::createNew(envir(), content, size);
+	}
+	return source;
 }
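The rewritten `getStreamSource` copies the slice into a heap buffer only when the slice still exists, and otherwise returns NULL instead of `memcpy`-ing from whatever `getHLSBuffer` used to return. The copy is needed because `ByteStreamMemoryBufferSource::createNew` takes ownership of the buffer by default (`deleteBufferOnClose` defaults to True). A hedged sketch of the same pattern as a free function (name and factoring are illustrative):

    #include "liveMedia.hh"
    #include <cstring>
    #include <string>

    // Builds a one-shot live555 source from an in-memory payload, or
    // returns NULL when there is nothing to serve (e.g. the slice expired).
    FramedSource* memorySourceFromString(UsageEnvironment& env, const std::string& payload) {
        if (payload.empty()) return NULL;
        u_int8_t* content = new u_int8_t[payload.size()];
        memcpy(content, payload.data(), payload.size());
        // The source frees `content` on close (deleteBufferOnClose defaults to True).
        return ByteStreamMemoryBufferSource::createNew(env, content, payload.size());
    }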

@@ -105,35 +105,20 @@ RTSPServer* createRTSPServer(UsageEnvironment& env, unsigned short rtspPort, uns
 // -----------------------------------------
 //    create FramedSource server
 // -----------------------------------------
-FramedSource* createFramedSource(UsageEnvironment* env, int format, DeviceInterface* videoCapture, int outfd, int queueSize, bool useThread, bool repeatConfig, MPEG2TransportStreamFromESSource* muxer)
+FramedSource* createFramedSource(UsageEnvironment* env, int format, DeviceInterface* videoCapture, int outfd, int queueSize, bool useThread, bool repeatConfig)
 {
-	bool muxTS = (muxer != NULL);
 	FramedSource* source = NULL;
 	if (format == V4L2_PIX_FMT_H264)
 	{
-		source = H264_V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread, repeatConfig, muxTS);
-		if (muxTS)
-		{
-			muxer->addNewVideoSource(source, 5);
-			source = muxer;
-		}
+		source = H264_V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread, repeatConfig, false);
 	}
 	else if (format == V4L2_PIX_FMT_HEVC)
 	{
-		source = H265_V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread, repeatConfig, muxTS);
-		if (muxTS)
-		{
-			muxer->addNewVideoSource(source, 6);
-			source = muxer;
-		}
-	}
-	else if (!muxTS)
-	{
-		source = V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread);
+		source = H265_V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread, repeatConfig, false);
 	}
 	else
 	{
-		LOG(ERROR) << "TS in nor compatible with format";
+		source = V4L2DeviceSource::createNew(*env, videoCapture, outfd, queueSize, useThread);
 	}
 	return source;
 }
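Note the last constructor argument: the flag previously derived from `muxTS` is now always `false`, so the device sources no longer prepare their output for an in-pipeline TS muxer. Judging from the `AddMarker` filter introduced above, that preparation amounted to keeping the Annex B start codes, which the HLS subsession now restores itself; the RTP packetizers never needed them.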
@@ -173,25 +158,18 @@ int addSession(RTSPServer* rtspServer, const std::string & sessionName, const st
 // -----------------------------------------
 //    convert V4L2 pix format to RTP mime
 // -----------------------------------------
-std::string getVideoRtpFormat(int format, bool muxTS)
+std::string getVideoRtpFormat(int format)
 {
 	std::string rtpFormat;
-	if (muxTS)
-	{
-		rtpFormat = "video/MP2T";
-	}
-	else
-	{
-		switch(format)
-		{
-			case V4L2_PIX_FMT_HEVC : rtpFormat = "video/H265"; break;
-			case V4L2_PIX_FMT_H264 : rtpFormat = "video/H264"; break;
-			case V4L2_PIX_FMT_MJPEG: rtpFormat = "video/JPEG"; break;
-			case V4L2_PIX_FMT_JPEG : rtpFormat = "video/JPEG"; break;
-			case V4L2_PIX_FMT_VP8  : rtpFormat = "video/VP8" ; break;
-			case V4L2_PIX_FMT_VP9  : rtpFormat = "video/VP9" ; break;
-			case V4L2_PIX_FMT_YUYV : rtpFormat = "video/RAW" ; break;
-		}
-	}
+	switch(format)
+	{
+		case V4L2_PIX_FMT_HEVC : rtpFormat = "video/H265"; break;
+		case V4L2_PIX_FMT_H264 : rtpFormat = "video/H264"; break;
+		case V4L2_PIX_FMT_MJPEG: rtpFormat = "video/JPEG"; break;
+		case V4L2_PIX_FMT_JPEG : rtpFormat = "video/JPEG"; break;
+		case V4L2_PIX_FMT_VP8  : rtpFormat = "video/VP8" ; break;
+		case V4L2_PIX_FMT_VP9  : rtpFormat = "video/VP9" ; break;
+		case V4L2_PIX_FMT_YUYV : rtpFormat = "video/RAW" ; break;
+	}
 	return rtpFormat;
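`getVideoRtpFormat` is now a pure V4L2-pixel-format to RTP-MIME lookup; the `video/MP2T` answer is no longer produced here but only inside `HLSServerMediaSubsession`, where the muxing actually happens. Expected mappings, as a small usage sketch (assuming `linux/videodev2.h` for the constants):

    #include <linux/videodev2.h>
    #include <string>

    std::string getVideoRtpFormat(int format); // defined above

    void checkMappings() {
        std::string h265 = getVideoRtpFormat(V4L2_PIX_FMT_HEVC); // "video/H265"
        std::string h264 = getVideoRtpFormat(V4L2_PIX_FMT_H264); // "video/H264"
        std::string none = getVideoRtpFormat(0);                 // "" for unsupported formats
    }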
@@ -426,11 +404,11 @@ int main(int argc, char** argv)
 	V4l2Access::IoType ioTypeOut = V4l2Access::IOTYPE_MMAP;
 	std::string url = "unicast";
 	std::string murl = "multicast";
+	std::string tsurl = "ts";
 	bool useThread = true;
 	std::string maddr;
 	bool repeatConfig = true;
 	int timeout = 65;
-	bool muxTS = false;
 	int defaultHlsSegment = 5;
 	unsigned int hlsSegment = 0;
 	const char* realm = NULL;
@@ -465,8 +443,7 @@ int main(int argc, char** argv)
 			case 'M':	multicast = true; maddr = optarg; break;
 			case 'c':	repeatConfig = false; break;
 			case 't':	timeout = atoi(optarg); break;
-			case 'T':	muxTS = true; break;
-			case 'S':	hlsSegment = optarg ? atoi(optarg) : defaultHlsSegment; muxTS=true; break;
+			case 'S':	hlsSegment = optarg ? atoi(optarg) : defaultHlsSegment; break;
 
 			// users
 			case 'R':	realm = optarg; break;
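The `-T` option disappears together with `muxTS`: forcing TS muxing onto the RTP sessions is no longer possible, and `-S [duration]` now only selects the HLS slice duration (defaulting to `defaultHlsSegment`, 5 seconds) without switching the whole pipeline over to `video/MP2T`.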
@@ -610,11 +587,6 @@ int main(int argc, char** argv)
 		baseUrl = getDeviceName(videoDev);
 		baseUrl.append("/");
 	}
-	MPEG2TransportStreamFromESSource* muxer = NULL;
-	if (muxTS)
-	{
-		muxer = MPEG2TransportStreamFromESSource::createNew(*env);
-	}
 	StreamReplicator* videoReplicator = NULL;
 	std::string rtpFormat;
 	if (!videoDev.empty())
@@ -638,13 +610,13 @@ int main(int argc, char** argv)
 			}
 		}
-		rtpFormat.assign(getVideoRtpFormat(videoCapture->getFormat(), muxTS));
+		rtpFormat.assign(getVideoRtpFormat(videoCapture->getFormat()));
 		if (rtpFormat.empty()) {
 			LOG(FATAL) << "No Streaming format supported for device " << videoDev;
 			delete videoCapture;
 		} else {
 			LOG(NOTICE) << "Create Source ..." << videoDev;
-			FramedSource* videoSource = createFramedSource(env, videoCapture->getFormat(), new DeviceCaptureAccess<V4l2Capture>(videoCapture), outfd, queueSize, useThread, repeatConfig, muxer);
+			FramedSource* videoSource = createFramedSource(env, videoCapture->getFormat(), new DeviceCaptureAccess<V4l2Capture>(videoCapture), outfd, queueSize, useThread, repeatConfig);
 			if (videoSource == NULL)
 			{
 				LOG(FATAL) << "Unable to create source for device " << videoDev;
@@ -726,7 +698,8 @@ int main(int argc, char** argv)
 			}
 			nbSource += addSession(rtspServer, baseUrl+murl, subSession);
 		}
-		// Create Unicast Session
+
+		// Create HLS Session
 		if (hlsSegment > 0)
 		{
 			std::list<ServerMediaSubsession*> subSession;
@@ -734,26 +707,25 @@ int main(int argc, char** argv)
 			{
 				subSession.push_back(HLSServerMediaSubsession::createNew(*env, videoReplicator, rtpFormat, hlsSegment));
 			}
-			nbSource += addSession(rtspServer, baseUrl+url, subSession);
+			nbSource += addSession(rtspServer, baseUrl+tsurl, subSession);
 
 			struct in_addr ip;
 			ip.s_addr = ourIPAddress(*env);
-			LOG(NOTICE) << "HLS http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+url << ".m3u8";
-			LOG(NOTICE) << "MPEG-DASH http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+url << ".mpd";
+			LOG(NOTICE) << "HLS http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+tsurl << ".m3u8";
+			LOG(NOTICE) << "MPEG-DASH http://" << inet_ntoa(ip) << ":" << rtspPort << "/" << baseUrl+tsurl << ".mpd";
 		}
-		else
+
+		// Create Unicast Session
+		std::list<ServerMediaSubsession*> subSession;
+		if (videoReplicator)
 		{
-			std::list<ServerMediaSubsession*> subSession;
-			if (videoReplicator)
-			{
-				subSession.push_back(UnicastServerMediaSubsession::createNew(*env, videoReplicator, rtpFormat));
-			}
-			if (audioReplicator)
-			{
-				subSession.push_back(UnicastServerMediaSubsession::createNew(*env, audioReplicator, rtpAudioFormat));
-			}
-			nbSource += addSession(rtspServer, baseUrl+url, subSession);
+			subSession.push_back(UnicastServerMediaSubsession::createNew(*env, videoReplicator, rtpFormat));
 		}
+		if (audioReplicator)
+		{
+			subSession.push_back(UnicastServerMediaSubsession::createNew(*env, audioReplicator, rtpAudioFormat));
+		}
+		nbSource += addSession(rtspServer, baseUrl+url, subSession);
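Net effect of the main() changes: the unicast session at `<base>/unicast` is now always created and always carries the native elementary-stream format, while the TS-muxed variant used for HLS and MPEG-DASH moves to its own `<base>/ts` endpoint (`ts.m3u8` / `ts.mpd`). Enabling `-S` therefore no longer changes what RTSP clients receive, matching the commit message: HTTP streaming without muxing the unicast/multicast streams.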
if (nbSource>0)
