libmpdemux/demux_rtp.cpp (mplayer/kovensky.git)

/*
 * routines (with C-linkage) that interface between MPlayer
 * and the "LIVE555 Streaming Media" libraries
 *
 * This file is part of MPlayer.
 *
 * MPlayer is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * MPlayer is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with MPlayer; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

extern "C" {
// on MinGW, we must include windows.h before the things it conflicts
#ifdef __MINGW32__    // with. they are each protected from
#include <windows.h>  // windows.h, but not the other way around.
#endif
#include "demux_rtp.h"
#include "stheader.h"
#include "options.h"
}
#include "demux_rtp_internal.h"

#include "BasicUsageEnvironment.hh"
#include "liveMedia.hh"
#include "GroupsockHelper.hh"
#include <unistd.h>

// A data structure representing input data for each stream:
class ReadBufferQueue {
public:
  ReadBufferQueue(MediaSubsession* subsession, demuxer_t* demuxer,
                  char const* tag);
  virtual ~ReadBufferQueue();

  FramedSource* readSource() const { return fReadSource; }
  RTPSource* rtpSource() const { return fRTPSource; }
  demuxer_t* ourDemuxer() const { return fOurDemuxer; }
  char const* tag() const { return fTag; }

  char blockingFlag; // used to implement synchronous reads

  // For A/V synchronization:
  Boolean prevPacketWasSynchronized;
  float prevPacketPTS;
  ReadBufferQueue** otherQueue;

  // The 'queue' actually consists of just a single "demux_packet_t"
  // (because the underlying OS does the actual queueing/buffering):
  demux_packet_t* dp;

  // However, we sometimes inspect buffers before delivering them.
  // For this, we maintain a queue of pending buffers:
  void savePendingBuffer(demux_packet_t* dp);
  demux_packet_t* getPendingBuffer();

  // For H264 over rtsp using AVParser, the next packet has to be saved
  demux_packet_t* nextpacket;

private:
  demux_packet_t* pendingDPHead;
  demux_packet_t* pendingDPTail;

  FramedSource* fReadSource;
  RTPSource* fRTPSource;
  demuxer_t* fOurDemuxer;
  char const* fTag; // used for debugging
};

// A structure of RTP-specific state, kept so that we can cleanly
// reclaim it:
typedef struct RTPState {
  char const* sdpDescription;
  RTSPClient* rtspClient;
  SIPClient* sipClient;
  MediaSession* mediaSession;
  ReadBufferQueue* audioBufferQueue;
  ReadBufferQueue* videoBufferQueue;
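  // "flags" accumulates the RTPSTATE_* bits reported by codec initialization;
  // "firstSyncTime" records the first RTCP-synchronized presentation time,
  // used as the origin for the PTS values computed in afterReading():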
  unsigned flags;
  struct timeval firstSyncTime;
} RTPState;

extern "C" char* network_username;
extern "C" char* network_password;
static char* openURL_rtsp(RTSPClient* client, char const* url) {
  // If we were given a user name (and optional password), then use them:
  if (network_username != NULL) {
    char const* password = network_password == NULL ? "" : network_password;
    return client->describeWithPassword(url, network_username, password);
  } else {
    return client->describeURL(url);
  }
}

static char* openURL_sip(SIPClient* client, char const* url) {
  // If we were given a user name (and optional password), then use them:
  if (network_username != NULL) {
    char const* password = network_password == NULL ? "" : network_password;
    return client->inviteWithPassword(url, network_username, password);
  } else {
    return client->invite(url);
  }
}

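// RTSP transport/port settings configured elsewhere in MPlayer, and the
// shared libavcodec context used by the H.264 parser further below: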
#ifdef CONFIG_LIBNEMESI
extern int rtsp_transport_tcp;
#else
int rtsp_transport_tcp = 0;
#endif

extern int rtsp_port;
#ifdef CONFIG_LIBAVCODEC
extern AVCodecContext *avcctx;
#endif

extern "C" demuxer_t* demux_open_rtp(demuxer_t* demuxer) {
  struct MPOpts *opts = demuxer->opts;
  Boolean success = False;
  do {
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    if (scheduler == NULL) break;
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
    if (env == NULL) break;

    RTSPClient* rtspClient = NULL;
    SIPClient* sipClient = NULL;

    if (demuxer == NULL || demuxer->stream == NULL) break; // shouldn't happen
    demuxer->stream->eof = 0; // just in case

    // Look at the stream's 'priv' field to see if we were initiated
    // via a SDP description:
    char* sdpDescription = (char*)(demuxer->stream->priv);
    if (sdpDescription == NULL) {
      // We weren't given a SDP description directly, so assume that
      // we were given a RTSP or SIP URL:
      char const* protocol = demuxer->stream->streaming_ctrl->url->protocol;
      char const* url = demuxer->stream->streaming_ctrl->url->url;
      extern int verbose;
      if (strcmp(protocol, "rtsp") == 0) {
        rtspClient = RTSPClient::createNew(*env, verbose, "MPlayer");
        if (rtspClient == NULL) {
          fprintf(stderr, "Failed to create RTSP client: %s\n",
                  env->getResultMsg());
          break;
        }
        sdpDescription = openURL_rtsp(rtspClient, url);
      } else { // SIP
        unsigned char desiredAudioType = 0; // PCMU (use 3 for GSM)
        sipClient = SIPClient::createNew(*env, desiredAudioType, NULL,
                                         verbose, "MPlayer");
        if (sipClient == NULL) {
          fprintf(stderr, "Failed to create SIP client: %s\n",
                  env->getResultMsg());
          break;
        }
        sipClient->setClientStartPortNum(8000);
        sdpDescription = openURL_sip(sipClient, url);
      }

      if (sdpDescription == NULL) {
        fprintf(stderr, "Failed to get a SDP description from URL \"%s\": %s\n",
                url, env->getResultMsg());
        break;
      }
    }

    // Now that we have a SDP description, create a MediaSession from it:
    MediaSession* mediaSession = MediaSession::createNew(*env, sdpDescription);
    if (mediaSession == NULL) break;

    // Create a 'RTPState' structure containing the state that we just created,
    // and store it in the demuxer's 'priv' field, for future reference:
    RTPState* rtpState = new RTPState;
    rtpState->sdpDescription = sdpDescription;
    rtpState->rtspClient = rtspClient;
    rtpState->sipClient = sipClient;
    rtpState->mediaSession = mediaSession;
    rtpState->audioBufferQueue = rtpState->videoBufferQueue = NULL;
    rtpState->flags = 0;
    rtpState->firstSyncTime.tv_sec = rtpState->firstSyncTime.tv_usec = 0;
    demuxer->priv = rtpState;

    int audiofound = 0, videofound = 0;
    // Create RTP receivers (sources) for each subsession:
    MediaSubsessionIterator iter(*mediaSession);
    MediaSubsession* subsession;
    unsigned desiredReceiveBufferSize;
    while ((subsession = iter.next()) != NULL) {
      // Ignore any subsession that's not audio or video:
      if (strcmp(subsession->mediumName(), "audio") == 0) {
        if (audiofound) {
          fprintf(stderr, "Additional subsession \"audio/%s\" skipped\n", subsession->codecName());
          continue;
        }
        desiredReceiveBufferSize = 100000;
      } else if (strcmp(subsession->mediumName(), "video") == 0) {
        if (videofound) {
          fprintf(stderr, "Additional subsession \"video/%s\" skipped\n", subsession->codecName());
          continue;
        }
        desiredReceiveBufferSize = 2000000;
      } else {
        continue;
      }

      if (rtsp_port)
        subsession->setClientPortNum (rtsp_port);

      if (!subsession->initiate()) {
        fprintf(stderr, "Failed to initiate \"%s/%s\" RTP subsession: %s\n", subsession->mediumName(), subsession->codecName(), env->getResultMsg());
      } else {
        fprintf(stderr, "Initiated \"%s/%s\" RTP subsession on port %d\n", subsession->mediumName(), subsession->codecName(), subsession->clientPortNum());

        // Set the OS's socket receive buffer sufficiently large to avoid
        // incoming packets getting dropped between successive reads from this
        // subsession's demuxer. Depending on the bitrate(s) that you expect,
        // you may wish to tweak the "desiredReceiveBufferSize" values above.
        int rtpSocketNum = subsession->rtpSource()->RTPgs()->socketNum();
        int receiveBufferSize
          = increaseReceiveBufferTo(*env, rtpSocketNum,
                                    desiredReceiveBufferSize);
        if (verbose > 0) {
          fprintf(stderr, "Increased %s socket receive buffer to %d bytes \n",
                  subsession->mediumName(), receiveBufferSize);
        }

        if (rtspClient != NULL) {
          // Issue a RTSP "SETUP" command on the chosen subsession:
          if (!rtspClient->setupMediaSubsession(*subsession, False,
                                                rtsp_transport_tcp)) break;
          if (!strcmp(subsession->mediumName(), "audio"))
            audiofound = 1;
          if (!strcmp(subsession->mediumName(), "video"))
            videofound = 1;
        }
      }
    }

    if (rtspClient != NULL) {
      // Issue a RTSP aggregate "PLAY" command on the whole session:
      if (!rtspClient->playMediaSession(*mediaSession)) break;
    } else if (sipClient != NULL) {
      sipClient->sendACK(); // to start the stream flowing
    }

    // Now that the session is ready to be read, do additional
    // MPlayer codec-specific initialization on each subsession:
    iter.reset();
    while ((subsession = iter.next()) != NULL) {
      if (subsession->readSource() == NULL) continue; // not reading this

      unsigned flags = 0;
      if (strcmp(subsession->mediumName(), "audio") == 0) {
        rtpState->audioBufferQueue
          = new ReadBufferQueue(subsession, demuxer, "audio");
        rtpState->audioBufferQueue->otherQueue = &(rtpState->videoBufferQueue);
        rtpCodecInitialize_audio(demuxer, subsession, flags);
      } else if (strcmp(subsession->mediumName(), "video") == 0) {
        rtpState->videoBufferQueue
          = new ReadBufferQueue(subsession, demuxer, "video");
        rtpState->videoBufferQueue->otherQueue = &(rtpState->audioBufferQueue);
        rtpCodecInitialize_video(demuxer, subsession, flags);
      }
      rtpState->flags |= flags;
    }
    success = True;
  } while (0);
  if (!success) return NULL; // an error occurred

  // Hack: If audio and video are demuxed together on a single RTP stream,
  // then create a new "demuxer_t" structure to allow the higher-level
  // code to recognize this:
  if (demux_is_multiplexed_rtp_stream(demuxer)) {
    stream_t* s = new_ds_stream(demuxer->video);
    demuxer_t* od = demux_open(opts, s, DEMUXER_TYPE_UNKNOWN,
                               opts->audio_id, opts->video_id, opts->sub_id,
                               NULL);
    demuxer = new_demuxers_demuxer(od, od, od);
  }

  return demuxer;
}

extern "C" int demux_is_mpeg_rtp_stream(demuxer_t* demuxer) {
  // Get the RTP state that was stored in the demuxer's 'priv' field:
  RTPState* rtpState = (RTPState*)(demuxer->priv);

  return (rtpState->flags&RTPSTATE_IS_MPEG12_VIDEO) != 0;
}

extern "C" int demux_is_multiplexed_rtp_stream(demuxer_t* demuxer) {
  // Get the RTP state that was stored in the demuxer's 'priv' field:
  RTPState* rtpState = (RTPState*)(demuxer->priv);

  return (rtpState->flags&RTPSTATE_IS_MULTIPLEXED) != 0;
}

static demux_packet_t* getBuffer(demuxer_t* demuxer, demux_stream_t* ds,
                                 Boolean mustGetNewData,
                                 float& ptsBehind); // forward

extern "C" int demux_rtp_fill_buffer(demuxer_t* demuxer, demux_stream_t* ds) {
  // Get a filled-in "demux_packet" from the RTP source, and deliver it.
  // Note that this is called as a synchronous read operation, so it needs
  // to block in the (hopefully infrequent) case where no packet is
  // immediately available.

  while (1) {
    float ptsBehind;
    demux_packet_t* dp = getBuffer(demuxer, ds, False, ptsBehind); // blocking
    if (dp == NULL) return 0;

    if (demuxer->stream->eof) return 0; // source stream has closed down

    // Before using this packet, check to make sure that its presentation
    // time is not far behind the other stream (if any).  If it is,
    // then we discard this packet, and get another instead.  (The rest of
    // MPlayer doesn't always do a good job of synchronizing when the
    // audio and video streams get this far apart.)
    // (We don't do this when streaming over TCP, because then the audio and
    // video streams are interleaved.)
    // (Also, if the stream is *excessively* far behind, then we allow
    // the packet, because in this case it probably means that there was
    // an error in the source's timestamp synchronization.)
    const float ptsBehindThreshold = 1.0; // seconds
    const float ptsBehindLimit = 60.0; // seconds
    if (ptsBehind < ptsBehindThreshold ||
        ptsBehind > ptsBehindLimit ||
        rtsp_transport_tcp) { // packet's OK
      ds_add_packet(ds, dp);
      break;
    }

#ifdef DEBUG_PRINT_DISCARDED_PACKETS
    RTPState* rtpState = (RTPState*)(demuxer->priv);
    ReadBufferQueue* bufferQueue = ds == demuxer->video ? rtpState->videoBufferQueue : rtpState->audioBufferQueue;
    fprintf(stderr, "Discarding %s packet (%fs behind)\n", bufferQueue->tag(), ptsBehind);
#endif
    free_demux_packet(dp); // give back this packet, and get another one
  }

  return 1;
}

Boolean awaitRTPPacket(demuxer_t* demuxer, demux_stream_t* ds,
                       unsigned char*& packetData, unsigned& packetDataLen,
                       float& pts) {
  // Similar to "demux_rtp_fill_buffer()", except that the "demux_packet"
  // is not delivered to the "demux_stream".
  float ptsBehind;
  demux_packet_t* dp = getBuffer(demuxer, ds, True, ptsBehind); // blocking
  if (dp == NULL) return False;

  packetData = dp->buffer;
  packetDataLen = dp->len;
  pts = dp->pts;

  return True;
}

static void teardownRTSPorSIPSession(RTPState* rtpState); // forward

extern "C" void demux_close_rtp(demuxer_t* demuxer) {
  // Reclaim all RTP-related state:

  // Get the RTP state that was stored in the demuxer's 'priv' field:
  RTPState* rtpState = (RTPState*)(demuxer->priv);
  if (rtpState == NULL) return;

  teardownRTSPorSIPSession(rtpState);

  UsageEnvironment* env = NULL;
  TaskScheduler* scheduler = NULL;
  if (rtpState->mediaSession != NULL) {
    env = &(rtpState->mediaSession->envir());
    scheduler = &(env->taskScheduler());
  }
  Medium::close(rtpState->mediaSession);
  Medium::close(rtpState->rtspClient);
  Medium::close(rtpState->sipClient);
  delete rtpState->audioBufferQueue;
  delete rtpState->videoBufferQueue;
  delete[] rtpState->sdpDescription;
  delete rtpState;
#ifdef CONFIG_LIBAVCODEC
  av_freep(&avcctx);
#endif

  env->reclaim(); delete scheduler;
}

////////// Extra routines that help implement the above interface functions:

#define MAX_RTP_FRAME_SIZE 5000000
// >= the largest conceivable frame composed from one or more RTP packets

static void afterReading(void* clientData, unsigned frameSize,
                         unsigned /*numTruncatedBytes*/,
                         struct timeval presentationTime,
                         unsigned /*durationInMicroseconds*/) {
  int headersize = 0;
  if (frameSize >= MAX_RTP_FRAME_SIZE) {
    fprintf(stderr, "Saw an input frame too large (>=%d). Increase MAX_RTP_FRAME_SIZE in \"demux_rtp.cpp\".\n",
            MAX_RTP_FRAME_SIZE);
  }
  ReadBufferQueue* bufferQueue = (ReadBufferQueue*)clientData;
  demuxer_t* demuxer = bufferQueue->ourDemuxer();
  RTPState* rtpState = (RTPState*)(demuxer->priv);

  if (frameSize > 0) demuxer->stream->eof = 0;

  demux_packet_t* dp = bufferQueue->dp;

  if (bufferQueue->readSource()->isAMRAudioSource())
    headersize = 1;
  else if (bufferQueue == rtpState->videoBufferQueue &&
           ((sh_video_t*)demuxer->video->sh)->format == mmioFOURCC('H','2','6','4')) {
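    // Prepend an Annex B start code (0x00 0x00 0x01) so that the H.264
    // decoder can locate the NAL unit in the delivered packet: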
    dp->buffer[0]=0x00;
    dp->buffer[1]=0x00;
    dp->buffer[2]=0x01;
    headersize = 3;
  }

  resize_demux_packet(dp, frameSize + headersize);

  // Set the packet's presentation time stamp, depending on whether or
  // not our RTP source's timestamps have been synchronized yet:
  Boolean hasBeenSynchronized
    = bufferQueue->rtpSource()->hasBeenSynchronizedUsingRTCP();
  if (hasBeenSynchronized) {
    if (verbose > 0 && !bufferQueue->prevPacketWasSynchronized) {
      fprintf(stderr, "%s stream has been synchronized using RTCP \n",
              bufferQueue->tag());
    }

    struct timeval* fst = &(rtpState->firstSyncTime); // abbrev
    if (fst->tv_sec == 0 && fst->tv_usec == 0) {
      *fst = presentationTime;
    }

    // For the "pts" field, use the time differential from the first
    // synchronized time, rather than absolute time, in order to avoid
    // round-off errors when converting to a float:
    dp->pts = presentationTime.tv_sec - fst->tv_sec
      + (presentationTime.tv_usec - fst->tv_usec)/1000000.0;
    bufferQueue->prevPacketPTS = dp->pts;
  } else {
    if (verbose > 0 && bufferQueue->prevPacketWasSynchronized) {
      fprintf(stderr, "%s stream is no longer RTCP-synchronized \n",
              bufferQueue->tag());
    }

    // use the previous packet's "pts" once again:
    dp->pts = bufferQueue->prevPacketPTS;
  }
  bufferQueue->prevPacketWasSynchronized = hasBeenSynchronized;

  dp->pos = demuxer->filepos;
  demuxer->filepos += frameSize + headersize;

  // Signal any pending 'doEventLoop()' call on this queue:
  bufferQueue->blockingFlag = ~0;
}

static void onSourceClosure(void* clientData) {
  ReadBufferQueue* bufferQueue = (ReadBufferQueue*)clientData;
  demuxer_t* demuxer = bufferQueue->ourDemuxer();

  demuxer->stream->eof = 1;

  // Signal any pending 'doEventLoop()' call on this queue:
  bufferQueue->blockingFlag = ~0;
}

static demux_packet_t* getBuffer(demuxer_t* demuxer, demux_stream_t* ds,
                                 Boolean mustGetNewData,
                                 float& ptsBehind) {
  // Begin by finding the buffer queue that we want to read from:
  // (Get this from the RTP state, which we stored in
  //  the demuxer's 'priv' field)
  RTPState* rtpState = (RTPState*)(demuxer->priv);
  ReadBufferQueue* bufferQueue = NULL;
  int headersize = 0;
  TaskToken task;

  if (demuxer->stream->eof) return NULL;

  if (ds == demuxer->video) {
    bufferQueue = rtpState->videoBufferQueue;
    if (((sh_video_t*)ds->sh)->format == mmioFOURCC('H','2','6','4'))
      headersize = 3;
  } else if (ds == demuxer->audio) {
    bufferQueue = rtpState->audioBufferQueue;
    if (bufferQueue->readSource()->isAMRAudioSource())
      headersize = 1;
  } else {
    fprintf(stderr, "(demux_rtp)getBuffer: internal error: unknown stream\n");
    return NULL;
  }

  if (bufferQueue == NULL || bufferQueue->readSource() == NULL) {
    fprintf(stderr, "(demux_rtp)getBuffer failed: no appropriate RTP subsession has been set up\n");
    return NULL;
  }

  demux_packet_t* dp = NULL;
  if (!mustGetNewData) {
    // Check whether we have a previously-saved buffer that we can use:
    dp = bufferQueue->getPendingBuffer();
    if (dp != NULL) {
      ptsBehind = 0.0; // so that we always accept this data
      return dp;
    }
  }

  // Allocate a new packet buffer, and arrange to read into it:
  if (!bufferQueue->nextpacket) {
    dp = new_demux_packet(MAX_RTP_FRAME_SIZE);
    bufferQueue->dp = dp;
    if (dp == NULL) return NULL;
  }

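  // When built with libavcodec, H.264 payloads are additionally run through
  // libavcodec's AVParser below, so that complete frames (rather than raw RTP
  // packets) are delivered; a packet whose data produces parser output is
  // stashed in "nextpacket" and fed to the parser again on the next call.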
#ifdef CONFIG_LIBAVCODEC
  extern AVCodecParserContext * h264parserctx;
  int consumed, poutbuf_size = 1;
  const uint8_t *poutbuf = NULL;
  float lastpts = 0.0;

  do {
    if (!bufferQueue->nextpacket) {
#endif
    // Schedule the read operation:
    bufferQueue->blockingFlag = 0;
    bufferQueue->readSource()->getNextFrame(&dp->buffer[headersize], MAX_RTP_FRAME_SIZE - headersize,
                                            afterReading, bufferQueue,
                                            onSourceClosure, bufferQueue);
    // Block ourselves until data becomes available:
    TaskScheduler& scheduler
      = bufferQueue->readSource()->envir().taskScheduler();
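    // Wait at most 10 seconds (in microseconds) for a frame; near the
    // advertised end of the stream, shorten the timeout to 1 second: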
    int delay = 10000000;
    if (bufferQueue->prevPacketPTS * 1.05 > rtpState->mediaSession->playEndTime())
      delay /= 10;
    task = scheduler.scheduleDelayedTask(delay, onSourceClosure, bufferQueue);
    scheduler.doEventLoop(&bufferQueue->blockingFlag);
    scheduler.unscheduleDelayedTask(task);
    if (demuxer->stream->eof) {
      free_demux_packet(dp);
      return NULL;
    }

    if (headersize == 1) // amr
      dp->buffer[0] =
        ((AMRAudioSource*)bufferQueue->readSource())->lastFrameHeader();
#ifdef CONFIG_LIBAVCODEC
    } else {
      bufferQueue->dp = dp = bufferQueue->nextpacket;
      bufferQueue->nextpacket = NULL;
    }
    if (headersize == 3 && h264parserctx) { // h264
      consumed = h264parserctx->parser->parser_parse(h264parserctx,
                                                     avcctx,
                                                     &poutbuf, &poutbuf_size,
                                                     dp->buffer, dp->len);

      if (!consumed && !poutbuf_size)
        return NULL;

      if (!poutbuf_size) {
        lastpts=dp->pts;
        free_demux_packet(dp);
        bufferQueue->dp = dp = new_demux_packet(MAX_RTP_FRAME_SIZE);
      } else {
        bufferQueue->nextpacket = dp;
        bufferQueue->dp = dp = new_demux_packet(poutbuf_size);
        memcpy(dp->buffer, poutbuf, poutbuf_size);
        dp->pts=lastpts;
      }
    }
  } while (!poutbuf_size);
#endif

  // Set the "ptsBehind" result parameter:
  if (bufferQueue->prevPacketPTS != 0.0
      && bufferQueue->prevPacketWasSynchronized
      && *(bufferQueue->otherQueue) != NULL
      && (*(bufferQueue->otherQueue))->prevPacketPTS != 0.0
      && (*(bufferQueue->otherQueue))->prevPacketWasSynchronized) {
    ptsBehind = (*(bufferQueue->otherQueue))->prevPacketPTS
      - bufferQueue->prevPacketPTS;
  } else {
    ptsBehind = 0.0;
  }

  if (mustGetNewData) {
    // Save this buffer for future reads:
    bufferQueue->savePendingBuffer(dp);
  }

  return dp;
}

static void teardownRTSPorSIPSession(RTPState* rtpState) {
  MediaSession* mediaSession = rtpState->mediaSession;
  if (mediaSession == NULL) return;
  if (rtpState->rtspClient != NULL) {
    rtpState->rtspClient->teardownMediaSession(*mediaSession);
  } else if (rtpState->sipClient != NULL) {
    rtpState->sipClient->sendBYE();
  }
}

////////// "ReadBuffer" and "ReadBufferQueue" implementation:

ReadBufferQueue::ReadBufferQueue(MediaSubsession* subsession,
                                 demuxer_t* demuxer, char const* tag)
  : prevPacketWasSynchronized(False), prevPacketPTS(0.0), otherQueue(NULL),
    dp(NULL), nextpacket(NULL),
    pendingDPHead(NULL), pendingDPTail(NULL),
    fReadSource(subsession == NULL ? NULL : subsession->readSource()),
    fRTPSource(subsession == NULL ? NULL : subsession->rtpSource()),
    fOurDemuxer(demuxer), fTag(strdup(tag)) {
}

ReadBufferQueue::~ReadBufferQueue() {
  free((void *)fTag);

  // Free any pending buffers (that never got delivered):
  demux_packet_t* dp = pendingDPHead;
  while (dp != NULL) {
    demux_packet_t* dpNext = dp->next;
    dp->next = NULL;
    free_demux_packet(dp);
    dp = dpNext;
  }
}

void ReadBufferQueue::savePendingBuffer(demux_packet_t* dp) {
  // Keep this buffer around, until MPlayer asks for it later:
  if (pendingDPTail == NULL) {
    pendingDPHead = pendingDPTail = dp;
  } else {
    pendingDPTail->next = dp;
    pendingDPTail = dp;
  }
  dp->next = NULL;
}

demux_packet_t* ReadBufferQueue::getPendingBuffer() {
  demux_packet_t* dp = pendingDPHead;
  if (dp != NULL) {
    pendingDPHead = dp->next;
    if (pendingDPHead == NULL) pendingDPTail = NULL;

    dp->next = NULL;
  }

  return dp;
}

static int demux_rtp_control(struct demuxer *demuxer, int cmd, void *arg) {
  double endpts = ((RTPState*)demuxer->priv)->mediaSession->playEndTime();

  switch(cmd) {
    case DEMUXER_CTRL_GET_TIME_LENGTH:
      if (endpts <= 0)
        return DEMUXER_CTRL_DONTKNOW;
      *((double *)arg) = endpts;
      return DEMUXER_CTRL_OK;

    case DEMUXER_CTRL_GET_PERCENT_POS:
      if (endpts <= 0)
        return DEMUXER_CTRL_DONTKNOW;
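      // Approximate the playback position as the last video PTS seen,
      // relative to the session's advertised end time: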
      *((int *)arg) = (int)(((RTPState*)demuxer->priv)->videoBufferQueue->prevPacketPTS*100/endpts);
      return DEMUXER_CTRL_OK;

    default:
      return DEMUXER_CTRL_NOTIMPL;
  }
}

demuxer_desc_t demuxer_desc_rtp = {
  "LIVE555 RTP demuxer",
  "live555",
  "",
  "Ross Finlayson",
  "requires LIVE555 Streaming Media library",
  DEMUXER_TYPE_RTP,
  0, // no autodetect
  NULL,
  demux_rtp_fill_buffer,
  demux_open_rtp,
  demux_close_rtp,
  NULL,
  demux_rtp_control
};