/*
    Copyright 2004-2005 Chris Tallon

    This file is part of VOMP.

    VOMP is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    VOMP is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with VOMP; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
*/
21 #include "videowin.h"
\r
23 #include "dssourcefilter.h"
\r
24 #include "dsallocator.h"
\r
28 void AdjustWindow();
\r
// Constructor: null out the DirectShow interface pointers, create the mutex
// that serialises graph build/teardown against the feeder thread, and reset
// the A/V offset bookkeeping.
// NOTE(review): corrupted listing — the embedded original line numbers jump
// (36-42, 45, 48-53, 56-60 missing), so braces and several further member
// initialisations were lost; do not treat this fragment as complete.
32 VideoWin::VideoWin()
34 dsgraphbuilder=NULL;
35 dsmediacontrol=NULL;
43 dsvmrsurfnotify=NULL;
44 filtermutex=CreateMutex(NULL,FALSE,NULL);
46 offsetvideonotset=true;
47 offsetaudionotset=true;
54 cur_audio_media_sample=NULL;
55 cur_video_media_sample=NULL;
61 iframemode=false;//We are not in Iframe mode at begining
// Destructor: releases the filter mutex handle.
// NOTE(review): original lines 68-69 and 71-76 are missing from this listing —
// presumably braces plus additional teardown; confirm against the VOMP repo.
67 VideoWin::~VideoWin()
70 CloseHandle(filtermutex);
// One-time initialisation: record the video format and pretend to be a 16:9
// display internally.
// NOTE(review): fragment — braces, the 'initted' bookkeeping and the success
// return (original 78, 80-81, 83-85, 87-89) were dropped by the extraction.
77 int VideoWin::init(UCHAR tformat)
79 if (initted) return 0;
82 tvsize=Video::ASPECT16X9; //Internally Vomp should think we are a 16:9 TV
86 if (!setFormat(tformat)){ shutdown(); return 0; }
90 int VideoWin::setTVsize(UCHAR ttvsize)
\r
92 pseudotvsize=ttvsize;
\r
96 int VideoWin::setDefaultAspect()
\r
98 return setAspectRatio(Video::ASPECT4X3);
\r
// Shut the video subsystem down.
// NOTE(review): fragment — the actual teardown statements (original lines
// 104-107, presumably clearing 'initted' and returning) are missing.
101 int VideoWin::shutdown()
103 if (!initted) return 0;
// Select PAL or NTSC and set the nominal screen dimensions accordingly.
// NOTE(review): fragment — the 'format = tformat' assignment, the PAL branch
// header, the screenWidth assignments (presumably 720 for both) and the
// return are missing (original 112, 114-115, 117-120, 122-126).
108 int VideoWin::setFormat(UCHAR tformat)
110 if (!initted) return 0;
111 if ((tformat != PAL) && (tformat != NTSC)) return 0;
113 if (format == NTSC)
116 screenHeight = 480;
121 screenHeight = 576;
127 int VideoWin::setConnection(UCHAR tconnection)
\r
129 if (!initted) return 0;
\r
130 if ((tconnection != COMPOSITERGB) && (tconnection != SVIDEO)) return 0;
\r
131 connection = tconnection;
\r
136 int VideoWin::setAspectRatio(UCHAR taspectRatio)
\r
138 if (!initted) return 0;
\r
139 if ((taspectRatio != ASPECT4X3) && (taspectRatio != ASPECT16X9)) return 0;
\r
140 aspectRatio = taspectRatio;
\r
// Set the display mode (normal / letterbox / quarter / eighth / zoom / ...).
// NOTE(review): fragment — the trailing 'mode = tmode' assignment, any window
// size adjustment, and the return (original 153-160) are missing.
145 int VideoWin::setMode(UCHAR tmode)
147 if (!initted) return 0;
149 //if ((tmode == LETTERBOX) && (tvsize == ASPECT16X9)) return 0; // invalid mode
151 if ((tmode != NORMAL) && (tmode != LETTERBOX) && (tmode != UNKNOWN2) && (tmode != QUARTER) && (tmode != EIGHTH)
152 && (tmode != ZOOM) && (tmode != UNKNOWN6)) return 0;
161 int VideoWin::signalOff()
\r
166 int VideoWin::signalOn()
\r
171 int VideoWin::setSource()
\r
173 if (!initted) return 0;
\r
// Position the video window; QUARTER/EIGHTH picture-in-picture modes get a
// special offset.
// NOTE(review): fragment — the body of the QUARTER/EIGHTH branch and the rest
// of the function (original 182-187) are missing from this listing.
178 int VideoWin::setPosition(int x, int y)
180 if (!initted) return 0;
181 if (mode==QUARTER || mode==EIGHTH) {
188 int VideoWin::sync()
\r
190 if (!initted) return 0;
\r
195 #ifdef DS_DEBUG // This stuff would not included in vomp due to lincemse restrcitions
\r
196 #include "dshelper.h"
\r
201 int VideoWin::play()
\r
203 if (!initted) return 0;
\r
// Build and start the DirectShow playback graph: our push source filter,
// audio render path, VMR9 in renderless mode with a custom allocator, video
// via RenderEx, then attach the system reference clock and Run().
// NOTE(review): corrupted listing — nearly every failure branch lost its
// 'return 0;' / closing brace (original 217-219, 221, 228, 231, 233-235, 238,
// 240-244, 249, 251-252, 254, 257-258, 261, 264-265, 268, 270, 273-274,
// 278-281, 286, 288-289, 293-300, 303-305, 308, 311, 315-317). Kept verbatim;
// reconstruct against the VOMP repository before compiling.
207 int VideoWin::dsplay()
209 if (!initted) return 0;
212 //Build filter graph
215 if (hres=CoCreateInstance(CLSID_FilterGraph,NULL,CLSCTX_INPROC_SERVER,
216 IID_IGraphBuilder,(void**)&dsgraphbuilder)!=S_OK) {
220 AddToRot(dsgraphbuilder,&graphidentifier);
222 //This is just a try to see if building the graph works
223 // dsgraphbuilder->RenderFile(L"D:\\Projekte\\VTP Client\\test.mpa" ,NULL);
224 //So this is the real code, this prevents the feeder from calling noexisting objects!
225 WaitForSingleObject(filtermutex,INFINITE);
226 firstsynched=false;
227 sourcefilter=new DsSourceFilter(); //Creating our Source filter for pushing Data
229 if (hres=dsgraphbuilder->AddFilter(sourcefilter,L"Vomp Win Source Filter")!=S_OK) {
230 Log::getInstance()->log("VideoWin", Log::WARN , "Failed adding Vomp Source Filter!");
232 ReleaseMutex(filtermutex);
236 if (hres=dsgraphbuilder->Render(sourcefilter->GetPin(0)/*audio*/)!=S_OK) {
237 Log::getInstance()->log("VideoWin", Log::WARN , "Failed rendering audio!");
239 ReleaseMutex(filtermutex);
245 //We alloc the vmr9 as next step
246 if (hres=CoCreateInstance(CLSID_VideoMixingRenderer9,0,
247 CLSCTX_INPROC_SERVER,IID_IBaseFilter,(void**) &dsvmrrenderer)!=S_OK) {
248 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed creating VMR9 renderer!");
250 ReleaseMutex(filtermutex);
253 if (hres=dsgraphbuilder->AddFilter(dsvmrrenderer,L"VMR9")!=S_OK) {
255 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed adding VMR9 renderer!");
256 ReleaseMutex(filtermutex);
259 IVMRFilterConfig9* vmrfilconfig;
260 if (dsvmrrenderer->QueryInterface(IID_IVMRFilterConfig9,(void**)&vmrfilconfig)!=S_OK) {
262 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed getting VMR9 Filterconfig interface!");
263 ReleaseMutex(filtermutex);
266 vmrfilconfig->SetRenderingMode(VMR9Mode_Renderless);
267 vmrfilconfig->Release();
269 if (dsvmrrenderer->QueryInterface(IID_IVMRSurfaceAllocatorNotify9,(void**)& dsvmrsurfnotify)!=S_OK) {
271 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed getting VMR9 Surface Allocator interface!");
272 ReleaseMutex(filtermutex);
275 allocatorvmr=new DsAllocator();
276 dsvmrsurfnotify->AdviseSurfaceAllocator(NULL,allocatorvmr);
277 allocatorvmr->AdviseNotify(dsvmrsurfnotify);
282 /*VMR 9 stuff end */
283 IFilterGraph2*fg2=NULL;
284 if (dsgraphbuilder->QueryInterface(IID_IFilterGraph2,(void**)&fg2)!=S_OK) {
285 Log::getInstance()->log("VideoWin", Log::WARN , "Failed querying for FilterGraph2 Interface!");
287 ReleaseMutex(filtermutex);
290 if (hres=fg2->RenderEx(sourcefilter->GetPin(1)/*video*/,
291 AM_RENDEREX_RENDERTOEXISTINGRENDERERS,NULL)!=S_OK) {
292 Log::getInstance()->log("VideoWin", Log::WARN , "Failed rendering Video!");
295 ReleaseMutex(filtermutex);
301 if (hres=CoCreateInstance(CLSID_SystemClock,NULL,CLSCTX_INPROC_SERVER,
302 IID_IReferenceClock,(void**)&dsrefclock)!=S_OK) {
306 dsgraphbuilder->QueryInterface(IID_IMediaFilter,(void **) &dsmediafilter);
307 dsmediafilter->SetSyncSource(dsrefclock);
309 dsgraphbuilder->QueryInterface(IID_IMediaControl,(void **) &dsmediacontrol);
310 dsgraphbuilder->QueryInterface(IID_IBasicAudio,(void **) &dsbasicaudio);
312 dsmediacontrol->Run();
313 iframemode=false;//exit iframe mode
314 ReleaseMutex(filtermutex);
// Build a video-only DirectShow graph for trick-mode (I-frame) playback.
// Same structure as dsplay() but: no audio render path, and the clock is set
// to NULL so the graph runs as fast as possible.
// NOTE(review): corrupted listing — the failure-branch returns/braces and the
// function tail (original 327-329, 331, 336, 339, 341-344, 349, 351-353, 355,
// 358-359, 362, 365-366, 369, 371, 374-375, 379, 384, 386-387, 391-392,
// 394-398, 401-403, 406, 409, 412-415) are missing. Kept verbatim.
318 int VideoWin::EnterIframePlayback()
320 if (!initted) return 0;
322 iframemode=true;//enter iframe mode
323 //Build filter graph
325 if (hres=CoCreateInstance(CLSID_FilterGraph,NULL,CLSCTX_INPROC_SERVER,
326 IID_IGraphBuilder,(void**)&dsgraphbuilder)!=S_OK) {
330 AddToRot(dsgraphbuilder,&graphidentifier);
332 //So this is the real code, this prevents the feeder from calling noexisting objects!
333 WaitForSingleObject(filtermutex,INFINITE);
334 //firstsynched=false;
335 sourcefilter=new DsSourceFilter(); //Creating our Source filter for pushing Data
337 if (hres=dsgraphbuilder->AddFilter(sourcefilter,L"Vomp Win Source Filter")!=S_OK) {
338 Log::getInstance()->log("VideoWin", Log::WARN , "Failed adding Vomp Source Filter!");
340 ReleaseMutex(filtermutex);
345 //We alloc the vmr9 as next step
346 if (hres=CoCreateInstance(CLSID_VideoMixingRenderer9,0,
347 CLSCTX_INPROC_SERVER,IID_IBaseFilter,(void**) &dsvmrrenderer)!=S_OK) {
348 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed creating VMR9 renderer!");
350 ReleaseMutex(filtermutex);
354 if (hres=dsgraphbuilder->AddFilter(dsvmrrenderer,L"VMR9")!=S_OK) {
356 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed adding VMR9 renderer!");
357 ReleaseMutex(filtermutex);
360 IVMRFilterConfig9* vmrfilconfig;
361 if (dsvmrrenderer->QueryInterface(IID_IVMRFilterConfig9,(void**)&vmrfilconfig)!=S_OK) {
363 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed getting VMR9 Filterconfig interface!");
364 ReleaseMutex(filtermutex);
367 vmrfilconfig->SetRenderingMode(VMR9Mode_Renderless);
368 vmrfilconfig->Release();
370 if (dsvmrrenderer->QueryInterface(IID_IVMRSurfaceAllocatorNotify9,(void**)& dsvmrsurfnotify)!=S_OK) {
372 Log::getInstance()->log("VideoWin", Log::WARN ,"Failed getting VMR9 Surface Allocator interface!");
373 ReleaseMutex(filtermutex);
376 allocatorvmr=new DsAllocator();
377 dsvmrsurfnotify->AdviseSurfaceAllocator(NULL,allocatorvmr);
378 allocatorvmr->AdviseNotify(dsvmrsurfnotify);
380 /*VMR 9 stuff end */
381 IFilterGraph2*fg2=NULL;
382 if (dsgraphbuilder->QueryInterface(IID_IFilterGraph2,(void**)&fg2)!=S_OK) {
383 Log::getInstance()->log("VideoWin", Log::WARN , "Failed querying for FilterGraph2 Interface!");
385 ReleaseMutex(filtermutex);
388 if (hres=fg2->RenderEx(sourcefilter->GetPin(1)/*video*/,
389 AM_RENDEREX_RENDERTOEXISTINGRENDERERS,NULL)!=S_OK) {
390 Log::getInstance()->log("VideoWin", Log::WARN , "Failed rendering Video!");
393 ReleaseMutex(filtermutex);
399 /* if (hres=CoCreateInstance(CLSID_SystemClock,NULL,CLSCTX_INPROC_SERVER,
400 IID_IReferenceClock,(void**)&dsrefclock)!=S_OK) {
404 dsgraphbuilder->QueryInterface(IID_IMediaFilter,(void **) &dsmediafilter);
405 dsmediafilter->SetSyncSource(/*dsrefclock*/NULL); //Run as fast as you can!
407 dsgraphbuilder->QueryInterface(IID_IMediaControl,(void **) &dsmediacontrol);
408 dsgraphbuilder->QueryInterface(IID_IBasicAudio,(void **) &dsbasicaudio);
410 dsmediacontrol->Run();
411 ReleaseMutex(filtermutex);
// Stop the DirectShow graph.
// NOTE(review): fragment — the teardown statements (original 419-425,
// presumably stopping the graph / calling CleanupDS()) are missing.
416 int VideoWin::dsstop()
418 if (!initted) return 0;
426 int VideoWin::stop()
\r
428 if (!initted) return 0;
\r
// Reset the video subsystem.
// NOTE(review): fragment — the body (original 437-441) is missing.
434 int VideoWin::reset()
436 if (!initted) return 0;
// Reset the DirectShow side and leave I-frame trick mode.
// NOTE(review): fragment — original 445-446 and 448-452 (presumably the
// cleanup call and the return) are missing.
442 int VideoWin::dsreset()
444 if (!initted) return 0;
447 iframemode=false;//exit iframe mode
453 int VideoWin::dspause()
\r
455 if (!initted) return 0;
\r
456 if (dsmediacontrol) dsmediacontrol->Pause();
\r
460 int VideoWin::pause()
\r
462 if (!initted) return 0;
\r
467 int VideoWin::unPause() // FIXME get rid - same as play!!
\r
468 {//No on windows this is not the same, I don't get rid of!
\r
469 if (!initted) return 0;
\r
473 int VideoWin::dsunPause() // FIXME get rid - same as play!!
\r
474 {//No on windows this is not the same, I don't get rid of!
\r
475 if (!initted) return 0;
\r
476 if (dsmediacontrol) dsmediacontrol->Run();
\r
480 int VideoWin::fastForward()
\r
482 if (!initted) return 0;
\r
487 int VideoWin::unFastForward()
\r
489 if (!initted) return 0;
\r
494 int VideoWin::attachFrameBuffer()
\r
496 if (!initted) return 0;
\r
500 int VideoWin::blank(void)
\r
502 ((OsdWin*)Osd::getInstance())->Blank();
\r
// Compute the current presentation timestamp (90kHz PTS units) from the last
// delivered sample's PTS plus the reference-clock time elapsed since it,
// minus the source filter's start offset. Returns 0 in I-frame mode or when
// the clock/filter are not available.
// NOTE(review): fragment — the declaration of 'cr_time' (original 514), the
// 'ncr_time = cr_time' style assignment (516) and the final 'return result;'
// (524-527) are missing; as shown, ncr_time would be used uninitialised.
506 ULLONG VideoWin::getCurrentTimestamp()
508 REFERENCE_TIME startoffset;
509 REFERENCE_TIME ncr_time;
510 if (iframemode) return 0; //Not in iframe mode!
511 if (!dsrefclock || !sourcefilter) return 0;
512 FILTER_STATE state;
513 sourcefilter->GetState(10,&state);
515 if (state==State_Running) dsrefclock->GetTime(&cr_time);
517 startoffset=sourcefilter->getStartOffset();
518 ncr_time-=startoffset;
519 ncr_time-=lastreftimeRT;
520 /* ULLONG result=frameNumberToTimecode(
521 VDR::getInstance()->frameNumberFromPosition(lastreftimeBYTE));*/
522 ULLONG result=lastreftimePTS;
523 result+=(ULLONG)(ncr_time/10000LL*90LL);
528 ULONG VideoWin::timecodeToFrameNumber(ULLONG timecode)
\r
530 if (format == PAL) return (ULONG)(((double)timecode / (double)90000) * (double)25);
\r
531 else return (ULONG)(((double)timecode / (double)90000) * (double)30);
\r
534 ULLONG VideoWin::frameNumberToTimecode(ULONG framenumber)
\r
536 if (format == PAL) return (ULLONG)(((double)framenumber * (double)90000) / (double)25);
\r
537 else return (ULLONG)(((double)framenumber * (double)90000) / (double)30);
\r
// Tear down the DirectShow graph under the filter mutex: stop the graph,
// release any checked-out media samples, then release every interface in
// reverse order of acquisition. The graph builder owns and destroys our
// source filter, so only the pointer is nulled.
// NOTE(review): fragment — several closing braces, 'dsbasicaudio=NULL', the
// guard around the dsrefclock release and its NULL-ing, and other lines
// (original 547, 551, 554-555, 559, 563-564, 567-570, 572-573, 577-580, 585,
// 587, 589, 593, 595-597) are missing. Kept verbatim.
540 void VideoWin::CleanupDS()
542 WaitForSingleObject(filtermutex,INFINITE);
543 if (dsmediacontrol)dsmediacontrol->Stop();
544 if (cur_audio_media_sample) {
545 cur_audio_media_sample->Release();
546 cur_audio_media_sample=NULL;
548 if (cur_video_media_sample) {
549 cur_video_media_sample->Release();
550 cur_video_media_sample=NULL;
552 if (dsbasicaudio) {
553 dsbasicaudio->Release();
556 if (dsvmrsurfnotify) {
557 dsvmrsurfnotify->Release();
558 dsvmrsurfnotify=NULL;
560 if (dsvmrrenderer) {
561 dsvmrrenderer->Release();
562 dsvmrrenderer=NULL;
565 if (allocatorvmr) {
566 allocatorvmr->Release();
571 dsrefclock->Release();
574 if (dsmediafilter) {
575 dsmediafilter->Release();
576 dsmediafilter=NULL;
581 if (dsmediacontrol) {
582 dsmediacontrol->Stop();
583 dsmediacontrol->Release();
584 dsmediacontrol=NULL;
586 if (dsgraphbuilder){
588 RemoveFromRot(graphidentifier);
590 dsgraphbuilder->Release();
591 dsgraphbuilder=NULL;
592 sourcefilter=NULL; //The Graph Builder destroys our SourceFilter
594 ReleaseMutex(filtermutex);
// Latch the packet at the head of the demuxer's list into 'mediapacket' so
// DeliverMediaSample() can stream it out.
// NOTE(review): fragment — the second line of the parameter list (original
// 599) and the braces are missing, so the full signature cannot be recovered
// from this listing alone; check the Video base class declaration.
598 void VideoWin::PrepareMediaSample(const MediaPacketList& mplist,
601 mediapacket = mplist.front();
604 UINT VideoWin::DeliverMediaSample(const UCHAR* buffer, UINT *samplepos)
\r
606 DeliverMediaPacket(mediapacket, buffer, samplepos);
\r
607 if (*samplepos == mediapacket.length) {
\r
// Copy one demuxed PES packet into the current video IMediaSample: strip the
// PES header on the first chunk, stamp time/sync/discontinuity/preroll flags
// on the first bytes of a sample, deliver full samples, and report how many
// bytes were consumed.
// NOTE(review): heavily corrupted fragment — the signature tail ('UINT
// *samplepos)' etc., original 616-617), the iframemode guard around line 624,
// variable declarations (ms_pos/ms_length/haveToCopy/ms_buf), and many
// braces/else lines (634-637, 639, 648-649, 652-653, 657-663, 665-668,
// 675-678, 682, 684-685, 689, 691, 702-703, 708, 710-713, 716, 718, 720-722,
// 726-727) are missing. Kept verbatim; do not compile as-is.
614 UINT VideoWin::DeliverMediaPacket(MediaPacket packet,
615 const UCHAR* buffer,
618 /*First Check, if we have an audio sample*/
620 /*First Check, if we have an audio sample*/
624 return 0; //Not in iframe mode!
626 IMediaSample* ms=NULL;
627 REFERENCE_TIME reftime1=0;
628 REFERENCE_TIME reftime2=0;
630 UINT headerstrip=0;
631 if (packet.disconti) {
632 firstsynched=false;
633 DeliverVideoMediaSample();
638 /*Inspect PES-Header */
640 if (*samplepos==0) {//stripheader
641 headerstrip=buffer[packet.pos_buffer+8]+9/*is this right*/;
642 *samplepos+=headerstrip;
643 if ( packet.synched ) {
644 DeliverVideoMediaSample();//write out old data
645 /* if (packet.presentation_time<0) { //Preroll?
646 *samplepos=packet.length;//if we have not processed at least one
647 return packet.length;//synched packet ignore it!
650 reftime1=packet.presentation_time;
651 reftime2=reftime1+1;
654 if (!firstsynched) {//
655 *samplepos=packet.length;//if we have not processed at least one
656 return packet.length;//synched packet ignore it!
664 if (!getCurrentVideoMediaSample(&ms) || ms==NULL) {// get the current sample
669 ms_pos=ms->GetActualDataLength();
670 ms_length=ms->GetSize();
671 haveToCopy=min(ms_length-ms_pos,packet.length-*samplepos);
672 if ((ms_length-ms_pos)<1) {
673 DeliverVideoMediaSample(); //we are full!
674 if (!getCurrentVideoMediaSample(&ms) || ms==NULL) {// get the current sample
679 ms_pos=ms->GetActualDataLength();
680 ms_length=ms->GetSize();
681 haveToCopy=min(ms_length-ms_pos,packet.length-*samplepos);
683 ms->GetPointer(&ms_buf);
686 if (ms_pos==0) {//will only be changed on first packet
687 if (packet.disconti) {
688 ms->SetDiscontinuity(TRUE);
690 ms->SetDiscontinuity(FALSE);
692 if (packet.synched) {
693 ms->SetSyncPoint(TRUE);
694 ms->SetTime(&reftime1,&reftime2);
695 //ms->SetTime(NULL,NULL);
696 ms->SetMediaTime(NULL, NULL);
697 if (reftime1<0) ms->SetPreroll(TRUE);
698 else ms->SetPreroll(FALSE);
699 /*Timecode handling*/
700 lastreftimeRT=reftime1;
701 lastreftimePTS=packet.pts;
704 ms->SetSyncPoint(FALSE);
705 ms->SetTime(NULL,NULL);
706 ms->SetMediaTime(NULL, NULL);
707 ms->SetPreroll(FALSE);
709 // ms->SetSyncPoint(TRUE);
714 memcpy(ms_buf+ms_pos,buffer+packet.pos_buffer+*samplepos,haveToCopy);
715 ms->SetActualDataLength(haveToCopy+ms_pos);
717 *samplepos+=haveToCopy;
719 return haveToCopy+headerstrip;
723 *samplepos+=packet.length;
724 MILLISLEEP(0); //yet not implemented//bad idea
725 return packet.length;
729 int VideoWin::getCurrentAudioMediaSample(IMediaSample** ms)
\r
731 //WaitForSingleObject(filtermutex,INFINITE);
\r
732 if (!sourcefilter){
\r
733 // ReleaseMutex(filtermutex);
\r
736 if (cur_audio_media_sample) {
\r
737 *ms=cur_audio_media_sample;//already open
\r
740 if (!sourcefilter->getCurrentAudioMediaSample(ms)) {
\r
741 // ReleaseMutex(filtermutex);
\r
743 if (*ms) (*ms)->SetActualDataLength(0);
\r
744 cur_audio_media_sample=*ms;
\r
745 //Don't release the mutex before deliver
\r
749 int VideoWin::getCurrentVideoMediaSample(IMediaSample** ms)
\r
751 //WaitForSingleObject(filtermutex,INFINITE);
\r
752 if (!sourcefilter){
\r
753 // ReleaseMutex(filtermutex);
\r
756 if (cur_video_media_sample) {
\r
757 *ms=cur_video_media_sample;//already open
\r
760 if (!sourcefilter->getCurrentVideoMediaSample(ms)) {
\r
761 // ReleaseMutex(filtermutex);
\r
763 if (*ms) (*ms)->SetActualDataLength(0);
\r
765 cur_video_media_sample=*ms;
\r
766 //Don't release the mutex before deliver
\r
770 int VideoWin::DeliverAudioMediaSample(){
\r
771 if (cur_audio_media_sample) {
\r
772 sourcefilter->DeliverAudioMediaSample(cur_audio_media_sample);
\r
773 cur_audio_media_sample=NULL;
\r
775 //ReleaseMutex(filtermutex);
\r
779 int VideoWin::DeliverVideoMediaSample(){
\r
780 if (cur_video_media_sample) {
\r
781 sourcefilter->DeliverVideoMediaSample(cur_video_media_sample);
\r
782 cur_video_media_sample=NULL;
\r
784 //ReleaseMutex(filtermutex);
\r
// Track the video PTS stream: set the shared start offset on first call,
// resync (adjusting startoffset and the audio reference) when the PTS jumps
// by more than one second (10000000 in 100ns units), and return the offset.
// NOTE(review): fragment — braces and the '*rsync' updates (original 789-790,
// 795-797, 800-801, 806, 808-813, 815) are missing, so the resync signalling
// to the caller cannot be recovered from this listing.
788 long long VideoWin::SetStartOffset(long long curreftime, bool *rsync)
791 if (offsetnotset) {
792 startoffset=curreftime;//offset is set for audio
793 offsetnotset=false;
794 offsetvideonotset=false;
798 if (offsetvideonotset) {
799 offsetvideonotset=false;
802 if ( (curreftime-lastrefvideotime)>10000000LL
803 || (curreftime-lastrefvideotime)<-10000000LL) {//if pts jumps to big resync
804 startoffset+=curreftime-lastrefvideotime;
805 lastrefaudiotime+=curreftime-lastrefvideotime;
807 offsetaudionotset=true;
814 lastrefvideotime=curreftime;
816 return startoffset;
// Audio-side twin of SetStartOffset(): set the shared start offset on first
// call, resync on PTS jumps > 1s (adjusting the video reference too), and
// return the offset.
// NOTE(review): fragment — braces and the '*rsync' updates (original 821-822,
// 827, 830-831, 836, 838-842, 845-846) are missing from this listing.
820 long long VideoWin::SetStartAudioOffset(long long curreftime, bool *rsync)
823 if (offsetnotset) {
824 startoffset=curreftime;
825 offsetnotset=false;
826 offsetaudionotset=false;
828 if (offsetaudionotset) {
829 offsetaudionotset=false;
832 if ( (curreftime-lastrefaudiotime)>10000000LL
833 || (curreftime-lastrefaudiotime)<-10000000LL) {//if pts jumps to big resync
834 startoffset+=curreftime-lastrefaudiotime;
835 lastrefvideotime+=curreftime-lastrefaudiotime;
837 offsetvideonotset=true;
843 lastrefaudiotime=curreftime;
844 return startoffset;
// Forget all A/V offset state so the next packets re-establish sync; called
// from the demuxer on stream changes.
// NOTE(review): fragment — original 854-859 are missing (possibly further
// resets, e.g. lastreftimeRT/lastreftimePTS, plus the closing brace).
847 void VideoWin::ResetTimeOffsets() {
848 offsetnotset=true; //called from demuxer
849 offsetvideonotset=true;
850 offsetaudionotset=true;
852 lastrefaudiotime=0;
853 lastrefvideotime=0;
860 void VideoWin::SetAudioVolume(long volume)
\r
862 if (dsbasicaudio) dsbasicaudio->put_Volume(volume);
\r
// Trick-mode path: strip PES headers (stream ids 0xE0-0xEF) from a buffer of
// I-frame data and push the raw ES payload into video media samples, flushing
// a sample whenever it fills. Enters I-frame playback (video-only graph, no
// clock) on first use.
// NOTE(review): heavily corrupted fragment — declarations (ms_buf, 'first'),
// braces and several statements (original 866, 868-870, 875-879, 882, 887,
// 892, 895-897, 901-903, 911, 913-916, 919, 923, 925, 928-931, 938-940,
// 944-948) are missing. The trailing lines 941-943 reference 'samplepos' and
// 'packet', which are not parameters of this function — they appear to have
// been spliced in from another function by the corruption.
865 void VideoWin::displayIFrame(const UCHAR* buffer, UINT length)
867 if (!iframemode) EnterIframePlayback();
871 IMediaSample* ms=NULL;
872 REFERENCE_TIME reftime1=0;
873 REFERENCE_TIME reftime2=0;
874 if (!getCurrentVideoMediaSample(&ms) || ms==NULL) {// get the current sample
880 ms->GetPointer(&ms_buf);
881 ms_length=ms->GetSize();
883 /*First Check, if we have an video sample*/
884 DWORD read_pos = 0, write_pos = 0;
885 DWORD pattern, packet_length;
886 DWORD headerstrip=0;
888 if (length < 4) return ;
889 //Now we strip the pes header
890 pattern = (buffer[0] << 16) | (buffer[1] << 8) | (buffer[2]);
891 while (read_pos + 7 <= length)
893 pattern = ((pattern << 8) & 0xFFFFFFFF) | buffer[read_pos+3];
894 if (pattern < 0x000001E0 || pattern > 0x000001EF)
898 headerstrip=buffer[read_pos+8]+9/*is this right*/;
899 packet_length = ((buffer[read_pos+4] << 8) | (buffer[read_pos+5])) + 6;
900 if (read_pos + packet_length > length)
904 if ((write_pos+packet_length-headerstrip)>ms_length) {
905 if (first) {ms->SetSyncPoint(TRUE);first=false;}
906 else ms->SetSyncPoint(FALSE);
907 ms->SetTime(NULL,NULL);
908 ms->SetMediaTime(NULL, NULL);
909 ms->SetActualDataLength(write_pos);
910 DeliverVideoMediaSample();
912 if (!getCurrentVideoMediaSample(&ms) || ms==NULL) {// get the current sample
917 ms_length=ms->GetSize();
918 ms->GetPointer(&ms_buf);
920 if (packet_length-headerstrip>0) {
921 memcpy(ms_buf+write_pos, buffer+read_pos+headerstrip, packet_length-headerstrip);
922 write_pos += packet_length-headerstrip;
924 read_pos += packet_length;
926 pattern = (buffer[read_pos] << 16) | (buffer[read_pos+1] << 8)
927 | (buffer[read_pos+2]);
932 if (first) {ms->SetSyncPoint(TRUE);first=false;}
933 else ms->SetSyncPoint(FALSE);
934 ms->SetTime(NULL,NULL);
935 ms->SetMediaTime(NULL, NULL);
936 ms->SetActualDataLength(write_pos);
937 DeliverVideoMediaSample();
941 *samplepos+=packet.length;
942 MILLISLEEP(0); //yet not implemented//bad idea
943 return packet.length;
// Debug/test hook.
// NOTE(review): body (original lines 950-953) is missing from this listing.
949 int VideoWin::test()
954 int VideoWin::test2()
\r