From d71ea45f8ec75f89f5cc4c55e651f109b59b6461 Mon Sep 17 00:00:00 2001 From: Marten Richter Date: Sun, 3 Jun 2012 20:04:48 +0200 Subject: [PATCH] ffmpeg softdecoding only Y (bw) --- osdopengl.cc | 141 ++++++++++++---- osdopengl.h | 19 ++- videovpeogl.cc | 437 ++++++++++++++++++++++++++++++++++++++----------- videovpeogl.h | 45 ++++- 4 files changed, 504 insertions(+), 138 deletions(-) diff --git a/osdopengl.cc b/osdopengl.cc index f4ba7f7..682a4e9 100755 --- a/osdopengl.cc +++ b/osdopengl.cc @@ -30,6 +30,7 @@ #include "shaders/generic__vertex_shader.h" #include "shaders/osd__frag_shader.h" +#include "shaders/frame__frag_shader.h" #define BACKBUFFER_WIDTH 1920 #define BACKBUFFER_HEIGHT 1080 @@ -56,6 +57,7 @@ OsdOpenGL::OsdOpenGL() osd_shader=0; gen_shader=0; osd_program=0; + frame_program=0; #ifdef BENCHMARK_FPS last_benchmark_time=getTimeMS(); @@ -123,7 +125,7 @@ int OsdOpenGL::init(void* device) const EGLint attributs[]={ EGL_RED_SIZE,8,EGL_GREEN_SIZE, 8,EGL_BLUE_SIZE, 8,EGL_ALPHA_SIZE, 8, - EGL_SURFACE_TYPE, EGL_WINDOW_BIT, + EGL_SURFACE_TYPE, EGL_WINDOW_BIT|EGL_PBUFFER_BIT, EGL_CONFORMANT, EGL_OPENGL_ES2_BIT, EGL_NONE }; // Here, we might have to select the resolution! @@ -218,7 +220,7 @@ int OsdOpenGL::init(void* device) gen_shader=CreateShader(generic_vertex_shader, GL_VERTEX_SHADER); osd_shader=CreateShader(osd_frag_shader, GL_FRAGMENT_SHADER); - + // Create the program for osd rendering osd_program=glCreateProgram(); if (osd_program==0) { Log::getInstance()->log("OSD", Log::WARN, "Creating glsl program failed!%d",glGetError()); @@ -243,6 +245,37 @@ int OsdOpenGL::init(void* device) glDeleteProgram(osd_program); return 0; } + // create the program for yuv frame rendering + frame_shader=CreateShader(frame_frag_shader, GL_FRAGMENT_SHADER); + + frame_program=glCreateProgram(); + if (frame_program==0) { + Log::getInstance()->log("OSD", Log::WARN, "Creating glsl program failed!%d",glGetError()); + return 0; + } + glAttachShader(frame_program,gen_shader); + glAttachShader(frame_program,frame_shader); + glBindAttribLocation(frame_program,0,"vec_pos"); + glBindAttribLocation(frame_program,1,"tex_coord"); + + frame_sampler_locY=glGetUniformLocation(frame_program,"textureY"); + //frame_sampler_locU=glGetUniformLocation(frame_program,"textureU"); + // frame_sampler_locV=glGetUniformLocation(frame_program,"textureV"); + + glLinkProgram(frame_program); + //GLint link_status; + glGetShaderiv(frame_program,GL_LINK_STATUS, &link_status); + + if (!link_status) { + char buffer[1024]; + glGetProgramInfoLog(frame_program,1024,NULL,buffer); + Log::getInstance()->log("OSD", Log::WARN, "Compiling Programm failed!"); + Log::getInstance()->log("OSD", Log::WARN, "%s",buffer); + glDeleteProgram(frame_program); + return 0; + } + + glClearColor(0.0f,0.0f,0.0f,1.f); @@ -373,32 +406,34 @@ void OsdOpenGL::threadMethod() //glmutex.Lock(); //glmutex.Unlock(); + VPEOGLFrame *frame=NULL; + struct timespec ts; + ts.tv_sec=0; + ts.tv_nsec=0; + VideoVPEOGL* video =(VideoVPEOGL*) Video::getInstance(); while (true) { + ts.tv_nsec=10*1000000LL; unsigned int waittime=10; if (initted){ - // if (evrstate==EVR_pres_off || evrstate==EVR_pres_pause) - // { - Render(); - //TODO get surfaces from Video object - /* } else if (evrstate==EVR_pres_started) - { - LPDIRECT3DSURFACE9 surf; - if (dsallocator) dsallocator->GetNextSurface(&surf,&waittime); - if (surf==NULL) - { - Render(); - } - else - { - RenderDS(surf); - surf->Release(); - if (dsallocator) dsallocator->DiscardSurfaceandgetWait(&waittime); - } - }*/ + if (!frame) 
frame=video->getReadyOGLFrame(); + if (frame) { + InternalRendering(frame); + lastrendertime=getTimeMS(); + video->returnOGLFrame(frame); //please recycle it + frame=NULL; + } else { + long long time1=getTimeMS(); + if ((time1-lastrendertime)>200) {//5 fps for OSD updates are enough, avoids tearing + InternalRendering(NULL); + lastrendertime=getTimeMS(); + } + } + frame=video->getReadyOGLFrame(); } + if (frame) ts.tv_nsec=0; threadCheckExit(); - if (waittime!=0) MILLISLEEP(min(10,waittime)); + if (ts.tv_nsec!=0) threadWaitForSignalTimed(&ts); //Sleep(1); } //eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT ); @@ -411,14 +446,14 @@ void OsdOpenGL::threadPostStopCleanup() //goo; } - +/* // This function is called from the WinMain function in order to get Screen updates void OsdOpenGL::Render() { if (!initted) return ; + VPEOGLFrame* frame=NULL; if (external_driving) { long long time1=getTimeMS(); - if ((time1-lastrendertime)>200) {//5 fps for OSD updates are enough, avoids tearing InternalRendering(NULL); lastrendertime=getTimeMS(); @@ -429,7 +464,7 @@ void OsdOpenGL::Render() struct timespec ts; clock_gettime(CLOCK_MONOTONIC, &ts); long long time1=ts.tv_sec*1000+ts.tv_nsec/1000000LL; - if ((time1-lastrendertime)>50) {//10 fps for OSD updates are enough, avoids tearing + if ((time1-lastrendertime)>100) {//10 fps for OSD updates are enough, avoids tearing InternalRendering(NULL); lastrendertime=getTimeMS(); } else { @@ -440,16 +475,16 @@ void OsdOpenGL::Render() } } -void OsdOpenGL::RenderDS(GLuint present){ +void OsdOpenGL::RenderDS(VPEOGLFrame* frame){ if (!initted) return; if (external_driving) { - InternalRendering(present); + InternalRendering(frame); lastrendertime=getTimeMS(); } -} +}*/ -void OsdOpenGL::InternalRendering(GLuint present){ +void OsdOpenGL::InternalRendering(VPEOGLFrame* frame){ BeginPainting(); /* HRESULT losty=d3ddevice->TestCooperativeLevel(); if (losty==D3DERR_DEVICELOST) { @@ -528,16 +563,58 @@ void OsdOpenGL::InternalRendering(GLuint present){ glViewport(0, 0, display_width,display_height); glClear(GL_COLOR_BUFFER_BIT); - glUseProgram(osd_program); + + + + + if (frame) { + glUseProgram(frame_program); + // Log::getInstance()->log("OSD", Log::WARN, "mark1 glerror %x",glGetError()); + glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices); + glEnableVertexAttribArray(0); + glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u)); + glEnableVertexAttribArray(1); + glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices); + glEnableVertexAttribArray(0); + glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u)); + glEnableVertexAttribArray(1); + + glActiveTexture(GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D,frame->textures[0]); + //Log::getInstance()->log("OSD", Log::WARN, "mark2 glerror %x",glGetError()); + // glActiveTexture(GL_TEXTURE1); + // Log::getInstance()->log("OSD", Log::WARN, "mark3 glerror %x",glGetError()); + // glBindTexture(GL_TEXTURE_2D,frame->textures[1]); + // Log::getInstance()->log("OSD", Log::WARN, "mark4 glerror %x",glGetError()); + // glActiveTexture(GL_TEXTURE2); + // Log::getInstance()->log("OSD", Log::WARN, "mark5 glerror %x",glGetError()); + // glBindTexture(GL_TEXTURE_2D,frame->textures[2]); + + // Log::getInstance()->log("OSD", Log::WARN, "mark6 glerror %x",glGetError()); + glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); + // Log::getInstance()->log("OSD", Log::WARN, "mark7 glerror 
%x",glGetError()); + glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); + //Log::getInstance()->log("OSD", Log::WARN, "mark8 glerror %x",glGetError()); + + glUniform1i(frame_sampler_locY,0); + //glUniform1i(frame_sampler_locU,1); + //glUniform1i(frame_sampler_locV,2); + + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + glEnable(GL_BLEND); + glBlendFuncSeparate (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA,GL_ZERO,GL_ONE); + + } + + glUseProgram(osd_program); glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices); glEnableVertexAttribArray(0); glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u)); glEnableVertexAttribArray(1); - - glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D,((SurfaceOpenGL*)screen)->getTexture()); diff --git a/osdopengl.h b/osdopengl.h index ccdd317..f92cadc 100755 --- a/osdopengl.h +++ b/osdopengl.h @@ -34,6 +34,7 @@ #include "log.h" #include "threadp.h" #include "mutex.h" +#include "videovpeogl.h" #define BENCHMARK_FPS @@ -81,22 +82,25 @@ class OsdOpenGL : public Osd, public Thread_TYPE // This function is called from the threadMethod function in order to get Screen updates - void Render(); - void RenderDS(GLuint present); +/* void Render(); + void RenderDS(VPEOGLFrame* frame);*/ void BeginPainting(); void EndPainting(); void setExternalDriving(/*DsAllocator* dsall,*/ unsigned int width, unsigned int height); void Blank(); - void getEGLObjs(EGLDisplay *i_egl_display,EGLSurface *i_egl_surface,EGLContext *i_egl_context){ + void getEGLObjs(EGLDisplay *i_egl_display,EGLSurface *i_egl_surface,EGLContext *i_egl_context, EGLConfig *i_egl_config){ *i_egl_display=egl_display; *i_egl_surface=egl_surface; *i_egl_context=egl_context; + *i_egl_config=egl_ourconfig; }; EGLConfig getEGLConfig() {return egl_ourconfig;}; + void AdviseAboutNewFrame() {threadSignal();}; + private: @@ -112,7 +116,7 @@ private: bool external_driving; Mutex glmutex; long long lastrendertime; - void InternalRendering(GLuint present); + void InternalRendering(VPEOGLFrame* frame); bool DoLost(); void InitVertexBuffer(float scalex,float scaley); OSDVERTEX osdvertices[4]; @@ -124,6 +128,13 @@ private: GLuint osd_program; + GLuint frame_shader; + GLuint frame_program; + + GLint frame_sampler_locY; + GLint frame_sampler_locU; + GLint frame_sampler_locV; + GLint osd_sampler_loc; diff --git a/videovpeogl.cc b/videovpeogl.cc index 8d944d8..1ef6d93 100755 --- a/videovpeogl.cc +++ b/videovpeogl.cc @@ -43,7 +43,9 @@ VideoVPEOGL::VideoVPEOGL() #ifdef VPE_FFMPEG_SUPPORT mpeg2codec_context_ff=NULL; - dec_frame_ff=NULL; + ffmpeg_running=false; + dec_frame_ff_uploading=NULL; + dec_frame_ff_decoding=NULL; #endif } @@ -75,7 +77,10 @@ int VideoVPEOGL::init(UCHAR tformat) /* new stuff */ - stop(); + + + //stop(); + return 1; @@ -86,12 +91,36 @@ int VideoVPEOGL::initUsingOSDObjects() EGLDisplay i_egl_display; EGLSurface i_egl_surface; EGLContext i_egl_context; + EGLConfig i_egl_config; OsdOpenGL *osd=(OsdOpenGL*)osd->getInstance(); - osd->getEGLObjs(&i_egl_display,&i_egl_surface,&i_egl_context); + osd->getEGLObjs(&i_egl_display,&i_egl_surface,&i_egl_context, &i_egl_config); + const EGLint attr_context[]={ + EGL_CONTEXT_CLIENT_VERSION,2, + EGL_NONE + }; egl_display=i_egl_display; - egl_surface=i_egl_surface; - egl_context=i_egl_context; + egl_context=eglCreateContext(egl_display,i_egl_config,i_egl_context,attr_context); + if (egl_context==EGL_NO_CONTEXT) { + Log::getInstance()->log("Video", Log::WARN, "Creating egl context failed! 
%d",eglGetError()); + return 0; + } + // We create a dummy surface here, in order to allow two contexts + const EGLint attr_pbuffer[]={ + EGL_WIDTH, 1, EGL_HEIGHT,1, + EGL_NONE + }; + egl_surface=eglCreatePbufferSurface(egl_display,i_egl_config,attr_pbuffer); + if (egl_surface==EGL_NO_SURFACE) { + Log::getInstance()->log("Video", Log::WARN, "Creating egl pbuffer failed! %d",eglGetError()); + return 0; + } + + + + + //egl_surface=i_egl_surface; + //egl_context=i_egl_context; #ifdef VPE_OMX_SUPPORT @@ -118,6 +147,7 @@ int VideoVPEOGL::initUsingOSDObjects() } #endif + threadStart(); return 1; } @@ -187,6 +217,7 @@ int VideoVPEOGL::shutdown() { if (!initted) return 0; initted = 0; + threadCancel(); decoding_backend=0; #ifdef VPE_OMX_SUPPORT @@ -201,6 +232,167 @@ int VideoVPEOGL::shutdown() return 1; } +int VideoVPEOGL::AllocateYUVOglTexture(VPEOGLFrame* outframe,int width,int height,int stride) +{ + Log::getInstance()->log("Video", Log::NOTICE, "Allocate ogl texture"); + // Y + glGenTextures(1, &outframe->textures[0]); + glBindTexture(GL_TEXTURE_2D, outframe->textures[0]); + glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride, height, 0, GL_LUMINANCE, + GL_UNSIGNED_BYTE, NULL); + // U + glGenTextures(1, &outframe->textures[1]); + glBindTexture(GL_TEXTURE_2D, outframe->textures[1]); + glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride>>1, height>>1, 0, GL_LUMINANCE, + GL_UNSIGNED_BYTE, NULL); + // V + glGenTextures(1, &outframe->textures[2]); + glBindTexture(GL_TEXTURE_2D, outframe->textures[2]); + glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride>>1, height>>1, 0, GL_LUMINANCE, + GL_UNSIGNED_BYTE, NULL); + outframe->height=height; + outframe->width=width; + outframe->stride=stride; + return 1; +} + + +VPEOGLFrame *VideoVPEOGL::getReadyOGLFrame(){ + VPEOGLFrame *return_obj=NULL; + ogl_frame_mutex.Lock(); + if (ready_ogl_frames.size()>0) { + return_obj=ready_ogl_frames.front(); + ready_ogl_frames.pop_front(); + ogl_frame_outside=true; + } + ogl_frame_mutex.Unlock(); + return return_obj; +} + +void VideoVPEOGL::returnOGLFrame(VPEOGLFrame *frame) +{ + ogl_frame_mutex.Lock(); + if (frame) { + ogl_frame_outside=false; + free_ogl_frames.push_back(frame); + } + ogl_frame_mutex.Unlock(); +} + +void VideoVPEOGL::threadMethod() +{ + if (eglMakeCurrent(egl_display, egl_surface, egl_surface, egl_context)== EGL_FALSE) { + Log::getInstance()->log("Video", Log::WARN, "Making egl Current failed in thread %d",eglGetError()); + return; + } + while (1) { + bool sleep=true; +#ifdef VPE_FFMPEG_SUPPORT + dec_frame_ff_mutex.Lock(); + if (dec_frame_ff_upload_pending.size()>0) { + dec_frame_ff_uploading=dec_frame_ff_upload_pending.front(); + dec_frame_ff_upload_pending.pop_front(); + if (dec_frame_ff_upload_pending.size()>0) sleep=false; + } + dec_frame_ff_mutex.Unlock(); + if (dec_frame_ff_uploading) { + int width,height,pixfmt; + //First get a free ogl image + VPEOGLFrame* out_frame=NULL; + while (!out_frame) { + ogl_frame_mutex.Lock(); + if (all_ogl_frames.size()==0) { + ogl_frame_mutex.Unlock(); break; + } + + if (free_ogl_frames.size()>0) { + width=ffwidth; + height=ffheight; + pixfmt=ffpixfmt; + out_frame=free_ogl_frames.front(); + free_ogl_frames.pop_front(); + } else MILLISLEEP(2); + ogl_frame_mutex.Unlock(); + } + bool failed=false; + if (out_frame) { + if (out_frame->textures[0]==0 || out_frame->width!=width || + out_frame->height!=height || out_frame->stride!=dec_frame_ff_uploading->linesize[0]) { + if (out_frame->textures[0]==0) { + glDeleteTextures(1,&out_frame->textures[0]); + out_frame->textures[0]=0; 
+ } + if (out_frame->textures[1]==0) { + glDeleteTextures(1,&out_frame->textures[1]); + out_frame->textures[1]=0; + } + if (out_frame->textures[2]==0) { + glDeleteTextures(1,&out_frame->textures[2]); + out_frame->textures[2]=0; + } + if (!AllocateYUVOglTexture(out_frame,width,height,dec_frame_ff_uploading->linesize[0])) failed=true; + } + if (!failed) { + //up to now only YUV data, this is for reference only, since the pi is too slow. + glBindTexture(GL_TEXTURE_2D, out_frame->textures[0]); + glPixelStorei(GL_UNPACK_ALIGNMENT,1); + + glTexSubImage2D(GL_TEXTURE_2D,0,0,0, + dec_frame_ff_uploading->linesize[0],height, + GL_LUMINANCE,GL_UNSIGNED_BYTE, + dec_frame_ff_uploading->data[0]); + + + glBindTexture(GL_TEXTURE_2D, out_frame->textures[1]); + glPixelStorei(GL_UNPACK_ALIGNMENT,1); + glTexSubImage2D(GL_TEXTURE_2D,0,0,0, + dec_frame_ff_uploading->linesize[1],height>>1, + GL_LUMINANCE,GL_UNSIGNED_BYTE, + dec_frame_ff_uploading->data[1]); + + glBindTexture(GL_TEXTURE_2D, out_frame->textures[2]); + glPixelStorei(GL_UNPACK_ALIGNMENT,1); + glTexSubImage2D(GL_TEXTURE_2D,0,0,0, + dec_frame_ff_uploading->linesize[2],height>>1, + GL_LUMINANCE,GL_UNSIGNED_BYTE, + dec_frame_ff_uploading->data[2]); + ogl_frame_mutex.Lock(); + ready_ogl_frames.push_back(out_frame); + ogl_frame_mutex.Unlock(); + ((OsdOpenGL*)Osd::getInstance())->AdviseAboutNewFrame(); //Tell him, that we have a frame waiting + + } + + dec_frame_ff_mutex.Lock(); + dec_frame_ff_free.push_back(dec_frame_ff_uploading); + dec_frame_ff_uploading=NULL; + dec_frame_ff_mutex.Unlock(); + + + + + } + + + + + } +#endif + + if (sleep) threadWaitForSignal(); + threadCheckExit(); + } + +} + +void VideoVPEOGL::threadPostStopCleanup() +{ + eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT ); +#ifdef VPE_FFMPEG_SUPPORT + dec_frame_ff_uploading=NULL; +#endif +} + @@ -1136,6 +1328,7 @@ int VideoVPEOGL::DeAllocateCodecsOMX() int VideoVPEOGL::AllocateCodecsFFMPEG() { + ffmpeg_hastime=false; Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg"); mpeg2codec_context_ff=avcodec_alloc_context(); if (mpeg2codec_context_ff==NULL) { @@ -1146,11 +1339,40 @@ int VideoVPEOGL::AllocateCodecsFFMPEG() Log::getInstance()->log("Video", Log::DEBUG, "Opening ffmpeg codec failed"); return 0; } - dec_frame_ff=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload - if (!dec_frame_ff) { - Log::getInstance()->log("Video", Log::DEBUG, "Allocating dec_frame failed"); - return 0; + Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark1"); + dec_frame_ff_mutex.Lock(); + Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark2"); + for (int i=0;i<3;i++) { + AVFrame *dec_frame_ff=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload + if (!dec_frame_ff) { + Log::getInstance()->log("Video", Log::DEBUG, "Allocating dec_frame failed"); + return 0; + } + dec_frame_ff_all.push_back(dec_frame_ff); + dec_frame_ff_free.push_back(dec_frame_ff); } + dec_frame_ff_decoding=NULL; + Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark 3"); + dec_frame_ff_mutex.Unlock(); + + ogl_frame_mutex.Lock(); + //Allocate texture structs, since we do not know the sizes, we do not allocate the textures yet + for (int i=0;i<3;i++) { + VPEOGLFrame *new_frame=(VPEOGLFrame *)malloc(sizeof(VPEOGLFrame)); + new_frame->type=1; //1 = YUV, 2 RGB + new_frame->textures[0]=0; + new_frame->textures[1]=0; + new_frame->textures[2]=0; + 
new_frame->width=new_frame->height=0; + all_ogl_frames.push_back(new_frame); + free_ogl_frames.push_back(new_frame); + + } + ogl_frame_outside=false; + + ogl_frame_mutex.Unlock(); + + ffmpeg_running=true; return 1; @@ -1158,11 +1380,55 @@ int VideoVPEOGL::AllocateCodecsFFMPEG() int VideoVPEOGL::DeAllocateCodecsFFMPEG() { + ffmpeg_running=false; Log::getInstance()->log("Video", Log::NOTICE, "DeAllocateCodecsFFmpeg"); - if (dec_frame_ff) { - av_free(dec_frame_ff); - dec_frame_ff=NULL; + dec_frame_ff_mutex.Lock(); + dec_frame_ff_upload_pending.clear(); + dec_frame_ff_free.clear(); + dec_frame_ff_mutex.Unlock(); + while (dec_frame_ff_uploading) { + Log::getInstance()->log("Video", Log::NOTICE, "Wait for uploading to finish"); + MILLISLEEP(20); + } + dec_frame_ff_mutex.Lock(); + for (int i=0; i< dec_frame_ff_all.size();i++) { + av_free(dec_frame_ff_all[i]); + } + + dec_frame_ff_all.clear(); + dec_frame_ff_mutex.Unlock(); + dec_frame_ff_decoding=NULL; + + while (ogl_frame_outside) { + Log::getInstance()->log("Video", Log::NOTICE, "Wait for ogl frame from outside"); + MILLISLEEP(20); + } + + ((OsdOpenGL*)Osd::getInstance())->BeginPainting(); // get osd's context + ogl_frame_mutex.Lock(); + for (int i=0; i< dec_frame_ff_all.size();i++) { + VPEOGLFrame * del_frame=all_ogl_frames[i]; + if (del_frame->textures[0]==0) { + glDeleteTextures(1,&del_frame->textures[0]); + del_frame->textures[0]=0; + } + if (del_frame->textures[1]==0) { + glDeleteTextures(1,&del_frame->textures[1]); + del_frame->textures[1]=0; + } + if (del_frame->textures[2]==0) { + glDeleteTextures(1,&del_frame->textures[2]); + del_frame->textures[2]=0; + } + free(all_ogl_frames[i]); } + all_ogl_frames.clear(); + free_ogl_frames.clear(); + ready_ogl_frames.clear(); + ogl_frame_mutex.Unlock(); + ((OsdOpenGL*)Osd::getInstance())->EndPainting(); + + if (mpeg2codec_context_ff) { avcodec_close(mpeg2codec_context_ff); av_free(mpeg2codec_context_ff); @@ -1355,7 +1621,7 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet, OMX_ERRORTYPE error; - OMX_PARAM_PORTDEFINITIONTYPE port_image; +/* OMX_PARAM_PORTDEFINITIONTYPE port_image; memset(&port_image,0,sizeof(port_image)); port_image.nSize=sizeof(port_image); port_image.nVersion.nVersion=OMX_VERSION; @@ -1364,7 +1630,7 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet, if (error!= OMX_ErrorNone){ Log::getInstance()->log("Video", Log::DEBUG, "OMX_GetParameter failed %x", error); } - Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight); + Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight);*/ /*First Check, if we have an audio sample*/ if (iframemode) { @@ -1387,12 +1653,11 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet, /*Inspect PES-Header */ - OMX_STATETYPE temp_state; - OMX_GetState(omx_vid_dec,&temp_state); +// OMX_STATETYPE temp_state; +// OMX_GetState(omx_vid_dec,&temp_state); if (*samplepos==0) {//stripheader headerstrip=buffer[packet.pos_buffer+8]+9/*is this right*/; - //headerstrip+=6; //h264 *samplepos+=headerstrip; if ( packet.synched ) { @@ -1509,32 +1774,15 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet, const UCHAR* buffer, UINT *samplepos) { - -#if 0 //Later add fail back code for ffmpeg /*if (!videoon) { *samplepos+=packet.length; return packet.length; }*/ + if (!ffmpeg_running) return 0; // if we are not runnig do not do this - if (!omx_running) return 0; 
// if we are not runnig do not do this - - - OMX_ERRORTYPE error; - - OMX_PARAM_PORTDEFINITIONTYPE port_image; - memset(&port_image,0,sizeof(port_image)); - port_image.nSize=sizeof(port_image); - port_image.nVersion.nVersion=OMX_VERSION; - port_image.nPortIndex =omx_codec_output_port; - error=OMX_GetParameter(omx_vid_dec,OMX_IndexParamPortDefinition, &port_image); - if (error!= OMX_ErrorNone){ - Log::getInstance()->log("Video", Log::DEBUG, "OMX_GetParameter failed %x", error); - } - Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight); - /*First Check, if we have an audio sample*/ if (iframemode) { //samplepos=0; MILLISLEEP(10); @@ -1542,7 +1790,7 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet, } UINT headerstrip=0; - if (packet.disconti) { +/* if (packet.disconti) { firstsynched=false; if (cur_input_buf_omx) { OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx); @@ -1551,27 +1799,40 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet, } cur_input_buf_omx=NULL; } - } + }*/ /*Inspect PES-Header */ + if (!dec_frame_ff_decoding) { + dec_frame_ff_mutex.Lock(); + if (dec_frame_ff_free.size()>0) { + dec_frame_ff_decoding=dec_frame_ff_free.front(); + dec_frame_ff_free.pop_front(); + dec_frame_ff_mutex.Unlock(); + } else { + Log::getInstance()->log("Video", Log::DEBUG, "We have no free buffers"); + dec_frame_ff_mutex.Unlock(); + // No free Buffers + return 0; + } + } + - OMX_STATETYPE temp_state; - OMX_GetState(omx_vid_dec,&temp_state); if (*samplepos==0) {//stripheader headerstrip=buffer[packet.pos_buffer+8]+9/*is this right*/; - //headerstrip+=6; //h264 *samplepos+=headerstrip; if ( packet.synched ) { - if (cur_input_buf_omx) { + /*if (cur_input_buf_omx) { OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx); if (error!=OMX_ErrorNone){ Log::getInstance()->log("Video", Log::DEBUG, "OMX_EmptyThisBuffer failed %x", error); } cur_input_buf_omx=NULL;//write out old data - } + }*/ + ffmpeg_time=packet.presentation_time; + ffmpeg_hastime=true; // reftime1=packet.presentation_time; // reftime2=reftime1+1; firstsynched=true; @@ -1583,31 +1844,17 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet, } } - if (!cur_input_buf_omx) { - input_bufs_omx_mutex.Lock(); - if (input_bufs_omx_free.size()==0) { - input_bufs_omx_mutex.Unlock(); - Log::getInstance()->log("Video", Log::DEBUG, "Deliver MediaPacket no free sample"); - return 0; // we do not have a free media sample - } - cur_input_buf_omx=input_bufs_omx_free.front(); - cur_input_buf_omx->nFilledLen=0; - cur_input_buf_omx->nOffset=0; - cur_input_buf_omx->nTimeStamp=0; - input_bufs_omx_free.pop_front(); - input_bufs_omx_mutex.Unlock(); - } - if (cur_input_buf_omx->nFilledLen==0) {//will only be changed on first packet + /*if (cur_input_buf_omx->nFilledLen==0) {//will only be changed on first packet /*if (packet.disconti) { ms->SetDiscontinuity(TRUE); } else { ms->SetDiscontinuity(FALSE); - }*/ + }* //if (packet.synched) { //lastreftimePTS=packet.pts; @@ -1625,49 +1872,51 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet, // ms->SetSyncPoint(TRUE); //} - } + }*/ unsigned int haveToCopy=packet.length-*samplepos; + while (haveToCopy>0) { + int dec_bytes=0; + int frame_ready=0; + + // Log::getInstance()->log("Video", Log::DEBUG, "Push data to decoder"); + dec_bytes=avcodec_decode_video(mpeg2codec_context_ff, dec_frame_ff_decoding, + &frame_ready, 
buffer+packet.pos_buffer+*samplepos, haveToCopy); + if (dec_bytes<0) { + Log::getInstance()->log("Video", Log::DEBUG, "Decoding frame failed %x", dec_bytes); + return *samplepos; + } + *samplepos+=dec_bytes; + haveToCopy-=dec_bytes; + if (frame_ready) { + // Log::getInstance()->log("Video", Log::DEBUG, "We have a frame push it to osd"); + + dec_frame_ff_mutex.Lock(); + ffwidth=mpeg2codec_context_ff->width; + ffheight=mpeg2codec_context_ff->height; + ffpixfmt=mpeg2codec_context_ff->pix_fmt; + // Log::getInstance()->log("Video", Log::DEBUG, "Frame info %d %d %d",ffwidth,ffheight,ffpixfmt); + + dec_frame_ff_upload_pending.push_back(dec_frame_ff_decoding); + dec_frame_ff_decoding=NULL; + if (dec_frame_ff_free.size()>0) { + dec_frame_ff_decoding=dec_frame_ff_free.front(); + dec_frame_ff_free.pop_front(); + dec_frame_ff_mutex.Unlock(); + threadSignal(); + ffmpeg_hastime=false; + } else { + ffmpeg_hastime=false; + dec_frame_ff_mutex.Unlock(); + // No free Buffers + return *samplepos; + } - while (haveToCopy> (cur_input_buf_omx->nAllocLen-cur_input_buf_omx->nFilledLen)) { - unsigned int cancopy=cur_input_buf_omx->nAllocLen-cur_input_buf_omx->nFilledLen; - memcpy(cur_input_buf_omx->pBuffer+cur_input_buf_omx->nFilledLen,buffer+packet.pos_buffer+*samplepos,cancopy); - haveToCopy-=cancopy; - cur_input_buf_omx->nFilledLen+=cancopy; - *samplepos+=cancopy; - // push old buffer out - OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx); - if (error!=OMX_ErrorNone){ - Log::getInstance()->log("Video", Log::DEBUG, "OMX_EmptyThisBuffer failed %x", error); } - // get5 new buffer - input_bufs_omx_mutex.Lock(); - if (input_bufs_omx_free.size()==0) { - input_bufs_omx_mutex.Unlock(); - //Log::getInstance()->log("Video", Log::DEBUG, "Deliver MediaPacket no free sample"); - return *samplepos; // we do not have a free media sample - } - cur_input_buf_omx=input_bufs_omx_free.front(); - cur_input_buf_omx->nFilledLen=0; - cur_input_buf_omx->nOffset=0; - cur_input_buf_omx->nTimeStamp=0; - input_bufs_omx_free.pop_front(); - input_bufs_omx_mutex.Unlock(); - - cur_input_buf_omx->nFlags=OMX_BUFFERFLAG_TIME_UNKNOWN; } - memcpy(cur_input_buf_omx->pBuffer+cur_input_buf_omx->nFilledLen, - buffer+packet.pos_buffer+*samplepos,haveToCopy); - cur_input_buf_omx->nFilledLen+=haveToCopy; - - - - *samplepos+=haveToCopy; - return *samplepos; -#endif - return 0; + } diff --git a/videovpeogl.h b/videovpeogl.h index 855b91a..aab1590 100755 --- a/videovpeogl.h +++ b/videovpeogl.h @@ -24,6 +24,7 @@ #include "mutex.h" + #include #include #include @@ -37,6 +38,7 @@ #include "defines.h" #include "video.h" +#include "threadsystem.h" //#define EGL_EGLEXT_PROTOTYPES @@ -74,19 +76,21 @@ extern "C" { -/* + struct VPEOGLFrame { - int type; //1 = RGB, 2 YUV + int type; //1 = YUV, 2 RGB GLuint textures[3]; // 0=RGB or Y, 1=U 2=V - + int width, height; + int stride; +/* #ifdef VPE_OMX_SUPPORT //OMX EGLImageKHR khr_image; OMX_BUFFERHEADERTYPE *omx_buf; -#endif -};*/ +#endif*/ +}; -class VideoVPEOGL : public Video +class VideoVPEOGL : public Video, public Thread_TYPE { public: VideoVPEOGL(); @@ -132,6 +136,8 @@ class VideoVPEOGL : public Video void WriteOutPATPMT(); + VPEOGLFrame *getReadyOGLFrame(); + void returnOGLFrame(VPEOGLFrame *frame); @@ -225,19 +231,42 @@ class VideoVPEOGL : public Video #ifdef VPE_FFMPEG_SUPPORT AVCodec *mpeg2codec_ff; AVCodecContext *mpeg2codec_context_ff; - AVFrame *dec_frame_ff; + vector dec_frame_ff_all; + list dec_frame_ff_free; + list dec_frame_ff_upload_pending; + AVFrame* dec_frame_ff_uploading; + 
AVFrame* dec_frame_ff_decoding; + Mutex dec_frame_ff_mutex; UINT DeliverMediaPacketFFMPEG(MediaPacket packet,const UCHAR* buffer,UINT *samplepos); int AllocateCodecsFFMPEG(); int DeAllocateCodecsFFMPEG(); + bool ffmpeg_running; + bool ffmpeg_hastime; // signals if a pts is now + long long ffmpeg_time; + + int ffwidth,ffheight,ffpixfmt; + #endif + vector all_ogl_frames; + list free_ogl_frames; + list ready_ogl_frames; + bool ogl_frame_outside; + Mutex ogl_frame_mutex; + + int AllocateYUVOglTexture(VPEOGLFrame* outframe,int width,int height,int stride); + + virtual void threadMethod(); + virtual void threadPostStopCleanup(); + + bool firstsynched; - int lastpacketnum; + int lastpacketnum; EGLDisplay egl_display; EGLSurface egl_surface; -- 2.39.5
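
The OSD's EGL config is now requested with EGL_SURFACE_TYPE set to EGL_WINDOW_BIT|EGL_PBUFFER_BIT so the same config can back both the window surface and a pbuffer, and VideoVPEOGL::initUsingOSDObjects() creates a second EGL context that shares objects with the OSD context plus a 1x1 dummy pbuffer surface; this lets the video thread make GL calls (texture uploads) while the OSD thread keeps rendering. A minimal sketch of that pattern, assuming an OpenGL ES 2.0 / EGL 1.4 stack; the helper name and its parameters are illustrative, not taken from the patch:

#include <EGL/egl.h>

// Create an upload context that shares texture names with the OSD context,
// plus a 1x1 pbuffer that is never drawn to and only exists so that
// eglMakeCurrent() can be satisfied on the upload thread.
static bool makeUploadContext(EGLDisplay dpy, EGLConfig cfg,
                              EGLContext osd_ctx,        // context owned by OsdOpenGL
                              EGLContext* upload_ctx,
                              EGLSurface* dummy_surface)
{
    const EGLint ctx_attr[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
    // Passing osd_ctx as share_context makes textures created on the upload
    // thread visible to the renderer.
    *upload_ctx = eglCreateContext(dpy, cfg, osd_ctx, ctx_attr);
    if (*upload_ctx == EGL_NO_CONTEXT) return false;

    const EGLint pb_attr[] = { EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE };
    *dummy_surface = eglCreatePbufferSurface(dpy, cfg, pb_attr);
    if (*dummy_surface == EGL_NO_SURFACE) return false;

    // Called from the upload thread before any gl* calls (threadMethod does
    // the equivalent at its start).
    return eglMakeCurrent(dpy, *dummy_surface, *dummy_surface, *upload_ctx) == EGL_TRUE;
}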
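
AllocateYUVOglTexture() creates three GL_LUMINANCE textures per VPEOGLFrame (full resolution for Y, half resolution for U and V), and the upload thread in VideoVPEOGL::threadMethod() fills them with glTexSubImage2D, using the ffmpeg linesize as the texture width so rows can be uploaded without repacking. A sketch of the Y-plane path only, matching the black-and-white state of this commit; uploadLumaPlane is a hypothetical helper, not a function from the patch:

#include <GLES2/gl2.h>

// Upload one 8-bit luminance plane.  'stride' is AVFrame::linesize[0], i.e.
// the allocated row length in bytes; the texture is made 'stride' texels wide
// so the decoder buffer can be handed to GL as-is.
static GLuint uploadLumaPlane(GLuint tex, const unsigned char* data,
                              int stride, int height)
{
    if (tex == 0) {
        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_2D, tex);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        // Allocate storage once; GL_LUMINANCE keeps the plane single-channel.
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride, height, 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
    } else {
        glBindTexture(GL_TEXTURE_2D, tex);
    }
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);   // decoder rows are byte-aligned
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stride, height,
                    GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
    return tex;
}

The U and V planes would go through the same call with stride>>1 and height>>1 once colour is wired up.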
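
Decoded pictures move between the threads through small hand-off lists: DeliverMediaPacketFFMPEG() pushes finished AVFrames onto dec_frame_ff_upload_pending, threadMethod() converts them into VPEOGLFrames on ready_ogl_frames, and OsdOpenGL pulls them with getReadyOGLFrame() and recycles them with returnOGLFrame(). A stripped-down sketch of that free/ready exchange, using std::mutex in place of the project's Mutex class and a generic Frame type; none of these names come from the patch:

#include <cstddef>
#include <list>
#include <mutex>

struct Frame { unsigned int textures[3]; int width, height, stride; };

// All frames are pre-allocated; they only ever migrate between the two lists,
// so neither thread allocates or frees while video is running.
class FrameExchange
{
public:
    Frame* takeFree()            // upload thread: grab a recycled frame
    {
        std::lock_guard<std::mutex> g(m);
        if (free_frames.empty()) return NULL;
        Frame* f = free_frames.front();
        free_frames.pop_front();
        return f;
    }
    void markReady(Frame* f)     // upload thread: textures are filled
    {
        std::lock_guard<std::mutex> g(m);
        ready_frames.push_back(f);
        // the real code also signals the OSD thread here (AdviseAboutNewFrame)
    }
    Frame* getReady()            // render thread: next frame to draw, if any
    {
        std::lock_guard<std::mutex> g(m);
        if (ready_frames.empty()) return NULL;
        Frame* f = ready_frames.front();
        ready_frames.pop_front();
        return f;
    }
    void putBack(Frame* f)       // render thread recycle; also used to seed the pool
    {
        std::lock_guard<std::mutex> g(m);
        free_frames.push_back(f);
    }

private:
    std::mutex m;
    std::list<Frame*> free_frames;
    std::list<Frame*> ready_frames;
};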
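
shaders/frame__frag_shader.h is referenced by osdopengl.cc but is not part of this diff, and only the textureY sampler is bound (the U and V lookups are commented out), which is why the output is grey-scale for now. Assuming the project's convention of keeping GLSL sources as string constants in a header, a Y-only fragment shader consistent with the uniforms this patch binds might look like the following; this is a guess at the missing file, not its actual contents, and the varying name must match whatever generic__vertex_shader.h really exports:

// frame__frag_shader.h -- hypothetical Y-only (black-and-white) version
const char frame_frag_shader[] =
    "precision mediump float;                               \n"
    "varying vec2 out_texCoord;   /* assumed varying name */\n"
    "uniform sampler2D textureY;                            \n"
    "void main()                                            \n"
    "{                                                      \n"
    "    float y = texture2D(textureY, out_texCoord).r;     \n"
    "    gl_FragColor = vec4(y, y, y, 1.0);                 \n"
    "}                                                      \n";

A full YUV-to-RGB conversion would add textureU/textureV samplers and apply the BT.601 matrix in main().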