ffmpeg softdecoding only Y (bw)
authorMarten Richter <marten.richter@freenet.de>
Sun, 3 Jun 2012 18:04:48 +0000 (20:04 +0200)
committerMarten Richter <marten.richter@freenet.de>
Sun, 3 Jun 2012 18:04:48 +0000 (20:04 +0200)
osdopengl.cc
osdopengl.h
videovpeogl.cc
videovpeogl.h

index f4ba7f717703d94aaaf5b786ea374c81294c40e0..682a4e9e633536267cc95751f1409b43a303e551 100755 (executable)
@@ -30,6 +30,7 @@
 \r
 #include "shaders/generic__vertex_shader.h"\r
 #include "shaders/osd__frag_shader.h"\r
+#include "shaders/frame__frag_shader.h"\r
 \r
 #define  BACKBUFFER_WIDTH 1920\r
 #define  BACKBUFFER_HEIGHT 1080\r
@@ -56,6 +57,7 @@ OsdOpenGL::OsdOpenGL()
   osd_shader=0;\r
   gen_shader=0;\r
   osd_program=0;\r
+  frame_program=0;\r
 \r
 #ifdef BENCHMARK_FPS\r
        last_benchmark_time=getTimeMS();\r
@@ -123,7 +125,7 @@ int OsdOpenGL::init(void* device)
 \r
    const EGLint attributs[]={\r
                 EGL_RED_SIZE,8,EGL_GREEN_SIZE, 8,EGL_BLUE_SIZE, 8,EGL_ALPHA_SIZE, 8,\r
-         EGL_SURFACE_TYPE, EGL_WINDOW_BIT,\r
+         EGL_SURFACE_TYPE, EGL_WINDOW_BIT|EGL_PBUFFER_BIT,\r
          EGL_CONFORMANT, EGL_OPENGL_ES2_BIT,\r
          EGL_NONE\r
    }; // Here, we might have to select the resolution!\r
@@ -218,7 +220,7 @@ int OsdOpenGL::init(void* device)
   gen_shader=CreateShader(generic_vertex_shader, GL_VERTEX_SHADER);\r
   osd_shader=CreateShader(osd_frag_shader, GL_FRAGMENT_SHADER);\r
 \r
-\r
+  // Create the program for osd rendering\r
   osd_program=glCreateProgram();\r
   if (osd_program==0) {\r
          Log::getInstance()->log("OSD", Log::WARN, "Creating glsl program failed!%d",glGetError());\r
@@ -243,6 +245,37 @@ int OsdOpenGL::init(void* device)
          glDeleteProgram(osd_program);\r
          return 0;\r
   }\r
+  // create the program for yuv frame rendering\r
+  frame_shader=CreateShader(frame_frag_shader, GL_FRAGMENT_SHADER);\r
+\r
+  frame_program=glCreateProgram();\r
+  if (frame_program==0) {\r
+         Log::getInstance()->log("OSD", Log::WARN, "Creating glsl program failed!%d",glGetError());\r
+      return 0;\r
+  }\r
+  glAttachShader(frame_program,gen_shader);\r
+  glAttachShader(frame_program,frame_shader);\r
+  glBindAttribLocation(frame_program,0,"vec_pos");\r
+  glBindAttribLocation(frame_program,1,"tex_coord");\r
+\r
+  frame_sampler_locY=glGetUniformLocation(frame_program,"textureY");\r
+  //frame_sampler_locU=glGetUniformLocation(frame_program,"textureU");\r
+ // frame_sampler_locV=glGetUniformLocation(frame_program,"textureV");\r
+\r
+  glLinkProgram(frame_program);\r
+  //GLint link_status;\r
+  glGetShaderiv(frame_program,GL_LINK_STATUS, &link_status);\r
+\r
+  if (!link_status) {\r
+         char buffer[1024];\r
+         glGetProgramInfoLog(frame_program,1024,NULL,buffer);\r
+         Log::getInstance()->log("OSD", Log::WARN, "Compiling Programm failed!");\r
+         Log::getInstance()->log("OSD", Log::WARN, "%s",buffer);\r
+         glDeleteProgram(frame_program);\r
+         return 0;\r
+  }\r
+\r
+\r
 \r
   glClearColor(0.0f,0.0f,0.0f,1.f);\r
 \r
@@ -373,32 +406,34 @@ void OsdOpenGL::threadMethod()
        //glmutex.Lock();\r
 \r
        //glmutex.Unlock();\r
+       // frame handed over by the video object; rendered then recycled below\r
+       VPEOGLFrame *frame=NULL;\r
+       // timed-wait interval for threadWaitForSignalTimed; tv_nsec==0 means\r
+       // "do not wait at all" (another frame is already pending)\r
+       struct timespec ts;\r
+       ts.tv_sec=0;\r
+       ts.tv_nsec=0;\r
+       VideoVPEOGL* video =(VideoVPEOGL*) Video::getInstance();\r
        while (true)\r
        {\r
+               ts.tv_nsec=10*1000000LL;\r
                unsigned int waittime=10;\r
                if (initted){\r
-               //      if (evrstate==EVR_pres_off || evrstate==EVR_pres_pause)\r
-               //      {\r
-                               Render();\r
-                               //TODO get surfaces from Video object\r
-       /*              } else if (evrstate==EVR_pres_started)\r
-                       {\r
-                               LPDIRECT3DSURFACE9 surf;\r
-                               if (dsallocator) dsallocator->GetNextSurface(&surf,&waittime);\r
-                               if (surf==NULL)\r
-                               {\r
-                                       Render();\r
-                               }\r
-                               else\r
-                               {\r
-                                       RenderDS(surf);\r
-                                       surf->Release();\r
-                                       if (dsallocator) dsallocator->DiscardSurfaceandgetWait(&waittime);\r
-                               }\r
-                       }*/\r
+                       // prefer a fresh video frame; otherwise repaint the OSD\r
+                       // alone at most every 200 ms (5 fps) to avoid tearing\r
+                       if (!frame) frame=video->getReadyOGLFrame();\r
+                       if (frame) {\r
+                               InternalRendering(frame);\r
+                               lastrendertime=getTimeMS();\r
+                               video->returnOGLFrame(frame); //please recycle it\r
+                               frame=NULL;\r
+                       } else {\r
+                                long long time1=getTimeMS();\r
+                                if ((time1-lastrendertime)>200) {//5 fps for OSD updates are enough, avoids tearing\r
+                                       InternalRendering(NULL);\r
+                                       lastrendertime=getTimeMS();\r
+                                }\r
+                       }\r
+                       // poll once more so a queued frame skips the wait below\r
+                       frame=video->getReadyOGLFrame();\r
                }\r
+               if (frame) ts.tv_nsec=0;\r
                threadCheckExit();\r
-               if (waittime!=0) MILLISLEEP(min(10,waittime));\r
+               if (ts.tv_nsec!=0) threadWaitForSignalTimed(&ts);\r
                //Sleep(1);\r
        }\r
        //eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT );\r
@@ -411,14 +446,14 @@ void OsdOpenGL::threadPostStopCleanup()
        //goo;\r
 }\r
 \r
-\r
+/*\r
 // This function is called from the WinMain function in order to get Screen updates\r
 void OsdOpenGL::Render()\r
 {\r
        if (!initted) return ;\r
+       VPEOGLFrame* frame=NULL;\r
        if (external_driving) {\r
         long long time1=getTimeMS();\r
-\r
                if ((time1-lastrendertime)>200) {//5 fps for OSD updates are enough, avoids tearing\r
                        InternalRendering(NULL);\r
                        lastrendertime=getTimeMS();\r
@@ -429,7 +464,7 @@ void OsdOpenGL::Render()
                struct timespec ts;\r
                clock_gettime(CLOCK_MONOTONIC, &ts);\r
                long long time1=ts.tv_sec*1000+ts.tv_nsec/1000000LL;\r
-               if ((time1-lastrendertime)>50) {//10 fps for OSD updates are enough, avoids tearing\r
+               if ((time1-lastrendertime)>100) {//10 fps for OSD updates are enough, avoids tearing\r
                        InternalRendering(NULL);\r
                        lastrendertime=getTimeMS();\r
                } else {\r
@@ -440,16 +475,16 @@ void OsdOpenGL::Render()
        }\r
 }\r
 \r
-void OsdOpenGL::RenderDS(GLuint  present){\r
+void OsdOpenGL::RenderDS(VPEOGLFrame* frame){\r
        if (!initted) return; \r
        if (external_driving) {\r
-               InternalRendering(present);\r
+               InternalRendering(frame);\r
                lastrendertime=getTimeMS();\r
        }\r
-}\r
+}*/\r
 \r
 \r
-void OsdOpenGL::InternalRendering(GLuint  present){\r
+void OsdOpenGL::InternalRendering(VPEOGLFrame* frame){\r
     BeginPainting();\r
   /*  HRESULT losty=d3ddevice->TestCooperativeLevel();\r
     if (losty==D3DERR_DEVICELOST) {\r
@@ -528,16 +563,58 @@ void OsdOpenGL::InternalRendering(GLuint  present){
        glViewport(0, 0, display_width,display_height);\r
 \r
        glClear(GL_COLOR_BUFFER_BIT);\r
-       glUseProgram(osd_program);\r
 \r
+\r
+\r
+\r
+\r
+       if (frame) {\r
+               glUseProgram(frame_program);\r
+       //      Log::getInstance()->log("OSD", Log::WARN, "mark1 glerror %x",glGetError());\r
+               glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices);\r
+               glEnableVertexAttribArray(0);\r
+               glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u));\r
+               glEnableVertexAttribArray(1);\r
+               glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices);\r
+               glEnableVertexAttribArray(0);\r
+               glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u));\r
+               glEnableVertexAttribArray(1);\r
+\r
+               glActiveTexture(GL_TEXTURE0);\r
+               glBindTexture(GL_TEXTURE_2D,frame->textures[0]);\r
+               //Log::getInstance()->log("OSD", Log::WARN, "mark2 glerror %x",glGetError());\r
+       //      glActiveTexture(GL_TEXTURE1);\r
+       //      Log::getInstance()->log("OSD", Log::WARN, "mark3 glerror %x",glGetError());\r
+       //      glBindTexture(GL_TEXTURE_2D,frame->textures[1]);\r
+       //      Log::getInstance()->log("OSD", Log::WARN, "mark4 glerror %x",glGetError());\r
+       //      glActiveTexture(GL_TEXTURE2);\r
+       //      Log::getInstance()->log("OSD", Log::WARN, "mark5 glerror %x",glGetError());\r
+       //      glBindTexture(GL_TEXTURE_2D,frame->textures[2]);\r
+\r
+       //      Log::getInstance()->log("OSD", Log::WARN, "mark6 glerror %x",glGetError());\r
+               glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);\r
+       //      Log::getInstance()->log("OSD", Log::WARN, "mark7 glerror %x",glGetError());\r
+               glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);\r
+               //Log::getInstance()->log("OSD", Log::WARN, "mark8 glerror %x",glGetError());\r
+\r
+               glUniform1i(frame_sampler_locY,0);\r
+               //glUniform1i(frame_sampler_locU,1);\r
+               //glUniform1i(frame_sampler_locV,2);\r
+\r
+\r
+               glDrawArrays(GL_TRIANGLE_STRIP, 0,  4);\r
+               glEnable(GL_BLEND);\r
+               glBlendFuncSeparate (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA,GL_ZERO,GL_ONE);\r
+\r
+       }\r
+\r
+       glUseProgram(osd_program);\r
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices);\r
        glEnableVertexAttribArray(0);\r
        glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u));\r
        glEnableVertexAttribArray(1);\r
 \r
 \r
-\r
-\r
        glActiveTexture(GL_TEXTURE0);\r
        glBindTexture(GL_TEXTURE_2D,((SurfaceOpenGL*)screen)->getTexture());\r
 \r
index ccdd3171b0a4ddb7f0bab8087f668863ae4038c6..f92cadc02db6e8d3493d6920b1bb5f58e6e64619 100755 (executable)
@@ -34,6 +34,7 @@
 #include "log.h"\r
 #include "threadp.h"\r
 #include "mutex.h"\r
+#include "videovpeogl.h"\r
 \r
 #define BENCHMARK_FPS\r
 \r
@@ -81,22 +82,25 @@ class OsdOpenGL : public Osd, public Thread_TYPE
 \r
 \r
        // This function is called from the threadMethod function in order to get Screen updates\r
-       void Render();\r
-       void RenderDS(GLuint  present);\r
+/*     void Render();\r
+       void RenderDS(VPEOGLFrame* frame);*/\r
        void BeginPainting();\r
        void EndPainting();\r
 \r
        void setExternalDriving(/*DsAllocator* dsall,*/ unsigned int width, unsigned int height);\r
        void Blank();\r
 \r
-       void getEGLObjs(EGLDisplay *i_egl_display,EGLSurface *i_egl_surface,EGLContext *i_egl_context){\r
+       void getEGLObjs(EGLDisplay *i_egl_display,EGLSurface *i_egl_surface,EGLContext *i_egl_context, EGLConfig *i_egl_config){\r
                *i_egl_display=egl_display;\r
                *i_egl_surface=egl_surface;\r
                *i_egl_context=egl_context;\r
+               *i_egl_config=egl_ourconfig;\r
        };\r
 \r
        EGLConfig getEGLConfig() {return egl_ourconfig;};\r
 \r
+       void AdviseAboutNewFrame() {threadSignal();};\r
+\r
 \r
 \r
 private:\r
@@ -112,7 +116,7 @@ private:
        bool external_driving;\r
        Mutex glmutex;\r
        long long  lastrendertime;\r
-       void InternalRendering(GLuint present);\r
+       void InternalRendering(VPEOGLFrame* frame);\r
        bool DoLost();\r
        void InitVertexBuffer(float  scalex,float scaley);\r
        OSDVERTEX osdvertices[4];\r
@@ -124,6 +128,13 @@ private:
 \r
        GLuint osd_program;\r
 \r
+       GLuint frame_shader;\r
+       GLuint frame_program;\r
+\r
+       GLint frame_sampler_locY;\r
+       GLint frame_sampler_locU;\r
+       GLint frame_sampler_locV;\r
+\r
        GLint osd_sampler_loc;\r
 \r
 \r
index 8d944d8730937ab2c2965d8bc8aacf6ce894157c..1ef6d93ecf55aa969b53305ddc55e529be3232d1 100755 (executable)
@@ -43,7 +43,9 @@ VideoVPEOGL::VideoVPEOGL()
 
 #ifdef VPE_FFMPEG_SUPPORT
   mpeg2codec_context_ff=NULL;
-  dec_frame_ff=NULL;
+  ffmpeg_running=false;
+  dec_frame_ff_uploading=NULL;
+  dec_frame_ff_decoding=NULL;
 #endif
   
 }
@@ -75,7 +77,10 @@ int VideoVPEOGL::init(UCHAR tformat)
   /* new stuff */
 
 
-  stop();
+
+
+  //stop();
+
 
 
   return 1;
@@ -86,12 +91,36 @@ int VideoVPEOGL::initUsingOSDObjects()
        EGLDisplay i_egl_display;
        EGLSurface i_egl_surface;
        EGLContext i_egl_context;
+       EGLConfig i_egl_config;
        OsdOpenGL *osd=(OsdOpenGL*)osd->getInstance();
-       osd->getEGLObjs(&i_egl_display,&i_egl_surface,&i_egl_context);
+       osd->getEGLObjs(&i_egl_display,&i_egl_surface,&i_egl_context, &i_egl_config);
+       const EGLint attr_context[]={
+                       EGL_CONTEXT_CLIENT_VERSION,2,
+               EGL_NONE
+       };
 
        egl_display=i_egl_display;
-       egl_surface=i_egl_surface;
-       egl_context=i_egl_context;
+       egl_context=eglCreateContext(egl_display,i_egl_config,i_egl_context,attr_context);
+       if (egl_context==EGL_NO_CONTEXT) {
+                Log::getInstance()->log("Video", Log::WARN, "Creating egl context failed! %d",eglGetError());
+                return 0;
+       }
+       // We create a dummy surface here, in order to allow two contexts
+       const EGLint attr_pbuffer[]={
+                       EGL_WIDTH, 1, EGL_HEIGHT,1,
+                   EGL_NONE
+       };
+       egl_surface=eglCreatePbufferSurface(egl_display,i_egl_config,attr_pbuffer);
+       if (egl_surface==EGL_NO_SURFACE) {
+                Log::getInstance()->log("Video", Log::WARN, "Creating egl pbuffer failed! %d",eglGetError());
+                return 0;
+       }
+
+
+
+
+       //egl_surface=i_egl_surface;
+       //egl_context=i_egl_context;
 
 
 #ifdef VPE_OMX_SUPPORT
@@ -118,6 +147,7 @@ int VideoVPEOGL::initUsingOSDObjects()
        }
 
 #endif
+       threadStart();
        return 1;
 }
 
@@ -187,6 +217,7 @@ int VideoVPEOGL::shutdown()
 {
   if (!initted) return 0;
   initted = 0;
+  threadCancel();
 
   decoding_backend=0;
 #ifdef VPE_OMX_SUPPORT
@@ -201,6 +232,167 @@ int VideoVPEOGL::shutdown()
   return 1;
 }
 
+// Allocate the three GL_LUMINANCE textures backing one decoded YUV frame:
+// a full-resolution Y plane plus half-resolution U and V planes (4:2:0).
+// "stride" is the decoder line size and is used as the texture width so
+// rows can be uploaded later without repacking; the real picture width is
+// kept in outframe->width for the renderer. Always returns 1 (success).
+// NOTE(review): textures are created with NULL data and default filter
+// state; the render path sets MIN/MAG filters each frame.
+int VideoVPEOGL::AllocateYUVOglTexture(VPEOGLFrame* outframe,int width,int height,int stride)
+{
+	Log::getInstance()->log("Video", Log::NOTICE, "Allocate ogl texture");
+	// Y
+	glGenTextures(1, &outframe->textures[0]);
+	glBindTexture(GL_TEXTURE_2D, outframe->textures[0]);
+	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride, height, 0, GL_LUMINANCE,
+				GL_UNSIGNED_BYTE, NULL);
+	// U
+	glGenTextures(1, &outframe->textures[1]);
+	glBindTexture(GL_TEXTURE_2D, outframe->textures[1]);
+	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride>>1, height>>1, 0, GL_LUMINANCE,
+				GL_UNSIGNED_BYTE, NULL);
+	// V
+	glGenTextures(1, &outframe->textures[2]);
+	glBindTexture(GL_TEXTURE_2D, outframe->textures[2]);
+	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride>>1, height>>1, 0, GL_LUMINANCE,
+			GL_UNSIGNED_BYTE, NULL);
+	// remember the geometry so the upload thread can detect size changes
+	outframe->height=height;
+	outframe->width=width;
+	outframe->stride=stride;
+	return 1;
+}
+
+
+// Hand the oldest uploaded frame to the OSD render thread, or NULL if none
+// is ready. ogl_frame_outside flags that a frame is currently owned by the
+// caller so DeAllocateCodecsFFMPEG can wait for it to come back via
+// returnOGLFrame() before freeing the texture pool.
+VPEOGLFrame *VideoVPEOGL::getReadyOGLFrame(){
+	VPEOGLFrame *return_obj=NULL;
+	ogl_frame_mutex.Lock();
+	if (ready_ogl_frames.size()>0) {
+		return_obj=ready_ogl_frames.front();
+		ready_ogl_frames.pop_front();
+		ogl_frame_outside=true;
+	}
+	ogl_frame_mutex.Unlock();
+	return return_obj;
+}
+
+// Recycle a frame previously obtained with getReadyOGLFrame(): put it back
+// on the free list and clear the "outside" flag so deallocation may proceed.
+// A NULL argument is ignored.
+void VideoVPEOGL::returnOGLFrame(VPEOGLFrame *frame)
+{
+	ogl_frame_mutex.Lock();
+	if (frame) {
+		ogl_frame_outside=false;
+		free_ogl_frames.push_back(frame);
+	}
+	ogl_frame_mutex.Unlock();
+}
+
+void VideoVPEOGL::threadMethod()
+{
+       if (eglMakeCurrent(egl_display, egl_surface, egl_surface, egl_context)== EGL_FALSE) {
+               Log::getInstance()->log("Video", Log::WARN, "Making egl Current failed in thread %d",eglGetError());
+               return;
+       }
+       while (1) {
+               bool sleep=true;
+#ifdef VPE_FFMPEG_SUPPORT
+               dec_frame_ff_mutex.Lock();
+               if (dec_frame_ff_upload_pending.size()>0) {
+                       dec_frame_ff_uploading=dec_frame_ff_upload_pending.front();
+                       dec_frame_ff_upload_pending.pop_front();
+                       if (dec_frame_ff_upload_pending.size()>0) sleep=false;
+               }
+               dec_frame_ff_mutex.Unlock();
+               if (dec_frame_ff_uploading) {
+                       int width,height,pixfmt;
+                        //First get a free ogl image
+                       VPEOGLFrame* out_frame=NULL;
+                       while (!out_frame) {
+                               ogl_frame_mutex.Lock();
+                               if (all_ogl_frames.size()==0) {
+                                       ogl_frame_mutex.Unlock(); break;
+                               }
+
+                               if (free_ogl_frames.size()>0) {
+                                       width=ffwidth;
+                                       height=ffheight;
+                                       pixfmt=ffpixfmt;
+                                       out_frame=free_ogl_frames.front();
+                                       free_ogl_frames.pop_front();
+                               } else MILLISLEEP(2);
+                               ogl_frame_mutex.Unlock();
+                       }
+                       bool failed=false;
+                       if (out_frame) {
+                               if (out_frame->textures[0]==0 || out_frame->width!=width ||
+                                               out_frame->height!=height || out_frame->stride!=dec_frame_ff_uploading->linesize[0]) {
+                                       if (out_frame->textures[0]==0) {
+                                               glDeleteTextures(1,&out_frame->textures[0]);
+                                               out_frame->textures[0]=0;
+                                       }
+                                       if (out_frame->textures[1]==0) {
+                                               glDeleteTextures(1,&out_frame->textures[1]);
+                                               out_frame->textures[1]=0;
+                                       }
+                                       if (out_frame->textures[2]==0) {
+                                               glDeleteTextures(1,&out_frame->textures[2]);
+                                               out_frame->textures[2]=0;
+                                       }
+                                       if (!AllocateYUVOglTexture(out_frame,width,height,dec_frame_ff_uploading->linesize[0])) failed=true;
+                               }
+                               if (!failed) {
+                                       //up to now only YUV data, this is for reference only, since the pi is too slow.
+                                       glBindTexture(GL_TEXTURE_2D, out_frame->textures[0]);
+                                       glPixelStorei(GL_UNPACK_ALIGNMENT,1);
+
+                                       glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
+                                                       dec_frame_ff_uploading->linesize[0],height,
+                                                       GL_LUMINANCE,GL_UNSIGNED_BYTE,
+                                                       dec_frame_ff_uploading->data[0]);
+
+
+                                       glBindTexture(GL_TEXTURE_2D, out_frame->textures[1]);
+                                       glPixelStorei(GL_UNPACK_ALIGNMENT,1);
+                                       glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
+                                                       dec_frame_ff_uploading->linesize[1],height>>1,
+                                                       GL_LUMINANCE,GL_UNSIGNED_BYTE,
+                                                       dec_frame_ff_uploading->data[1]);
+
+                                       glBindTexture(GL_TEXTURE_2D, out_frame->textures[2]);
+                                       glPixelStorei(GL_UNPACK_ALIGNMENT,1);
+                                       glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
+                                                       dec_frame_ff_uploading->linesize[2],height>>1,
+                                                       GL_LUMINANCE,GL_UNSIGNED_BYTE,
+                                                       dec_frame_ff_uploading->data[2]);
+                                       ogl_frame_mutex.Lock();
+                                       ready_ogl_frames.push_back(out_frame);
+                                       ogl_frame_mutex.Unlock();
+                                       ((OsdOpenGL*)Osd::getInstance())->AdviseAboutNewFrame(); //Tell him, that we have a frame waiting
+
+                               }
+
+                               dec_frame_ff_mutex.Lock();
+                               dec_frame_ff_free.push_back(dec_frame_ff_uploading);
+                               dec_frame_ff_uploading=NULL;
+                               dec_frame_ff_mutex.Unlock();
+
+
+
+
+                       }
+
+
+
+
+               }
+#endif
+
+               if (sleep) threadWaitForSignal();
+               threadCheckExit();
+       }
+
+}
+
+// Thread teardown hook: unbind the upload context from this thread and drop
+// the reference to any in-flight frame. The AVFrame itself is not freed
+// here - it is owned by dec_frame_ff_all and released in
+// DeAllocateCodecsFFMPEG.
+void VideoVPEOGL::threadPostStopCleanup()
+{
+	eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT );
+#ifdef VPE_FFMPEG_SUPPORT
+	dec_frame_ff_uploading=NULL;
+#endif
+}
+
 
 
 
@@ -1136,6 +1328,7 @@ int VideoVPEOGL::DeAllocateCodecsOMX()
 
 int VideoVPEOGL::AllocateCodecsFFMPEG()
 {
+       ffmpeg_hastime=false;
        Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg");
        mpeg2codec_context_ff=avcodec_alloc_context();
        if (mpeg2codec_context_ff==NULL) {
@@ -1146,11 +1339,40 @@ int VideoVPEOGL::AllocateCodecsFFMPEG()
                Log::getInstance()->log("Video", Log::DEBUG, "Opening ffmpeg codec  failed");
                return 0;
        }
-       dec_frame_ff=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload
-       if (!dec_frame_ff) {
-               Log::getInstance()->log("Video", Log::DEBUG, "Allocating dec_frame  failed");
-               return 0;
+       Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark1");
+       dec_frame_ff_mutex.Lock();
+       Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark2");
+       for (int i=0;i<3;i++) {
+                       AVFrame *dec_frame_ff=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload
+                       if (!dec_frame_ff) {
+                               Log::getInstance()->log("Video", Log::DEBUG, "Allocating dec_frame  failed");
+                               return 0;
+                       }
+                       dec_frame_ff_all.push_back(dec_frame_ff);
+                       dec_frame_ff_free.push_back(dec_frame_ff);
        }
+       dec_frame_ff_decoding=NULL;
+       Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark 3");
+       dec_frame_ff_mutex.Unlock();
+
+       ogl_frame_mutex.Lock();
+       //Allocate texture structs, since we do not know the sizes, we do not allocate the textures yet
+       for (int i=0;i<3;i++) {
+               VPEOGLFrame *new_frame=(VPEOGLFrame *)malloc(sizeof(VPEOGLFrame));
+               new_frame->type=1; //1 = YUV, 2 RGB
+               new_frame->textures[0]=0;
+               new_frame->textures[1]=0;
+               new_frame->textures[2]=0;
+               new_frame->width=new_frame->height=0;
+               all_ogl_frames.push_back(new_frame);
+               free_ogl_frames.push_back(new_frame);
+
+       }
+       ogl_frame_outside=false;
+
+       ogl_frame_mutex.Unlock();
+
+       ffmpeg_running=true;
 
        return 1;
 
@@ -1158,11 +1380,55 @@ int VideoVPEOGL::AllocateCodecsFFMPEG()
 
 int VideoVPEOGL::DeAllocateCodecsFFMPEG()
 {
+       ffmpeg_running=false;
        Log::getInstance()->log("Video", Log::NOTICE, "DeAllocateCodecsFFmpeg");
-       if (dec_frame_ff) {
-               av_free(dec_frame_ff);
-               dec_frame_ff=NULL;
+       dec_frame_ff_mutex.Lock();
+       dec_frame_ff_upload_pending.clear();
+       dec_frame_ff_free.clear();
+       dec_frame_ff_mutex.Unlock();
+       while (dec_frame_ff_uploading) {
+               Log::getInstance()->log("Video", Log::NOTICE, "Wait for uploading to finish");
+               MILLISLEEP(20);
+       }
+       dec_frame_ff_mutex.Lock();
+       for (int i=0; i< dec_frame_ff_all.size();i++) {
+               av_free(dec_frame_ff_all[i]);
+       }
+
+       dec_frame_ff_all.clear();
+       dec_frame_ff_mutex.Unlock();
+       dec_frame_ff_decoding=NULL;
+
+       while (ogl_frame_outside) {
+               Log::getInstance()->log("Video", Log::NOTICE, "Wait for ogl frame from outside");
+               MILLISLEEP(20);
+       }
+
+       ((OsdOpenGL*)Osd::getInstance())->BeginPainting(); // get osd's context
+       ogl_frame_mutex.Lock();
+       for (int i=0; i< dec_frame_ff_all.size();i++) {
+               VPEOGLFrame * del_frame=all_ogl_frames[i];
+               if (del_frame->textures[0]==0) {
+                       glDeleteTextures(1,&del_frame->textures[0]);
+                       del_frame->textures[0]=0;
+               }
+               if (del_frame->textures[1]==0) {
+                       glDeleteTextures(1,&del_frame->textures[1]);
+                       del_frame->textures[1]=0;
+               }
+               if (del_frame->textures[2]==0) {
+                       glDeleteTextures(1,&del_frame->textures[2]);
+                       del_frame->textures[2]=0;
+               }
+               free(all_ogl_frames[i]);
        }
+       all_ogl_frames.clear();
+       free_ogl_frames.clear();
+       ready_ogl_frames.clear();
+       ogl_frame_mutex.Unlock();
+       ((OsdOpenGL*)Osd::getInstance())->EndPainting();
+
+
        if (mpeg2codec_context_ff) {
                avcodec_close(mpeg2codec_context_ff);
                av_free(mpeg2codec_context_ff);
@@ -1355,7 +1621,7 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet,
 
        OMX_ERRORTYPE error;
 
-       OMX_PARAM_PORTDEFINITIONTYPE port_image;
+/*     OMX_PARAM_PORTDEFINITIONTYPE port_image;
        memset(&port_image,0,sizeof(port_image));
        port_image.nSize=sizeof(port_image);
        port_image.nVersion.nVersion=OMX_VERSION;
@@ -1364,7 +1630,7 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet,
        if (error!= OMX_ErrorNone){
                Log::getInstance()->log("Video", Log::DEBUG, "OMX_GetParameter failed %x", error);
        }
-       Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight);
+       Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight);*/
 
        /*First Check, if we have an audio sample*/
        if (iframemode) {
@@ -1387,12 +1653,11 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet,
 
        /*Inspect PES-Header */
 
-       OMX_STATETYPE temp_state;
-       OMX_GetState(omx_vid_dec,&temp_state);
+//     OMX_STATETYPE temp_state;
+//     OMX_GetState(omx_vid_dec,&temp_state);
 
        if (*samplepos==0) {//stripheader
                headerstrip=buffer[packet.pos_buffer+8]+9/*is this right*/;
-               //headerstrip+=6; //h264
                *samplepos+=headerstrip;
                if ( packet.synched ) {
 
@@ -1509,32 +1774,15 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
                const UCHAR* buffer,
                UINT *samplepos)
 {
-
-#if 0
        //Later add fail back code for ffmpeg
        /*if (!videoon) {
                *samplepos+=packet.length;
                return packet.length;
        }*/
 
+       if (!ffmpeg_running) return 0; // if we are not runnig do not do this
 
-       if (!omx_running) return 0; // if we are not runnig do not do this
-
-
-       OMX_ERRORTYPE error;
-
-       OMX_PARAM_PORTDEFINITIONTYPE port_image;
-       memset(&port_image,0,sizeof(port_image));
-       port_image.nSize=sizeof(port_image);
-       port_image.nVersion.nVersion=OMX_VERSION;
-       port_image.nPortIndex =omx_codec_output_port;
-       error=OMX_GetParameter(omx_vid_dec,OMX_IndexParamPortDefinition, &port_image);
-       if (error!= OMX_ErrorNone){
-               Log::getInstance()->log("Video", Log::DEBUG, "OMX_GetParameter failed %x", error);
-       }
-       Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight);
 
-       /*First Check, if we have an audio sample*/
        if (iframemode) {
                //samplepos=0;
                MILLISLEEP(10);
@@ -1542,7 +1790,7 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
        }
 
        UINT headerstrip=0;
-       if (packet.disconti) {
+/*     if (packet.disconti) {
                firstsynched=false;
                if (cur_input_buf_omx) {
                        OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx);
@@ -1551,27 +1799,40 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
                        }
                        cur_input_buf_omx=NULL;
                }
-       }
+       }*/
 
        /*Inspect PES-Header */
+       if (!dec_frame_ff_decoding) {
+               dec_frame_ff_mutex.Lock();
+               if (dec_frame_ff_free.size()>0) {
+                       dec_frame_ff_decoding=dec_frame_ff_free.front();
+                       dec_frame_ff_free.pop_front();
+                       dec_frame_ff_mutex.Unlock();
+               } else {
+                       Log::getInstance()->log("Video", Log::DEBUG, "We have no free buffers");
+                       dec_frame_ff_mutex.Unlock();
+                       // No free Buffers
+                       return 0;
+               }
+       }
+
 
-       OMX_STATETYPE temp_state;
-       OMX_GetState(omx_vid_dec,&temp_state);
 
        if (*samplepos==0) {//stripheader
                headerstrip=buffer[packet.pos_buffer+8]+9/*is this right*/;
-               //headerstrip+=6; //h264
                *samplepos+=headerstrip;
                if ( packet.synched ) {
 
-                       if (cur_input_buf_omx) {
+                       /*if (cur_input_buf_omx) {
                                OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx);
                                if (error!=OMX_ErrorNone){
                                        Log::getInstance()->log("Video", Log::DEBUG, "OMX_EmptyThisBuffer failed %x", error);
                                }
 
                                cur_input_buf_omx=NULL;//write out old data
-                       }
+                       }*/
+                       ffmpeg_time=packet.presentation_time;
+                       ffmpeg_hastime=true;
                //      reftime1=packet.presentation_time;
                //      reftime2=reftime1+1;
                        firstsynched=true;
@@ -1583,31 +1844,17 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
                }
        }
 
-       if (!cur_input_buf_omx) {
-               input_bufs_omx_mutex.Lock();
-               if (input_bufs_omx_free.size()==0) {
-                       input_bufs_omx_mutex.Unlock();
-                       Log::getInstance()->log("Video", Log::DEBUG, "Deliver MediaPacket no free sample");
-                       return 0; // we do not have a free media sample
 
-               }
-               cur_input_buf_omx=input_bufs_omx_free.front();
-               cur_input_buf_omx->nFilledLen=0;
-               cur_input_buf_omx->nOffset=0;
-               cur_input_buf_omx->nTimeStamp=0;
-               input_bufs_omx_free.pop_front();
-               input_bufs_omx_mutex.Unlock();
-       }
 
 
 
 
-       if (cur_input_buf_omx->nFilledLen==0) {//will only be changed on first packet
+       /*if (cur_input_buf_omx->nFilledLen==0) {//will only be changed on first packet
                /*if (packet.disconti) {
                        ms->SetDiscontinuity(TRUE);
                } else {
                        ms->SetDiscontinuity(FALSE);
-               }*/
+               }*
                //if (packet.synched) {
 
                        //lastreftimePTS=packet.pts;
@@ -1625,49 +1872,51 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
                        //  ms->SetSyncPoint(TRUE);
                //}
 
-       }
+       }*/
        unsigned int haveToCopy=packet.length-*samplepos;
+       while (haveToCopy>0) {
+               int dec_bytes=0;
+               int frame_ready=0;
+
+       //      Log::getInstance()->log("Video", Log::DEBUG, "Push data to decoder");
+               dec_bytes=avcodec_decode_video(mpeg2codec_context_ff, dec_frame_ff_decoding,
+                               &frame_ready, buffer+packet.pos_buffer+*samplepos, haveToCopy);
+               if (dec_bytes<0) {
+                       Log::getInstance()->log("Video", Log::DEBUG, "Decoding frame failed %x", dec_bytes);
+                       return *samplepos;
+               }
+               *samplepos+=dec_bytes;
+               haveToCopy-=dec_bytes;
+               if (frame_ready) {
+               //      Log::getInstance()->log("Video", Log::DEBUG, "We have a frame push it to osd");
+
+                       dec_frame_ff_mutex.Lock();
+                       ffwidth=mpeg2codec_context_ff->width;
+                       ffheight=mpeg2codec_context_ff->height;
+                       ffpixfmt=mpeg2codec_context_ff->pix_fmt;
+               //      Log::getInstance()->log("Video", Log::DEBUG, "Frame info %d %d %d",ffwidth,ffheight,ffpixfmt);
+
+                       dec_frame_ff_upload_pending.push_back(dec_frame_ff_decoding);
+                       dec_frame_ff_decoding=NULL;
+                       if (dec_frame_ff_free.size()>0) {
+                               dec_frame_ff_decoding=dec_frame_ff_free.front();
+                               dec_frame_ff_free.pop_front();
+                               dec_frame_ff_mutex.Unlock();
+                               threadSignal();
+                               ffmpeg_hastime=false;
+                       } else {
+                               ffmpeg_hastime=false;
+                               dec_frame_ff_mutex.Unlock();
+                               // No free Buffers
+                               return *samplepos;
+                       }
 
-       while (haveToCopy> (cur_input_buf_omx->nAllocLen-cur_input_buf_omx->nFilledLen)) {
-               unsigned int cancopy=cur_input_buf_omx->nAllocLen-cur_input_buf_omx->nFilledLen;
-               memcpy(cur_input_buf_omx->pBuffer+cur_input_buf_omx->nFilledLen,buffer+packet.pos_buffer+*samplepos,cancopy);
-               haveToCopy-=cancopy;
-               cur_input_buf_omx->nFilledLen+=cancopy;
-               *samplepos+=cancopy;
-               // push old buffer out
 
-               OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx);
-               if (error!=OMX_ErrorNone){
-                       Log::getInstance()->log("Video", Log::DEBUG, "OMX_EmptyThisBuffer failed %x", error);
                }
-               // get5 new buffer
-               input_bufs_omx_mutex.Lock();
-               if (input_bufs_omx_free.size()==0) {
-                       input_bufs_omx_mutex.Unlock();
-                       //Log::getInstance()->log("Video", Log::DEBUG, "Deliver MediaPacket no free sample");
-                       return *samplepos; // we do not have a free media sample
-               }
-               cur_input_buf_omx=input_bufs_omx_free.front();
-               cur_input_buf_omx->nFilledLen=0;
-               cur_input_buf_omx->nOffset=0;
-               cur_input_buf_omx->nTimeStamp=0;
-               input_bufs_omx_free.pop_front();
-               input_bufs_omx_mutex.Unlock();
-
-               cur_input_buf_omx->nFlags=OMX_BUFFERFLAG_TIME_UNKNOWN;
 
        }
-       memcpy(cur_input_buf_omx->pBuffer+cur_input_buf_omx->nFilledLen,
-                       buffer+packet.pos_buffer+*samplepos,haveToCopy);
-       cur_input_buf_omx->nFilledLen+=haveToCopy;
-
-
-
-       *samplepos+=haveToCopy;
-
        return *samplepos;
-#endif
-       return 0;
+
 
 }
 
index 855b91aaac46d2d3194950bc23a098d27b81b8d0..aab1590bc2a4fa96ca01ebb07e3f4f0d6d98054f 100755 (executable)
@@ -24,6 +24,7 @@
 \r
 #include "mutex.h"\r
 \r
+\r
 #include <stdio.h>\r
 #include <unistd.h>\r
 #include <fcntl.h>\r
@@ -37,6 +38,7 @@
 \r
 #include "defines.h"\r
 #include "video.h"\r
+#include "threadsystem.h"\r
 \r
 //#define EGL_EGLEXT_PROTOTYPES\r
 \r
@@ -74,19 +76,21 @@ extern "C" {
 \r
 \r
 \r
-/*\r
+\r
 struct VPEOGLFrame {\r
-       int type; //1 = RGB, 2 YUV\r
+       int type; //1 = YUV, 2 RGB\r
        GLuint textures[3]; // 0=RGB or Y, 1=U 2=V\r
-\r
+       int width, height;\r
+       int stride;\r
+/*\r
 #ifdef VPE_OMX_SUPPORT\r
        //OMX\r
        EGLImageKHR khr_image;\r
        OMX_BUFFERHEADERTYPE *omx_buf;\r
-#endif\r
-};*/\r
+#endif*/\r
+};\r
 \r
-class VideoVPEOGL : public Video\r
+class VideoVPEOGL : public Video, public Thread_TYPE\r
 {\r
   public:\r
     VideoVPEOGL();\r
@@ -132,6 +136,8 @@ class VideoVPEOGL : public Video
        void WriteOutPATPMT();\r
 \r
 \r
+       VPEOGLFrame *getReadyOGLFrame();\r
+       void returnOGLFrame(VPEOGLFrame *frame);\r
 \r
 \r
 \r
@@ -225,19 +231,42 @@ class VideoVPEOGL : public Video
 #ifdef VPE_FFMPEG_SUPPORT\r
        AVCodec *mpeg2codec_ff;\r
        AVCodecContext *mpeg2codec_context_ff;\r
-       AVFrame *dec_frame_ff;\r
+       vector<AVFrame*> dec_frame_ff_all;\r
+       list<AVFrame*> dec_frame_ff_free;\r
+       list<AVFrame*> dec_frame_ff_upload_pending;\r
+       AVFrame* dec_frame_ff_uploading;\r
+       AVFrame* dec_frame_ff_decoding;\r
+       Mutex dec_frame_ff_mutex;\r
 \r
 \r
 \r
        UINT DeliverMediaPacketFFMPEG(MediaPacket packet,const UCHAR* buffer,UINT *samplepos);\r
        int AllocateCodecsFFMPEG();\r
        int DeAllocateCodecsFFMPEG();\r
+       bool ffmpeg_running;\r
+       bool ffmpeg_hastime; // signals if a pts is now\r
+       long long ffmpeg_time;\r
+\r
+       int ffwidth,ffheight,ffpixfmt;\r
+\r
        \r
 #endif\r
 \r
+       vector<VPEOGLFrame*> all_ogl_frames;\r
+       list<VPEOGLFrame*> free_ogl_frames;\r
+       list<VPEOGLFrame*> ready_ogl_frames;\r
+       bool ogl_frame_outside;\r
+       Mutex ogl_frame_mutex;\r
+\r
+       int AllocateYUVOglTexture(VPEOGLFrame* outframe,int width,int height,int stride);\r
+\r
+       virtual void threadMethod();\r
+       virtual void threadPostStopCleanup();\r
+\r
+\r
 \r
    bool firstsynched;\r
-         int lastpacketnum;\r
+   int lastpacketnum;\r
 \r
        EGLDisplay egl_display;\r
        EGLSurface egl_surface;\r