\r
#include "shaders/generic__vertex_shader.h"\r
#include "shaders/osd__frag_shader.h"\r
+#include "shaders/frame__frag_shader.h"\r
\r
#define BACKBUFFER_WIDTH 1920\r
#define BACKBUFFER_HEIGHT 1080\r
osd_shader=0;\r
gen_shader=0;\r
osd_program=0;\r
+ frame_program=0;\r
\r
#ifdef BENCHMARK_FPS\r
last_benchmark_time=getTimeMS();\r
\r
const EGLint attributs[]={\r
EGL_RED_SIZE,8,EGL_GREEN_SIZE, 8,EGL_BLUE_SIZE, 8,EGL_ALPHA_SIZE, 8,\r
- EGL_SURFACE_TYPE, EGL_WINDOW_BIT,\r
+ EGL_SURFACE_TYPE, EGL_WINDOW_BIT|EGL_PBUFFER_BIT,\r
EGL_CONFORMANT, EGL_OPENGL_ES2_BIT,\r
EGL_NONE\r
}; // Here, we might have to select the resolution!\r
gen_shader=CreateShader(generic_vertex_shader, GL_VERTEX_SHADER);\r
osd_shader=CreateShader(osd_frag_shader, GL_FRAGMENT_SHADER);\r
\r
-\r
+ // Create the program for osd rendering\r
osd_program=glCreateProgram();\r
if (osd_program==0) {\r
Log::getInstance()->log("OSD", Log::WARN, "Creating glsl program failed!%d",glGetError());\r
glDeleteProgram(osd_program);\r
return 0;\r
}\r
+ // create the program for yuv frame rendering\r
+ frame_shader=CreateShader(frame_frag_shader, GL_FRAGMENT_SHADER);\r
+\r
+ frame_program=glCreateProgram();\r
+ if (frame_program==0) {\r
+ Log::getInstance()->log("OSD", Log::WARN, "Creating glsl program failed!%d",glGetError());\r
+ return 0;\r
+ }\r
+ glAttachShader(frame_program,gen_shader);\r
+ glAttachShader(frame_program,frame_shader);\r
+ glBindAttribLocation(frame_program,0,"vec_pos");\r
+ glBindAttribLocation(frame_program,1,"tex_coord");\r
+\r
+ frame_sampler_locY=glGetUniformLocation(frame_program,"textureY");\r
+ //frame_sampler_locU=glGetUniformLocation(frame_program,"textureU");\r
+ // frame_sampler_locV=glGetUniformLocation(frame_program,"textureV");\r
+\r
+ glLinkProgram(frame_program);\r
+ //GLint link_status;\r
+ glGetShaderiv(frame_program,GL_LINK_STATUS, &link_status);\r
+\r
+ if (!link_status) {\r
+ char buffer[1024];\r
+ glGetProgramInfoLog(frame_program,1024,NULL,buffer);\r
+ Log::getInstance()->log("OSD", Log::WARN, "Compiling Programm failed!");\r
+ Log::getInstance()->log("OSD", Log::WARN, "%s",buffer);\r
+ glDeleteProgram(frame_program);\r
+ return 0;\r
+ }\r
+\r
+\r
\r
glClearColor(0.0f,0.0f,0.0f,1.f);\r
\r
//glmutex.Lock();\r
\r
//glmutex.Unlock();\r
+ VPEOGLFrame *frame=NULL;\r
+ struct timespec ts;\r
+ ts.tv_sec=0;\r
+ ts.tv_nsec=0;\r
+ VideoVPEOGL* video =(VideoVPEOGL*) Video::getInstance();\r
while (true)\r
{\r
+ ts.tv_nsec=10*1000000LL;\r
unsigned int waittime=10;\r
if (initted){\r
- // if (evrstate==EVR_pres_off || evrstate==EVR_pres_pause)\r
- // {\r
- Render();\r
- //TODO get surfaces from Video object\r
- /* } else if (evrstate==EVR_pres_started)\r
- {\r
- LPDIRECT3DSURFACE9 surf;\r
- if (dsallocator) dsallocator->GetNextSurface(&surf,&waittime);\r
- if (surf==NULL)\r
- {\r
- Render();\r
- }\r
- else\r
- {\r
- RenderDS(surf);\r
- surf->Release();\r
- if (dsallocator) dsallocator->DiscardSurfaceandgetWait(&waittime);\r
- }\r
- }*/\r
+ if (!frame) frame=video->getReadyOGLFrame();\r
+ if (frame) {\r
+ InternalRendering(frame);\r
+ lastrendertime=getTimeMS();\r
+ video->returnOGLFrame(frame); //please recycle it\r
+ frame=NULL;\r
+ } else {\r
+ long long time1=getTimeMS();\r
+ if ((time1-lastrendertime)>200) {//5 fps for OSD updates are enough, avoids tearing\r
+ InternalRendering(NULL);\r
+ lastrendertime=getTimeMS();\r
+ }\r
+ }\r
+ frame=video->getReadyOGLFrame();\r
}\r
+ if (frame) ts.tv_nsec=0;\r
threadCheckExit();\r
- if (waittime!=0) MILLISLEEP(min(10,waittime));\r
+ if (ts.tv_nsec!=0) threadWaitForSignalTimed(&ts);\r
//Sleep(1);\r
}\r
//eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT );\r
//goo;\r
}\r
\r
-\r
+/*\r
// This function is called from the WinMain function in order to get Screen updates\r
void OsdOpenGL::Render()\r
{\r
if (!initted) return ;\r
+ VPEOGLFrame* frame=NULL;\r
if (external_driving) {\r
long long time1=getTimeMS();\r
-\r
if ((time1-lastrendertime)>200) {//5 fps for OSD updates are enough, avoids tearing\r
InternalRendering(NULL);\r
lastrendertime=getTimeMS();\r
struct timespec ts;\r
clock_gettime(CLOCK_MONOTONIC, &ts);\r
long long time1=ts.tv_sec*1000+ts.tv_nsec/1000000LL;\r
- if ((time1-lastrendertime)>50) {//10 fps for OSD updates are enough, avoids tearing\r
+ if ((time1-lastrendertime)>100) {//10 fps for OSD updates are enough, avoids tearing\r
InternalRendering(NULL);\r
lastrendertime=getTimeMS();\r
} else {\r
}\r
}\r
\r
-void OsdOpenGL::RenderDS(GLuint present){\r
+void OsdOpenGL::RenderDS(VPEOGLFrame* frame){\r
if (!initted) return; \r
if (external_driving) {\r
- InternalRendering(present);\r
+ InternalRendering(frame);\r
lastrendertime=getTimeMS();\r
}\r
-}\r
+}*/\r
\r
\r
-void OsdOpenGL::InternalRendering(GLuint present){\r
+void OsdOpenGL::InternalRendering(VPEOGLFrame* frame){\r
BeginPainting();\r
/* HRESULT losty=d3ddevice->TestCooperativeLevel();\r
if (losty==D3DERR_DEVICELOST) {\r
glViewport(0, 0, display_width,display_height);\r
\r
glClear(GL_COLOR_BUFFER_BIT);\r
- glUseProgram(osd_program);\r
\r
+\r
+\r
+\r
+\r
+ if (frame) {\r
+ glUseProgram(frame_program);\r
+ // Log::getInstance()->log("OSD", Log::WARN, "mark1 glerror %x",glGetError());\r
+ glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices);\r
+ glEnableVertexAttribArray(0);\r
+ glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u));\r
+ glEnableVertexAttribArray(1);\r
+ glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices);\r
+ glEnableVertexAttribArray(0);\r
+ glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u));\r
+ glEnableVertexAttribArray(1);\r
+\r
+ glActiveTexture(GL_TEXTURE0);\r
+ glBindTexture(GL_TEXTURE_2D,frame->textures[0]);\r
+ //Log::getInstance()->log("OSD", Log::WARN, "mark2 glerror %x",glGetError());\r
+ // glActiveTexture(GL_TEXTURE1);\r
+ // Log::getInstance()->log("OSD", Log::WARN, "mark3 glerror %x",glGetError());\r
+ // glBindTexture(GL_TEXTURE_2D,frame->textures[1]);\r
+ // Log::getInstance()->log("OSD", Log::WARN, "mark4 glerror %x",glGetError());\r
+ // glActiveTexture(GL_TEXTURE2);\r
+ // Log::getInstance()->log("OSD", Log::WARN, "mark5 glerror %x",glGetError());\r
+ // glBindTexture(GL_TEXTURE_2D,frame->textures[2]);\r
+\r
+ // Log::getInstance()->log("OSD", Log::WARN, "mark6 glerror %x",glGetError());\r
+ glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);\r
+ // Log::getInstance()->log("OSD", Log::WARN, "mark7 glerror %x",glGetError());\r
+ glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);\r
+ //Log::getInstance()->log("OSD", Log::WARN, "mark8 glerror %x",glGetError());\r
+\r
+ glUniform1i(frame_sampler_locY,0);\r
+ //glUniform1i(frame_sampler_locU,1);\r
+ //glUniform1i(frame_sampler_locV,2);\r
+\r
+\r
+ glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\r
+ glEnable(GL_BLEND);\r
+ glBlendFuncSeparate (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA,GL_ZERO,GL_ONE);\r
+\r
+ }\r
+\r
+ glUseProgram(osd_program);\r
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), osdvertices);\r
glEnableVertexAttribArray(0);\r
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,sizeof(OSDVERTEX), &(osdvertices[0].u));\r
glEnableVertexAttribArray(1);\r
\r
\r
-\r
-\r
glActiveTexture(GL_TEXTURE0);\r
glBindTexture(GL_TEXTURE_2D,((SurfaceOpenGL*)screen)->getTexture());\r
\r
#ifdef VPE_FFMPEG_SUPPORT
mpeg2codec_context_ff=NULL;
- dec_frame_ff=NULL;
+ ffmpeg_running=false;
+ dec_frame_ff_uploading=NULL;
+ dec_frame_ff_decoding=NULL;
#endif
}
/* new stuff */
- stop();
+
+
+ //stop();
+
return 1;
EGLDisplay i_egl_display;
EGLSurface i_egl_surface;
EGLContext i_egl_context;
+ EGLConfig i_egl_config;
OsdOpenGL *osd=(OsdOpenGL*)osd->getInstance();
- osd->getEGLObjs(&i_egl_display,&i_egl_surface,&i_egl_context);
+ osd->getEGLObjs(&i_egl_display,&i_egl_surface,&i_egl_context, &i_egl_config);
+ const EGLint attr_context[]={
+ EGL_CONTEXT_CLIENT_VERSION,2,
+ EGL_NONE
+ };
egl_display=i_egl_display;
- egl_surface=i_egl_surface;
- egl_context=i_egl_context;
+ egl_context=eglCreateContext(egl_display,i_egl_config,i_egl_context,attr_context);
+ if (egl_context==EGL_NO_CONTEXT) {
+ Log::getInstance()->log("Video", Log::WARN, "Creating egl context failed! %d",eglGetError());
+ return 0;
+ }
+ // We create a dummy surface here, in order to allow two contexts
+ const EGLint attr_pbuffer[]={
+ EGL_WIDTH, 1, EGL_HEIGHT,1,
+ EGL_NONE
+ };
+ egl_surface=eglCreatePbufferSurface(egl_display,i_egl_config,attr_pbuffer);
+ if (egl_surface==EGL_NO_SURFACE) {
+ Log::getInstance()->log("Video", Log::WARN, "Creating egl pbuffer failed! %d",eglGetError());
+ return 0;
+ }
+
+
+
+
+ //egl_surface=i_egl_surface;
+ //egl_context=i_egl_context;
#ifdef VPE_OMX_SUPPORT
}
#endif
+ threadStart();
return 1;
}
{
if (!initted) return 0;
initted = 0;
+ threadCancel();
decoding_backend=0;
#ifdef VPE_OMX_SUPPORT
return 1;
}
// Allocate the per-plane OpenGL textures for one decoded YUV frame.
// textures[0] holds Y at stride x height; textures[1]/[2] hold U/V at
// half resolution in both dimensions (stride>>1 x height>>1, i.e. a
// 4:2:0 layout -- assumes stride and height are even; TODO confirm
// behaviour for odd sizes).
// The texture width is the decoder line stride, not the display width,
// so rows can later be uploaded directly from the AVFrame data pointers
// without repacking. Passing NULL to glTexImage2D allocates storage only.
// Always returns 1; GL allocation failures are not detected here.
int VideoVPEOGL::AllocateYUVOglTexture(VPEOGLFrame* outframe,int width,int height,int stride)
{
  Log::getInstance()->log("Video", Log::NOTICE, "Allocate ogl texture");
  // Y
  glGenTextures(1, &outframe->textures[0]);
  glBindTexture(GL_TEXTURE_2D, outframe->textures[0]);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride, height, 0, GL_LUMINANCE,
      GL_UNSIGNED_BYTE, NULL);
  // U
  glGenTextures(1, &outframe->textures[1]);
  glBindTexture(GL_TEXTURE_2D, outframe->textures[1]);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride>>1, height>>1, 0, GL_LUMINANCE,
      GL_UNSIGNED_BYTE, NULL);
  // V
  glGenTextures(1, &outframe->textures[2]);
  glBindTexture(GL_TEXTURE_2D, outframe->textures[2]);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride>>1, height>>1, 0, GL_LUMINANCE,
      GL_UNSIGNED_BYTE, NULL);
  // Remember the geometry these textures were created for, so the upload
  // thread can detect size changes and reallocate (see threadMethod()).
  outframe->height=height;
  outframe->width=width;
  outframe->stride=stride;
  return 1;
}
+
+
+VPEOGLFrame *VideoVPEOGL::getReadyOGLFrame(){
+ VPEOGLFrame *return_obj=NULL;
+ ogl_frame_mutex.Lock();
+ if (ready_ogl_frames.size()>0) {
+ return_obj=ready_ogl_frames.front();
+ ready_ogl_frames.pop_front();
+ ogl_frame_outside=true;
+ }
+ ogl_frame_mutex.Unlock();
+ return return_obj;
+}
+
+void VideoVPEOGL::returnOGLFrame(VPEOGLFrame *frame)
+{
+ ogl_frame_mutex.Lock();
+ if (frame) {
+ ogl_frame_outside=false;
+ free_ogl_frames.push_back(frame);
+ }
+ ogl_frame_mutex.Unlock();
+}
+
+void VideoVPEOGL::threadMethod()
+{
+ if (eglMakeCurrent(egl_display, egl_surface, egl_surface, egl_context)== EGL_FALSE) {
+ Log::getInstance()->log("Video", Log::WARN, "Making egl Current failed in thread %d",eglGetError());
+ return;
+ }
+ while (1) {
+ bool sleep=true;
+#ifdef VPE_FFMPEG_SUPPORT
+ dec_frame_ff_mutex.Lock();
+ if (dec_frame_ff_upload_pending.size()>0) {
+ dec_frame_ff_uploading=dec_frame_ff_upload_pending.front();
+ dec_frame_ff_upload_pending.pop_front();
+ if (dec_frame_ff_upload_pending.size()>0) sleep=false;
+ }
+ dec_frame_ff_mutex.Unlock();
+ if (dec_frame_ff_uploading) {
+ int width,height,pixfmt;
+ //First get a free ogl image
+ VPEOGLFrame* out_frame=NULL;
+ while (!out_frame) {
+ ogl_frame_mutex.Lock();
+ if (all_ogl_frames.size()==0) {
+ ogl_frame_mutex.Unlock(); break;
+ }
+
+ if (free_ogl_frames.size()>0) {
+ width=ffwidth;
+ height=ffheight;
+ pixfmt=ffpixfmt;
+ out_frame=free_ogl_frames.front();
+ free_ogl_frames.pop_front();
+ } else MILLISLEEP(2);
+ ogl_frame_mutex.Unlock();
+ }
+ bool failed=false;
+ if (out_frame) {
+ if (out_frame->textures[0]==0 || out_frame->width!=width ||
+ out_frame->height!=height || out_frame->stride!=dec_frame_ff_uploading->linesize[0]) {
+ if (out_frame->textures[0]==0) {
+ glDeleteTextures(1,&out_frame->textures[0]);
+ out_frame->textures[0]=0;
+ }
+ if (out_frame->textures[1]==0) {
+ glDeleteTextures(1,&out_frame->textures[1]);
+ out_frame->textures[1]=0;
+ }
+ if (out_frame->textures[2]==0) {
+ glDeleteTextures(1,&out_frame->textures[2]);
+ out_frame->textures[2]=0;
+ }
+ if (!AllocateYUVOglTexture(out_frame,width,height,dec_frame_ff_uploading->linesize[0])) failed=true;
+ }
+ if (!failed) {
+ //up to now only YUV data, this is for reference only, since the pi is too slow.
+ glBindTexture(GL_TEXTURE_2D, out_frame->textures[0]);
+ glPixelStorei(GL_UNPACK_ALIGNMENT,1);
+
+ glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
+ dec_frame_ff_uploading->linesize[0],height,
+ GL_LUMINANCE,GL_UNSIGNED_BYTE,
+ dec_frame_ff_uploading->data[0]);
+
+
+ glBindTexture(GL_TEXTURE_2D, out_frame->textures[1]);
+ glPixelStorei(GL_UNPACK_ALIGNMENT,1);
+ glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
+ dec_frame_ff_uploading->linesize[1],height>>1,
+ GL_LUMINANCE,GL_UNSIGNED_BYTE,
+ dec_frame_ff_uploading->data[1]);
+
+ glBindTexture(GL_TEXTURE_2D, out_frame->textures[2]);
+ glPixelStorei(GL_UNPACK_ALIGNMENT,1);
+ glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
+ dec_frame_ff_uploading->linesize[2],height>>1,
+ GL_LUMINANCE,GL_UNSIGNED_BYTE,
+ dec_frame_ff_uploading->data[2]);
+ ogl_frame_mutex.Lock();
+ ready_ogl_frames.push_back(out_frame);
+ ogl_frame_mutex.Unlock();
+ ((OsdOpenGL*)Osd::getInstance())->AdviseAboutNewFrame(); //Tell him, that we have a frame waiting
+
+ }
+
+ dec_frame_ff_mutex.Lock();
+ dec_frame_ff_free.push_back(dec_frame_ff_uploading);
+ dec_frame_ff_uploading=NULL;
+ dec_frame_ff_mutex.Unlock();
+
+
+
+
+ }
+
+
+
+
+ }
+#endif
+
+ if (sleep) threadWaitForSignal();
+ threadCheckExit();
+ }
+
+}
+
// Runs after the upload thread has stopped: detach this thread's EGL
// context/surface so the display connection holds no stale binding.
void VideoVPEOGL::threadPostStopCleanup()
{
  eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT );
#ifdef VPE_FFMPEG_SUPPORT
  // NOTE(review): cleared without dec_frame_ff_mutex held, and
  // DeAllocateCodecsFFMPEG busy-waits on this pointer -- presumably safe
  // because the thread is already joined when this runs; confirm.
  dec_frame_ff_uploading=NULL;
#endif
}
+
int VideoVPEOGL::AllocateCodecsFFMPEG()
{
+ ffmpeg_hastime=false;
Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg");
mpeg2codec_context_ff=avcodec_alloc_context();
if (mpeg2codec_context_ff==NULL) {
Log::getInstance()->log("Video", Log::DEBUG, "Opening ffmpeg codec failed");
return 0;
}
- dec_frame_ff=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload
- if (!dec_frame_ff) {
- Log::getInstance()->log("Video", Log::DEBUG, "Allocating dec_frame failed");
- return 0;
+ Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark1");
+ dec_frame_ff_mutex.Lock();
+ Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark2");
+ for (int i=0;i<3;i++) {
+ AVFrame *dec_frame_ff=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload
+ if (!dec_frame_ff) {
+ Log::getInstance()->log("Video", Log::DEBUG, "Allocating dec_frame failed");
+ return 0;
+ }
+ dec_frame_ff_all.push_back(dec_frame_ff);
+ dec_frame_ff_free.push_back(dec_frame_ff);
}
+ dec_frame_ff_decoding=NULL;
+ Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg mark 3");
+ dec_frame_ff_mutex.Unlock();
+
+ ogl_frame_mutex.Lock();
+ //Allocate texture structs, since we do not know the sizes, we do not allocate the textures yet
+ for (int i=0;i<3;i++) {
+ VPEOGLFrame *new_frame=(VPEOGLFrame *)malloc(sizeof(VPEOGLFrame));
+ new_frame->type=1; //1 = YUV, 2 RGB
+ new_frame->textures[0]=0;
+ new_frame->textures[1]=0;
+ new_frame->textures[2]=0;
+ new_frame->width=new_frame->height=0;
+ all_ogl_frames.push_back(new_frame);
+ free_ogl_frames.push_back(new_frame);
+
+ }
+ ogl_frame_outside=false;
+
+ ogl_frame_mutex.Unlock();
+
+ ffmpeg_running=true;
return 1;
int VideoVPEOGL::DeAllocateCodecsFFMPEG()
{
+ ffmpeg_running=false;
Log::getInstance()->log("Video", Log::NOTICE, "DeAllocateCodecsFFmpeg");
- if (dec_frame_ff) {
- av_free(dec_frame_ff);
- dec_frame_ff=NULL;
+ dec_frame_ff_mutex.Lock();
+ dec_frame_ff_upload_pending.clear();
+ dec_frame_ff_free.clear();
+ dec_frame_ff_mutex.Unlock();
+ while (dec_frame_ff_uploading) {
+ Log::getInstance()->log("Video", Log::NOTICE, "Wait for uploading to finish");
+ MILLISLEEP(20);
+ }
+ dec_frame_ff_mutex.Lock();
+ for (int i=0; i< dec_frame_ff_all.size();i++) {
+ av_free(dec_frame_ff_all[i]);
+ }
+
+ dec_frame_ff_all.clear();
+ dec_frame_ff_mutex.Unlock();
+ dec_frame_ff_decoding=NULL;
+
+ while (ogl_frame_outside) {
+ Log::getInstance()->log("Video", Log::NOTICE, "Wait for ogl frame from outside");
+ MILLISLEEP(20);
+ }
+
+ ((OsdOpenGL*)Osd::getInstance())->BeginPainting(); // get osd's context
+ ogl_frame_mutex.Lock();
+ for (int i=0; i< dec_frame_ff_all.size();i++) {
+ VPEOGLFrame * del_frame=all_ogl_frames[i];
+ if (del_frame->textures[0]==0) {
+ glDeleteTextures(1,&del_frame->textures[0]);
+ del_frame->textures[0]=0;
+ }
+ if (del_frame->textures[1]==0) {
+ glDeleteTextures(1,&del_frame->textures[1]);
+ del_frame->textures[1]=0;
+ }
+ if (del_frame->textures[2]==0) {
+ glDeleteTextures(1,&del_frame->textures[2]);
+ del_frame->textures[2]=0;
+ }
+ free(all_ogl_frames[i]);
}
+ all_ogl_frames.clear();
+ free_ogl_frames.clear();
+ ready_ogl_frames.clear();
+ ogl_frame_mutex.Unlock();
+ ((OsdOpenGL*)Osd::getInstance())->EndPainting();
+
+
if (mpeg2codec_context_ff) {
avcodec_close(mpeg2codec_context_ff);
av_free(mpeg2codec_context_ff);
OMX_ERRORTYPE error;
- OMX_PARAM_PORTDEFINITIONTYPE port_image;
+/* OMX_PARAM_PORTDEFINITIONTYPE port_image;
memset(&port_image,0,sizeof(port_image));
port_image.nSize=sizeof(port_image);
port_image.nVersion.nVersion=OMX_VERSION;
if (error!= OMX_ErrorNone){
Log::getInstance()->log("Video", Log::DEBUG, "OMX_GetParameter failed %x", error);
}
- Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight);
+ Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight);*/
/*First Check, if we have an audio sample*/
if (iframemode) {
/*Inspect PES-Header */
- OMX_STATETYPE temp_state;
- OMX_GetState(omx_vid_dec,&temp_state);
+// OMX_STATETYPE temp_state;
+// OMX_GetState(omx_vid_dec,&temp_state);
if (*samplepos==0) {//stripheader
headerstrip=buffer[packet.pos_buffer+8]+9/*is this right*/;
- //headerstrip+=6; //h264
*samplepos+=headerstrip;
if ( packet.synched ) {
const UCHAR* buffer,
UINT *samplepos)
{
-
-#if 0
//Later add fail back code for ffmpeg
/*if (!videoon) {
*samplepos+=packet.length;
return packet.length;
}*/
+ if (!ffmpeg_running) return 0; // if we are not runnig do not do this
- if (!omx_running) return 0; // if we are not runnig do not do this
-
-
- OMX_ERRORTYPE error;
-
- OMX_PARAM_PORTDEFINITIONTYPE port_image;
- memset(&port_image,0,sizeof(port_image));
- port_image.nSize=sizeof(port_image);
- port_image.nVersion.nVersion=OMX_VERSION;
- port_image.nPortIndex =omx_codec_output_port;
- error=OMX_GetParameter(omx_vid_dec,OMX_IndexParamPortDefinition, &port_image);
- if (error!= OMX_ErrorNone){
- Log::getInstance()->log("Video", Log::DEBUG, "OMX_GetParameter failed %x", error);
- }
- Log::getInstance()->log("Video", Log::DEBUG, "Image port %d %d", port_image.format.video.nFrameWidth , port_image.format.video.nFrameHeight);
- /*First Check, if we have an audio sample*/
if (iframemode) {
//samplepos=0;
MILLISLEEP(10);
}
UINT headerstrip=0;
- if (packet.disconti) {
+/* if (packet.disconti) {
firstsynched=false;
if (cur_input_buf_omx) {
OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx);
}
cur_input_buf_omx=NULL;
}
- }
+ }*/
/*Inspect PES-Header */
+ if (!dec_frame_ff_decoding) {
+ dec_frame_ff_mutex.Lock();
+ if (dec_frame_ff_free.size()>0) {
+ dec_frame_ff_decoding=dec_frame_ff_free.front();
+ dec_frame_ff_free.pop_front();
+ dec_frame_ff_mutex.Unlock();
+ } else {
+ Log::getInstance()->log("Video", Log::DEBUG, "We have no free buffers");
+ dec_frame_ff_mutex.Unlock();
+ // No free Buffers
+ return 0;
+ }
+ }
+
- OMX_STATETYPE temp_state;
- OMX_GetState(omx_vid_dec,&temp_state);
if (*samplepos==0) {//stripheader
headerstrip=buffer[packet.pos_buffer+8]+9/*is this right*/;
- //headerstrip+=6; //h264
*samplepos+=headerstrip;
if ( packet.synched ) {
- if (cur_input_buf_omx) {
+ /*if (cur_input_buf_omx) {
OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx);
if (error!=OMX_ErrorNone){
Log::getInstance()->log("Video", Log::DEBUG, "OMX_EmptyThisBuffer failed %x", error);
}
cur_input_buf_omx=NULL;//write out old data
- }
+ }*/
+ ffmpeg_time=packet.presentation_time;
+ ffmpeg_hastime=true;
// reftime1=packet.presentation_time;
// reftime2=reftime1+1;
firstsynched=true;
}
}
- if (!cur_input_buf_omx) {
- input_bufs_omx_mutex.Lock();
- if (input_bufs_omx_free.size()==0) {
- input_bufs_omx_mutex.Unlock();
- Log::getInstance()->log("Video", Log::DEBUG, "Deliver MediaPacket no free sample");
- return 0; // we do not have a free media sample
- }
- cur_input_buf_omx=input_bufs_omx_free.front();
- cur_input_buf_omx->nFilledLen=0;
- cur_input_buf_omx->nOffset=0;
- cur_input_buf_omx->nTimeStamp=0;
- input_bufs_omx_free.pop_front();
- input_bufs_omx_mutex.Unlock();
- }
- if (cur_input_buf_omx->nFilledLen==0) {//will only be changed on first packet
+ /*if (cur_input_buf_omx->nFilledLen==0) {//will only be changed on first packet
/*if (packet.disconti) {
ms->SetDiscontinuity(TRUE);
} else {
ms->SetDiscontinuity(FALSE);
- }*/
+ }*
//if (packet.synched) {
//lastreftimePTS=packet.pts;
// ms->SetSyncPoint(TRUE);
//}
- }
+ }*/
unsigned int haveToCopy=packet.length-*samplepos;
+ while (haveToCopy>0) {
+ int dec_bytes=0;
+ int frame_ready=0;
+
+ // Log::getInstance()->log("Video", Log::DEBUG, "Push data to decoder");
+ dec_bytes=avcodec_decode_video(mpeg2codec_context_ff, dec_frame_ff_decoding,
+ &frame_ready, buffer+packet.pos_buffer+*samplepos, haveToCopy);
+ if (dec_bytes<0) {
+ Log::getInstance()->log("Video", Log::DEBUG, "Decoding frame failed %x", dec_bytes);
+ return *samplepos;
+ }
+ *samplepos+=dec_bytes;
+ haveToCopy-=dec_bytes;
+ if (frame_ready) {
+ // Log::getInstance()->log("Video", Log::DEBUG, "We have a frame push it to osd");
+
+ dec_frame_ff_mutex.Lock();
+ ffwidth=mpeg2codec_context_ff->width;
+ ffheight=mpeg2codec_context_ff->height;
+ ffpixfmt=mpeg2codec_context_ff->pix_fmt;
+ // Log::getInstance()->log("Video", Log::DEBUG, "Frame info %d %d %d",ffwidth,ffheight,ffpixfmt);
+
+ dec_frame_ff_upload_pending.push_back(dec_frame_ff_decoding);
+ dec_frame_ff_decoding=NULL;
+ if (dec_frame_ff_free.size()>0) {
+ dec_frame_ff_decoding=dec_frame_ff_free.front();
+ dec_frame_ff_free.pop_front();
+ dec_frame_ff_mutex.Unlock();
+ threadSignal();
+ ffmpeg_hastime=false;
+ } else {
+ ffmpeg_hastime=false;
+ dec_frame_ff_mutex.Unlock();
+ // No free Buffers
+ return *samplepos;
+ }
- while (haveToCopy> (cur_input_buf_omx->nAllocLen-cur_input_buf_omx->nFilledLen)) {
- unsigned int cancopy=cur_input_buf_omx->nAllocLen-cur_input_buf_omx->nFilledLen;
- memcpy(cur_input_buf_omx->pBuffer+cur_input_buf_omx->nFilledLen,buffer+packet.pos_buffer+*samplepos,cancopy);
- haveToCopy-=cancopy;
- cur_input_buf_omx->nFilledLen+=cancopy;
- *samplepos+=cancopy;
- // push old buffer out
- OMX_ERRORTYPE error=OMX_EmptyThisBuffer(omx_vid_dec,cur_input_buf_omx);
- if (error!=OMX_ErrorNone){
- Log::getInstance()->log("Video", Log::DEBUG, "OMX_EmptyThisBuffer failed %x", error);
}
- // get5 new buffer
- input_bufs_omx_mutex.Lock();
- if (input_bufs_omx_free.size()==0) {
- input_bufs_omx_mutex.Unlock();
- //Log::getInstance()->log("Video", Log::DEBUG, "Deliver MediaPacket no free sample");
- return *samplepos; // we do not have a free media sample
- }
- cur_input_buf_omx=input_bufs_omx_free.front();
- cur_input_buf_omx->nFilledLen=0;
- cur_input_buf_omx->nOffset=0;
- cur_input_buf_omx->nTimeStamp=0;
- input_bufs_omx_free.pop_front();
- input_bufs_omx_mutex.Unlock();
-
- cur_input_buf_omx->nFlags=OMX_BUFFERFLAG_TIME_UNKNOWN;
}
- memcpy(cur_input_buf_omx->pBuffer+cur_input_buf_omx->nFilledLen,
- buffer+packet.pos_buffer+*samplepos,haveToCopy);
- cur_input_buf_omx->nFilledLen+=haveToCopy;
-
-
-
- *samplepos+=haveToCopy;
-
return *samplepos;
-#endif
- return 0;
+
}