From 31bcaec7e866294960f0a68feaf97e5fa671aeed Mon Sep 17 00:00:00 2001
From: Marten Richter <marten.richter@freenet.de>
Date: Sun, 10 Jun 2012 13:26:53 +0200
Subject: [PATCH] Prepare libav Buffer handling for XvMc

---
 defines.h      |   2 +-
 osdopengl.cc   |   1 +
 videovpeogl.cc | 544 ++++++++++++++++++++++++++++++++++++-------------
 videovpeogl.h  | 101 ++++++---
 4 files changed, 478 insertions(+), 170 deletions(-)

diff --git a/defines.h b/defines.h
index c04928e..a46a06d 100755
--- a/defines.h
+++ b/defines.h
@@ -104,7 +104,7 @@ long long getTimeMS();
    #define VPE_OMX_VIDEO_DEINTERLACE "OMX.broadcom.image_fx"
    #define VPE_OMX_CLOCK "OMX.broadcom.clock"
 
-   #define  VPE_FFMPEG_SUPPORT
+   #define  VPE_LIBAV_SUPPORT
 
 #endif
 #ifdef VOMP_PLATTFORM_MVP
diff --git a/osdopengl.cc b/osdopengl.cc
index 17b46d3..d835570 100755
--- a/osdopengl.cc
+++ b/osdopengl.cc
@@ -297,6 +297,7 @@ int OsdOpenGL::init(void* device)
 
 
   glClearColor(0.0f,0.0f,0.0f,1.f);
+  eglSwapInterval(egl_display, 1 );
 
   eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT );
 
diff --git a/videovpeogl.cc b/videovpeogl.cc
index d5bc781..77d5bcb 100755
--- a/videovpeogl.cc
+++ b/videovpeogl.cc
@@ -41,12 +41,14 @@ VideoVPEOGL::VideoVPEOGL()
   omx_h264=omx_mpeg2=true;
 #endif
 
-#ifdef VPE_FFMPEG_SUPPORT
-  mpeg2codec_context_ff=NULL;
-  ffmpeg_running=false;
-  dec_frame_ff_uploading=NULL;
-  dec_frame_ff_decoding=NULL;
+#ifdef VPE_LIBAV_SUPPORT
+  mpeg2codec_context_libav=NULL;
+  libav_running=false;
+  dec_frame_libav_uploading=NULL;
+  dec_frame_libav_decoding=NULL;
   ogl_frame_outside=false;
+  decoding_mode=VPE_NO_XVMC;
+  //decoding_mode=VPE_XVMC_MOCOMP;
 #endif
 
 #ifdef BENCHMARK_FPS
@@ -80,7 +82,7 @@ int VideoVPEOGL::init(UCHAR tformat)
 /*  if (format == PAL) setLetterboxBorder("38");
   else setLetterboxBorder("31");*/
 
-  /* new stuff */
+  /* new stuff */
 
 
 
@@ -142,13 +144,16 @@ int VideoVPEOGL::initUsingOSDObjects()
 
 #endif
 
-#ifdef VPE_FFMPEG_SUPPORT
+#ifdef VPE_LIBAV_SUPPORT
 
 	av_register_all();
-	mpeg2codec_ff=avcodec_find_decoder(CODEC_ID_MPEG2VIDEO);
-	//mpeg2codec_ff=avcodec_find_decoder(CODEC_ID_MPEG2VIDEO_XVMC);
-	if (mpeg2codec_ff==NULL) {
-		Log::getInstance()->log("Video", Log::DEBUG, "Find ffmpeg mpeg2 codec failed");
+	if (decoding_mode==VPE_NO_XVMC) {
+		mpeg2codec_libav=avcodec_find_decoder(CODEC_ID_MPEG2VIDEO);
+	} else {
+		mpeg2codec_libav=avcodec_find_decoder(CODEC_ID_MPEG2VIDEO_XVMC);
+	}
+	if (mpeg2codec_libav==NULL) {
+		Log::getInstance()->log("Video", Log::DEBUG, "Find libav mpeg2 codec failed");
 		return 0;
 	}
 
@@ -230,8 +235,8 @@ int VideoVPEOGL::shutdown()
   DeAllocateCodecsOMX();
   OMX_Deinit();
 #endif
-#ifdef VPE_FFMPEG_SUPPORT
-  DeAllocateCodecsFFMPEG();
+#ifdef VPE_LIBAV_SUPPORT
+  DeAllocateCodecsLibav();
 #endif
   eglDestroyContext(egl_display,egl_context);
 //  close(fdVideo);
@@ -293,15 +298,15 @@ void VideoVPEOGL::threadMethod()
 	}
 	while (1) {
 		bool sleep=true;
-#ifdef VPE_FFMPEG_SUPPORT
-		dec_frame_ff_mutex.Lock();
-		if (dec_frame_ff_upload_pending.size()>0) {
-			dec_frame_ff_uploading=dec_frame_ff_upload_pending.front();
-			dec_frame_ff_upload_pending.pop_front();
-			if (dec_frame_ff_upload_pending.size()>0) sleep=false;
+#ifdef VPE_LIBAV_SUPPORT
+		dec_frame_libav_mutex.Lock();
+		if (dec_frame_libav_upload_pending.size()>0) {
+			dec_frame_libav_uploading=dec_frame_libav_upload_pending.front();
+			dec_frame_libav_upload_pending.pop_front();
+			if (dec_frame_libav_upload_pending.size()>0) sleep=false;
 		}
-		dec_frame_ff_mutex.Unlock();
-		if (dec_frame_ff_uploading) {
+		dec_frame_libav_mutex.Unlock();
+		if (dec_frame_libav_uploading) {
 			int width,height,pixfmt;
 			 //First get a free ogl image
 			VPEOGLFrame* out_frame=NULL;
@@ -312,9 +317,9 @@ void VideoVPEOGL::threadMethod()
 				}
 
 				if (free_ogl_frames.size()>0) {
-					width=ffwidth;
-					height=ffheight;
-					pixfmt=ffpixfmt;
+					width=libavwidth;
+					height=libavheight;
+					pixfmt=libavpixfmt;
 					out_frame=free_ogl_frames.front();
 					free_ogl_frames.pop_front();
 				} else MILLISLEEP(2);
@@ -323,20 +328,20 @@ void VideoVPEOGL::threadMethod()
 			bool failed=false;
 			if (out_frame) {
 				if (out_frame->textures[0]==0 || out_frame->width!=width ||
-						out_frame->height!=height || out_frame->stride!=dec_frame_ff_uploading->linesize[0]) {
-					if (out_frame->textures[0]==0) {
+						out_frame->height!=height || out_frame->stride!=dec_frame_libav_uploading->linesize[0]) {
+					if (out_frame->textures[0]!=0) {
 						glDeleteTextures(1,&out_frame->textures[0]);
 						out_frame->textures[0]=0;
 					}
-					if (out_frame->textures[1]==0) {
+					if (out_frame->textures[1]!=0) {
 						glDeleteTextures(1,&out_frame->textures[1]);
 						out_frame->textures[1]=0;
 					}
-					if (out_frame->textures[2]==0) {
+					if (out_frame->textures[2]!=0) {
 						glDeleteTextures(1,&out_frame->textures[2]);
 						out_frame->textures[2]=0;
 					}
-					if (!AllocateYUVOglTexture(out_frame,width,height,dec_frame_ff_uploading->linesize[0])) failed=true;
+					if (!AllocateYUVOglTexture(out_frame,width,height,dec_frame_libav_uploading->linesize[0])) failed=true;
 				}
 				if (!failed) {
 					//up to now only YUV data, this is for reference only, since the pi is too slow.
@@ -346,7 +351,7 @@ void VideoVPEOGL::threadMethod()
 					glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
 							out_frame->stride,out_frame->height,
 							GL_LUMINANCE,GL_UNSIGNED_BYTE,
-							dec_frame_ff_uploading->data[0]);
+							dec_frame_libav_uploading->data[0]);
 
 
 					glBindTexture(GL_TEXTURE_2D, out_frame->textures[1]);
@@ -354,14 +359,19 @@ void VideoVPEOGL::threadMethod()
 					glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
 							out_frame->stride>>1,out_frame->height>>1,
 							GL_LUMINANCE,GL_UNSIGNED_BYTE,
-							dec_frame_ff_uploading->data[1]);
+							dec_frame_libav_uploading->data[1]);
 
 					glBindTexture(GL_TEXTURE_2D, out_frame->textures[2]);
 					glPixelStorei(GL_UNPACK_ALIGNMENT,1);
 					glTexSubImage2D(GL_TEXTURE_2D,0,0,0,
 							out_frame->stride>>1,out_frame->height>>1,
 							GL_LUMINANCE,GL_UNSIGNED_BYTE,
-							dec_frame_ff_uploading->data[2]);
+							dec_frame_libav_uploading->data[2]);
+
+
+					releaseFrameBufUpload((VPE_FrameBuf*)dec_frame_libav_uploading->base[0]);
+
+
 					ogl_frame_mutex.Lock();
 					ready_ogl_frames.push_back(out_frame);
 					ogl_frame_mutex.Unlock();
@@ -369,10 +379,10 @@ void VideoVPEOGL::threadMethod()
 
 				}
 
-				dec_frame_ff_mutex.Lock();
-				dec_frame_ff_free.push_back(dec_frame_ff_uploading);
-				dec_frame_ff_uploading=NULL;
-				dec_frame_ff_mutex.Unlock();
+				dec_frame_libav_mutex.Lock();
+				dec_frame_libav_free.push_back(dec_frame_libav_uploading);
+				dec_frame_libav_uploading=NULL;
+				dec_frame_libav_mutex.Unlock();
 
 
 
@@ -394,8 +404,8 @@ void VideoVPEOGL::threadMethod()
 void VideoVPEOGL::threadPostStopCleanup()
 {
 	eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT );
-#ifdef VPE_FFMPEG_SUPPORT
-	dec_frame_ff_uploading=NULL;
+#ifdef VPE_LIBAV_SUPPORT
+	dec_frame_libav_uploading=NULL;
 #endif
 }
 
@@ -411,7 +421,7 @@ int VideoVPEOGL::setTVsize(UCHAR ttvsize)
   close(fdVideo);
   if ((fdVideo = open("/dev/vdec_dev", O_WRONLY)) < 0) return 0;
   if (!setSource())                  { shutdown(); return 0; }
-  if (!attachFrameBuffer())          { shutdown(); return 0; }
+  if (!attachFramebuffer())          { shutdown(); return 0; }
 
   // Reopening the fd causes the scart aspect line to go back to 4:3
   // Set this again to the same as the tv screen size
@@ -583,7 +593,7 @@ int VideoVPEOGL::play()
 	  if (AllocateCodecsOMX()) {
 		  decoding_backend=VPE_DECODER_OMX;
 		  return 1;
-		  // Otherwise fall back to ffmpeg
+		  // Otherwise fall back to libav
 	  } else {
 		  if (h264) {
 			  omx_h264=false;
@@ -595,11 +605,11 @@ int VideoVPEOGL::play()
 	  }
   }
 #endif
-#ifdef VPE_FFMPEG_SUPPORT
-  if (AllocateCodecsFFMPEG()) {
-	  decoding_backend=VPE_DECODER_FFMPEG;
+#ifdef VPE_LIBAV_SUPPORT
+  if (AllocateCodecsLibav()) {
+	  decoding_backend=VPE_DECODER_libav;
 	  return 1;
-	  	  // Otherwise fall back to ffmpeg
+	  	  // Otherwise fall back to libav
   }
 #endif
   return 0;
@@ -1113,11 +1123,11 @@ int VideoVPEOGL::PrepareInputBufsOMX()
 	input_bufs_omx_mutex.Lock();
 	for (unsigned int i=0; i< port_def_type.nBufferCountActual;i++) {
 
-	//	unsigned char* new_buffer_data=(unsigned char*)malloc(port_def_type.nBufferSize);
+	//	unsigned char* new_buffer_data=(unsigned char*)malloc(port_def_type.nBufferSize);
 		OMX_BUFFERHEADERTYPE *buf_head=NULL;
-	/*	error=OMX_UseBuffer(omx_vid_dec,&buf_head,omx_codec_input_port,NULL,port_def_type.nBufferSize,new_buffer_data);
+	/*	error=OMX_UseBuffer(omx_vid_dec,&buf_head,omx_codec_input_port,NULL,port_def_type.nBufferSize,new_buffer_data);
 		if (error!=OMX_ErrorNone){
-			Log::getInstance()->log("Video", Log::DEBUG, "Use OMX_UseBuffer failed %x", error);
+			Log::getInstance()->log("Video", Log::DEBUG, "Use OMX_UseBuffer failed %x", error);
 			input_bufs_omx_mutex.Unlock();
 			return 0;
 		}*/
@@ -1330,37 +1340,182 @@ int VideoVPEOGL::DeAllocateCodecsOMX()
 #endif
 
 
-#ifdef VPE_FFMPEG_SUPPORT
+#ifdef VPE_LIBAV_SUPPORT
+
+enum PixelFormat VideoVPEOGL::get_format_libav(struct AVCodecContext *s, const enum PixelFormat *fmt)
+{
+	int dec_mode=((VideoVPEOGL*)getInstance())->getlibavDecodingMode();
+	enum PixelFormat ret_pix=PIX_FMT_NONE;
+	if (dec_mode==VPE_NO_XVMC) return PIX_FMT_NONE;
+	while (*fmt!=PIX_FMT_NONE) {
+		if (*fmt== PIX_FMT_XVMC_MPEG2_IDCT && dec_mode==VPE_XVMC_IDCT) {
+			ret_pix=PIX_FMT_XVMC_MPEG2_IDCT;
+		} else if (*fmt== PIX_FMT_XVMC_MPEG2_MC && dec_mode==VPE_XVMC_MOCOMP) {
+			ret_pix=PIX_FMT_XVMC_MPEG2_MC;
+		}
+		fmt++;
+	}
+	return ret_pix;
+}
 
-int VideoVPEOGL::AllocateCodecsFFMPEG()
+int VideoVPEOGL::reget_buffer_libav(struct AVCodecContext *c, AVFrame *pic)
 {
-	ffmpeg_hastime=false;
-	Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecsFFmpeg");
-	mpeg2codec_context_ff=avcodec_alloc_context();
-	if (mpeg2codec_context_ff==NULL) {
-		Log::getInstance()->log("Video", Log::DEBUG, "Creating ffmpeg codec context failed");
+	Log::getInstance()->log("Video", Log::ERR, "Buffer reusing! Should not happen!Not Implemented!");
+	return -1;
+}
+
+int VideoVPEOGL::get_buffer_libav(struct AVCodecContext *c, AVFrame *pic)
+{
+	unsigned int want_sizes[4]={0,0,0,0};
+	AVPicture pict;
+	bool normal_pixs=false;
+	int s_a[4];
+	//reget logic from mplayer
+	if (pic->opaque && pic->data[0] && (!pic->buffer_hints ||pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE )){
+		Log::getInstance()->log("Video", Log::ERR, "Buffer reusing! Should not happen!");
 		return 0;
 	}
-	if(avcodec_open(mpeg2codec_context_ff, mpeg2codec_ff)<0) {
-		Log::getInstance()->log("Video", Log::DEBUG, "Opening ffmpeg codec  failed");
+
+
+	if (c->pix_fmt!=PIX_FMT_XVMC_MPEG2_IDCT &&c->pix_fmt!=PIX_FMT_XVMC_MPEG2_MC) {
+		normal_pixs=true;
+		// standard pixel format
+		// this is written using much inspiration from libav util.c, so portions from there
+		int width,height;
+
+		width=c->width;
+		height=c->height;
+
+		avcodec_align_dimensions2(c, &width, &height, s_a);
+		if ((c->flags & CODEC_FLAG_EMU_EDGE)==0) {
+			width+=2*16;
+			height+=2*16;
+		}
+		// Now we have to determine alignment size
+		bool unaligned=true;
+		while (unaligned) {
+			av_image_fill_linesizes(pict.linesize, c->pix_fmt, width); //linesizes are derived
+			width+=width & ~(width-1); //trick from libav, now determine, if the alignment is ok
+			unaligned=false;
+			for (int i=0;i<4;i++) {
+				if ((pict.linesize[i]%s_a[i])!=0) {
+					unaligned=true;
+					break;
+				}
+			}
+		}
+		int tot_size=av_image_fill_pointers(pict.data, c->pix_fmt, height, NULL, pict.linesize); //get sizes
+		for (int i=0;i<4 ;i++) {
+			if (i<3 && pict.data[i+1]) {
+				want_sizes[i]=pict.data[i+1]-pict.data[i];
+				want_sizes[i]+=16;
+			} else {
+				want_sizes[i]=(tot_size-(pict.data[i]-pict.data[0]));
+				want_sizes[i]+=16;
+				break;
+			}
+		}
+
+	} else {
+		//TODE set linesizes!
+		Log::getInstance()->log("Video", Log::ERR, "Not implemented");
+		return -1;
+
+	}
+
+	VPE_FrameBuf *frame_buf=((VideoVPEOGL*)Video::getInstance())->getFrameBuf(want_sizes);
+	//Log::getInstance()->log("Video", Log::NOTICE, "get buffer %x",frame_buf);
+	if (!frame_buf) {
+		Log::getInstance()->log("Video", Log::ERR, "Getting buffer libav failed");
+		return -1;
+	}
+	//populate pict
+	pic->type=FF_BUFFER_TYPE_USER; // we are controlling the buffers
+	int hchr_shift,vchr_shift;
+	avcodec_get_chroma_sub_sample(c->pix_fmt,&hchr_shift,&vchr_shift);
+	const int pixel_size = av_pix_fmt_descriptors[c->pix_fmt].comp[0].step_minus1+1;
+	for (int i=0;i<4;i++) {
+		pic->data[i]=(uint8_t*)frame_buf->data[i];
+		pic->linesize[i]=pict.linesize[i];
+		if (normal_pixs) {
+
+			int edge_width=16;
+			int edge_height=16;
+			if (i!=0) {
+				edge_width>>=hchr_shift;
+				edge_height>>=vchr_shift;
+			}
+			pic->data[i]+=FFALIGN((pic->linesize[i]*16) + (pixel_size*edge_width), s_a[i]);
+		}
+	}
+
+	pic->base[0]=(uint8_t*)frame_buf; // our structure
+	//pic->extended_data=pic->data;
+	if(c->pkt) pic->pkt_pts=c->pkt->pts;
+	else pic->pkt_pts=AV_NOPTS_VALUE;
+	pic->width=c->width;
+	pic->height=c->height;
+	pic->format=c->pix_fmt;
+	pic->sample_aspect_ratio=c->sample_aspect_ratio;
+	pic->reordered_opaque= c->reordered_opaque;
+	pic->age=INT_MAX;
+
+	return 0;
+
+}
+
+void VideoVPEOGL::release_buffer_libav(struct AVCodecContext *c, AVFrame *pic)
+{
+//	Log::getInstance()->log("Video", Log::NOTICE, "release buffer %x",pic->base[0]);
+	((VideoVPEOGL*)Video::getInstance())->releaseFrameBufLibav((VPE_FrameBuf*) pic->base[0]);
+	pic->base[0]=NULL;
+	pic->data[0]=pic->data[1]=pic->data[2]=pic->data[3]=NULL;
+
+
+}
+
+int VideoVPEOGL::AllocateCodecsLibav()
+{
+	libav_hastime=false;
+	Log::getInstance()->log("Video", Log::NOTICE, "AllocateCodecslibav");
+	mpeg2codec_context_libav=avcodec_alloc_context();
+	if (mpeg2codec_context_libav==NULL) {
+		Log::getInstance()->log("Video", Log::DEBUG, "Creating libav codec context failed");
 		return 0;
 	}
-	memset(&incom_packet_ff,0,sizeof(incom_packet_ff));
-	incom_packet_ff_size=200000;
-	incom_packet_ff.data=(uint8_t*)av_malloc(incom_packet_ff_size+FF_INPUT_BUFFER_PADDING_SIZE);
+	if (decoding_mode!=VPE_NO_XVMC) {
+		mpeg2codec_context_libav->slice_flags=SLICE_FLAG_CODED_ORDER|SLICE_FLAG_ALLOW_FIELD;
+		if (decoding_mode==VPE_XVMC_MOCOMP) mpeg2codec_context_libav->pix_fmt=PIX_FMT_XVMC_MPEG2_MC;
+		else mpeg2codec_context_libav->pix_fmt=PIX_FMT_XVMC_MPEG2_IDCT;
+		mpeg2codec_context_libav->get_format=get_format_libav;
+
+	}
+	mpeg2codec_context_libav->get_buffer=get_buffer_libav;
+	mpeg2codec_context_libav->reget_buffer=reget_buffer_libav;
+	mpeg2codec_context_libav->release_buffer=release_buffer_libav;
 
-	dec_frame_ff_mutex.Lock();
+
+	int avc_ret=avcodec_open(mpeg2codec_context_libav, mpeg2codec_libav);
+	if (avc_ret<0) {
+		Log::getInstance()->log("Video", Log::DEBUG, "Opening libav codec  failed ");
+		return 0;
+	}
+	memset(&incom_packet_libav,0,sizeof(incom_packet_libav));
+	incom_packet_libav_size=200000;
+	incom_packet_libav.data=(uint8_t*)av_malloc(incom_packet_libav_size+FF_INPUT_BUFFER_PADDING_SIZE);
+
+	dec_frame_libav_mutex.Lock();
 	for (int i=0;i<3;i++) {
-			AVFrame *dec_frame_ff=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload
-			if (!dec_frame_ff) {
+			AVFrame *dec_frame_libav=avcodec_alloc_frame(); // may be we need multiple frames, if we want to use async texture upload
+			if (!dec_frame_libav) {
 				Log::getInstance()->log("Video", Log::DEBUG, "Allocating dec_frame  failed");
 				return 0;
 			}
-			dec_frame_ff_all.push_back(dec_frame_ff);
-			dec_frame_ff_free.push_back(dec_frame_ff);
+			dec_frame_libav_all.push_back(dec_frame_libav);
+			dec_frame_libav_free.push_back(dec_frame_libav);
 	}
-	dec_frame_ff_decoding=NULL;
-	dec_frame_ff_mutex.Unlock();
+	dec_frame_libav_decoding=NULL;
+	dec_frame_libav_mutex.Unlock();
 
 	ogl_frame_mutex.Lock();
 	//Allocate texture structs, since we do not know the sizes, we do not allocate the textures yet
@@ -1379,37 +1534,37 @@ int VideoVPEOGL::AllocateCodecsFFMPEG()
 
 	ogl_frame_mutex.Unlock();
 
-	ffmpeg_running=true;
+	libav_running=true;
 
 	return 1;
 
 }
 
-int VideoVPEOGL::DeAllocateCodecsFFMPEG()
+int VideoVPEOGL::DeAllocateCodecsLibav()
 {
-	ffmpeg_running=false;
-	Log::getInstance()->log("Video", Log::NOTICE, "DeAllocateCodecsFFmpeg");
-	dec_frame_ff_mutex.Lock();
-	dec_frame_ff_upload_pending.clear();
-	dec_frame_ff_free.clear();
-	dec_frame_ff_mutex.Unlock();
-	while (dec_frame_ff_uploading) {
+	libav_running=false;
+	Log::getInstance()->log("Video", Log::NOTICE, "DeAllocateCodecslibav");
+	dec_frame_libav_mutex.Lock();
+	dec_frame_libav_upload_pending.clear();
+	dec_frame_libav_free.clear();
+	dec_frame_libav_mutex.Unlock();
+	while (dec_frame_libav_uploading) {
 		Log::getInstance()->log("Video", Log::NOTICE, "Wait for uploading to finish");
 		MILLISLEEP(20);
 	}
-	dec_frame_ff_mutex.Lock();
-	for (int i=0; i< dec_frame_ff_all.size();i++) {
-		av_free(dec_frame_ff_all[i]);
+	dec_frame_libav_mutex.Lock();
+	for (int i=0; i< dec_frame_libav_all.size();i++) {
+		av_free(dec_frame_libav_all[i]);
 	}
 
-	dec_frame_ff_all.clear();
+	dec_frame_libav_all.clear();
 
-	av_free(incom_packet_ff.data);
-	incom_packet_ff.data=NULL;
-	incom_packet_ff_size=0;
+	av_free(incom_packet_libav.data);
+	incom_packet_libav.data=NULL;
+	incom_packet_libav_size=0;
 
-	dec_frame_ff_mutex.Unlock();
-	dec_frame_ff_decoding=NULL;
+	dec_frame_libav_mutex.Unlock();
+	dec_frame_libav_decoding=NULL;
 	while (ogl_frame_outside) {
 		Log::getInstance()->log("Video", Log::NOTICE, "Wait for ogl frame from outside");
 		MILLISLEEP(20);
@@ -1417,7 +1572,7 @@ int VideoVPEOGL::DeAllocateCodecsFFMPEG()
 
 	((OsdOpenGL*)Osd::getInstance())->BeginPainting(); // get osd's context
 	ogl_frame_mutex.Lock();
-	for (int i=0; i< dec_frame_ff_all.size();i++) {
+	for (int i=0; i< dec_frame_libav_all.size();i++) {
 		VPEOGLFrame * del_frame=all_ogl_frames[i];
 		if (del_frame->textures[0]==0) {
 			glDeleteTextures(1,&del_frame->textures[0]);
@@ -1440,19 +1595,129 @@ int VideoVPEOGL::DeAllocateCodecsFFMPEG()
 	((OsdOpenGL*)Osd::getInstance())->EndPainting();
 
 
-	if (mpeg2codec_context_ff) {
-		avcodec_close(mpeg2codec_context_ff);
-		av_free(mpeg2codec_context_ff);
-		mpeg2codec_context_ff=NULL;
+	if (mpeg2codec_context_libav) {
+		avcodec_close(mpeg2codec_context_libav);
+		av_free(mpeg2codec_context_libav);
+		mpeg2codec_context_libav=NULL;
+
+	}
 
+
+
+	vpe_framebuf_mutex.Lock();
+
+	for (int i=0;i<all_frame_bufs.size();i++) {
+		VPE_FrameBuf* current=all_frame_bufs[i];
+		for (int x=0;x<4;x++) {
+			if (current->data[x]) {
+				av_free(current->data[x]);
+			}
+		}
+		free(current);
 	}
+	all_frame_bufs.clear();
+	free_frame_bufs.clear();
+	locked_libav_frame_buf.clear();
+	locked_uploading_frame_buf.clear();
 
+	vpe_framebuf_mutex.Unlock();
 
 
 	return 1;
 }
 
 
+VPE_FrameBuf *VideoVPEOGL::getFrameBuf(unsigned int *size)
+{ //for libav
+	VPE_FrameBuf* current=NULL;
+	vpe_framebuf_mutex.Lock();
+	if (free_frame_bufs.size()>0) {
+		current=free_frame_bufs.front();
+		free_frame_bufs.pop_front();
+	} else if (all_frame_bufs.size()<6) {
+		current=(VPE_FrameBuf*)malloc(sizeof(VPE_FrameBuf));
+		memset(current,0,sizeof(VPE_FrameBuf));
+	} else {
+		Log::getInstance()->log("Video", Log::NOTICE, "Framebuffer underrun!");
+		vpe_framebuf_mutex.Unlock();
+		return NULL; // We do not have a frame buffer
+	}
+	locked_libav_frame_buf.push_back(current);
+	vpe_framebuf_mutex.Unlock();
+	//check if we need reallocation
+	for (int x=0;x<4;x++) {
+		if (current->size[x]!=size[x]) {
+			current->data[x]=av_realloc(current->data[x],size[x]);
+			current->size[x]=size[x];
+		}
+	}
+	return current;
+
+}
+
+void VideoVPEOGL::lockFrameBufUpload(VPE_FrameBuf* buf)
+{
+	// first find frame_buf memory
+
+	//Log::getInstance()->log("Video", Log::NOTICE, "lock buffer upload %x",buf);
+	VPE_FrameBuf* current=buf;
+	vpe_framebuf_mutex.Lock();
+	if (current) locked_uploading_frame_buf.push_back(current); //locked
+	vpe_framebuf_mutex.Unlock();
+
+}
+
+
+void VideoVPEOGL::releaseFrameBufLibav(VPE_FrameBuf* buf)
+{
+	// first find frame_buf memory
+	//Log::getInstance()->log("Video", Log::NOTICE, "release buffer libav %x",buf);
+	VPE_FrameBuf* current=buf;
+	vpe_framebuf_mutex.Lock();
+	if (current) {
+		locked_libav_frame_buf.remove(current); //unlocked
+		list<VPE_FrameBuf*>::iterator itty=locked_uploading_frame_buf.begin();
+		bool inlist=false;
+		while (itty!=locked_uploading_frame_buf.end()) {
+			if (*itty==current) {
+				inlist=true;
+				break;
+			}
+			itty++;
+		}
+		if (!inlist) {
+			free_frame_bufs.push_back(current);
+		}
+	}
+	vpe_framebuf_mutex.Unlock();
+}
+
+void VideoVPEOGL::releaseFrameBufUpload(VPE_FrameBuf* buf)
+{
+	// first find frame_buf memory
+	VPE_FrameBuf* current=buf;
+	//Log::getInstance()->log("Video", Log::NOTICE, "release buffer upload %x",buf);
+	vpe_framebuf_mutex.Lock();
+	if (current) {
+		locked_uploading_frame_buf.remove(current); //unlocked
+		list<VPE_FrameBuf*>::iterator itty=locked_libav_frame_buf.begin();
+		bool inlist=false;
+		while (itty!=locked_libav_frame_buf.end()) {
+			if (*itty==current) {
+				inlist=true;
+				break;
+			}
+			itty++;
+		}
+		if (!inlist) {
+			free_frame_bufs.push_back(current);
+		}
+	}
+	vpe_framebuf_mutex.Unlock();
+}
+
+
+
 
 
 
@@ -1463,7 +1728,7 @@ int VideoVPEOGL::stop()
   if (!initted) return 0;
 
 #ifdef VPE_OMX_SUPPORT
-  //Check if ffmpeg mode
+  //Check if libav mode
   if (decoding_backend==VPE_DECODER_OMX) DeAllocateCodecsOMX();
   decoding_backend=0;
 
@@ -1502,7 +1767,7 @@ int VideoVPEOGL::fastForward()
 {
   if (!initted) return 0;
 
-//  if (ioctl(fdVideo, AV_SET_VID_FFWD, 1) != 0) return 0;
+//  if (ioctl(fdVideo, AV_SET_VID_FFWD, 1) != 0) return 0;
   return 1;
 }
 
@@ -1607,8 +1872,8 @@ UINT VideoVPEOGL::DeliverMediaPacket(MediaPacket packet,
 #ifdef VPE_OMX_SUPPORT
 	case VPE_DECODER_OMX: return DeliverMediaPacketOMX(packet,buffer,samplepos);
 #endif
-#ifdef VPE_FFMPEG_SUPPORT
-	case VPE_DECODER_FFMPEG: return DeliverMediaPacketFFMPEG(packet,buffer,samplepos);
+#ifdef VPE_LIBAV_SUPPORT
+	case VPE_DECODER_libav: return DeliverMediaPacketlibav(packet,buffer,samplepos);
 #endif
 	}
 }
@@ -1620,7 +1885,7 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet,
 {
 
 
-	//Later add fail back code for ffmpeg
+	//Later add fail back code for libav
 	/*if (!videoon) {
 		*samplepos+=packet.length;
 		return packet.length;
@@ -1780,11 +2045,11 @@ UINT VideoVPEOGL::DeliverMediaPacketOMX(MediaPacket packet,
 #endif
 
 
-#ifdef VPE_FFMPEG_SUPPORT
-int VideoVPEOGL::DecodePacketFFMPEG()
+#ifdef VPE_LIBAV_SUPPORT
+int VideoVPEOGL::DecodePacketlibav()
 {
-	unsigned int haveToCopy=incom_packet_ff.size;
-	if (incom_packet_ff.size==0) return 1; // we are already empty
+	unsigned int haveToCopy=incom_packet_libav.size;
+	if (incom_packet_libav.size==0) return 1; // we are already empty
 	while (haveToCopy>0) {
 		int dec_bytes=0;
 		int frame_ready=0;
@@ -1794,9 +2059,8 @@ int VideoVPEOGL::DecodePacketFFMPEG()
 #ifdef BENCHMARK_FPS
 	    int cur_time=getTimeMS();
 #endif
-
-		dec_bytes=avcodec_decode_video2(mpeg2codec_context_ff, dec_frame_ff_decoding,
-		                &frame_ready, &incom_packet_ff);
+		dec_bytes=avcodec_decode_video2(mpeg2codec_context_libav, dec_frame_libav_decoding,
+		                &frame_ready, &incom_packet_libav);
 #ifdef BENCHMARK_FPS
 		time_in_decoder+=getTimeMS()-cur_time;
 		if (frame_ready) num_frames++;
@@ -1814,24 +2078,26 @@ int VideoVPEOGL::DecodePacketFFMPEG()
 		if (frame_ready) {
 		//	Log::getInstance()->log("Video", Log::DEBUG, "We have a frame push it to osd");
 
-			dec_frame_ff_mutex.Lock();
-			ffwidth=mpeg2codec_context_ff->width;
-			ffheight=mpeg2codec_context_ff->height;
-			ffpixfmt=mpeg2codec_context_ff->pix_fmt;
-		//	Log::getInstance()->log("Video", Log::DEBUG, "Frame info %d %d %d",ffwidth,ffheight,ffpixfmt);
-
-			dec_frame_ff_upload_pending.push_back(dec_frame_ff_decoding);
-			dec_frame_ff_decoding=NULL;
-			if (dec_frame_ff_free.size()>0) {
-				dec_frame_ff_decoding=dec_frame_ff_free.front();
-				dec_frame_ff_free.pop_front();
-				dec_frame_ff_mutex.Unlock();
+			lockFrameBufUpload((VPE_FrameBuf*)dec_frame_libav_decoding->base[0]); //lock for upload, so that ffmpeg does not reuse
+			dec_frame_libav_mutex.Lock();
+			libavwidth=mpeg2codec_context_libav->width;
+			libavheight=mpeg2codec_context_libav->height;
+			libavpixfmt=mpeg2codec_context_libav->pix_fmt;
+		//	Log::getInstance()->log("Video", Log::DEBUG, "Frame info %d %d %d",libavwidth,libavheight,libavpixfmt);
+
+
+			dec_frame_libav_upload_pending.push_back(dec_frame_libav_decoding);
+			dec_frame_libav_decoding=NULL;
+			if (dec_frame_libav_free.size()>0) {
+				dec_frame_libav_decoding=dec_frame_libav_free.front();
+				dec_frame_libav_free.pop_front();
+				dec_frame_libav_mutex.Unlock();
 				threadSignal();
-				ffmpeg_hastime=false;
+				libav_hastime=false;
 			} else {
-				ffmpeg_hastime=false;
-				dec_frame_ff_mutex.Unlock();
-				// No free Buffers
+				libav_hastime=false;
+				dec_frame_libav_mutex.Unlock();
+				// No free buffers
 				return 0;
 			}
 
@@ -1839,23 +2105,23 @@ int VideoVPEOGL::DecodePacketFFMPEG()
 		}
 
 	}
-	incom_packet_ff.size=0;
+	incom_packet_libav.size=0;
 	return 1;
 
 }
 
 
-UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
+UINT VideoVPEOGL::DeliverMediaPacketlibav(MediaPacket packet,
 		const UCHAR* buffer,
 		UINT *samplepos)
 {
-	//Later add fail back code for ffmpeg
+	//Later add fail back code for libav
 	/*if (!videoon) {
 		*samplepos+=packet.length;
 		return packet.length;
 	}*/
 
-	if (!ffmpeg_running) return 0; // if we are not runnig do not do this
+	if (!libav_running) return 0; // if we are not running do not do this
 
 
 	if (iframemode) {
@@ -1867,20 +2133,20 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
 	UINT headerstrip=0;
 	if (packet.disconti) {
 		firstsynched=false;
-		if (!DecodePacketFFMPEG()) return 0;
+		if (!DecodePacketlibav()) return 0;
 	}
 
 	/*Inspect PES-Header */
-	if (!dec_frame_ff_decoding) {
-		dec_frame_ff_mutex.Lock();
-		if (dec_frame_ff_free.size()>0) {
-			dec_frame_ff_decoding=dec_frame_ff_free.front();
-			dec_frame_ff_free.pop_front();
-			dec_frame_ff_mutex.Unlock();
+	if (!dec_frame_libav_decoding) {
+		dec_frame_libav_mutex.Lock();
+		if (dec_frame_libav_free.size()>0) {
+			dec_frame_libav_decoding=dec_frame_libav_free.front();
+			dec_frame_libav_free.pop_front();
+			dec_frame_libav_mutex.Unlock();
 		} else {
 			Log::getInstance()->log("Video", Log::DEBUG, "We have no free buffers");
-			dec_frame_ff_mutex.Unlock();
-			// No free Buffers
+			dec_frame_libav_mutex.Unlock();
+			// No free buffers
 			return 0;
 		}
 	}
@@ -1892,10 +2158,10 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
 		*samplepos+=headerstrip;
 		if ( packet.synched ) {
 
-			if (!DecodePacketFFMPEG()) return 0; // WriteOut old Data
+			if (!DecodePacketlibav()) return 0; // WriteOut old Data
 
-			ffmpeg_time=packet.presentation_time;
-			ffmpeg_hastime=true;
+			libav_time=packet.presentation_time;
+			libav_hastime=true;
 		//	reftime1=packet.presentation_time;
 		//	reftime2=reftime1+1;
 			firstsynched=true;
@@ -1938,14 +2204,14 @@ UINT VideoVPEOGL::DeliverMediaPacketFFMPEG(MediaPacket packet,
 	}*/
 	unsigned int haveToCopy=packet.length-*samplepos;
 
-	if  ((incom_packet_ff_size-incom_packet_ff.size)< haveToCopy) {
+	if  ((incom_packet_libav_size-incom_packet_libav.size)< haveToCopy) {
 		// if the buffer is to small reallocate
-		incom_packet_ff_size+=haveToCopy;
-		incom_packet_ff.data=(uint8_t*)av_realloc(incom_packet_ff.data,incom_packet_ff_size+FF_INPUT_BUFFER_PADDING_SIZE);
-		Log::getInstance()->log("Video", Log::DEBUG, "Reallocate avpacket buffer to %d", incom_packet_ff_size);
+		incom_packet_libav_size+=haveToCopy;
+		incom_packet_libav.data=(uint8_t*)av_realloc(incom_packet_libav.data,incom_packet_libav_size+FF_INPUT_BUFFER_PADDING_SIZE);
+		Log::getInstance()->log("Video", Log::DEBUG, "Reallocate avpacket buffer to %d", incom_packet_libav_size);
 	}
-	memcpy(incom_packet_ff.data,buffer+packet.pos_buffer+*samplepos,haveToCopy);
-	incom_packet_ff.size+=haveToCopy;
+	memcpy(incom_packet_libav.data+incom_packet_libav.size,buffer+packet.pos_buffer+*samplepos,haveToCopy);
+	incom_packet_libav.size+=haveToCopy;
 
 	*samplepos+=haveToCopy;
 
diff --git a/videovpeogl.h b/videovpeogl.h
index c127a33..ef73724 100755
--- a/videovpeogl.h
+++ b/videovpeogl.h
@@ -65,14 +65,22 @@ struct VPE_OMX_EVENT {
 
 #endif
 
-#ifdef VPE_FFMPEG_SUPPORT
+#ifdef VPE_LIBAV_SUPPORT
 
 #include <stdint.h>
 extern "C" {
 
 #include <libavcodec/avcodec.h>
 #include <libavformat/avformat.h>
+#include <libavutil/imgutils.h>
 }
+
+struct VPE_FrameBuf
+{ //This is a frame buffer, nothing fancy just plain memory
+	void *data[4];
+	unsigned int size[4];
+};
+
 #endif
 
 
@@ -124,18 +132,18 @@ class VideoVPEOGL : public Video, public Thread_TYPE
     int attachFrameBuffer(); // What does this do?
     ULONG timecodeToFrameNumber(ULLONG timecode);
     ULLONG getCurrentTimestamp();
-    bool displayIFrame(const UCHAR* buffer, UINT length);
+    bool displayIFrame(const UCHAR* buffer, UINT length);
 
     // Writing Data to Videodevice
     virtual void PrepareMediaSample(const MediaPacketList&, UINT samplepos);
-    virtual UINT DeliverMediaSample(UCHAR* buffer, UINT* samplepos);
+    virtual UINT DeliverMediaSample(UCHAR* buffer, UINT* samplepos);
     virtual long long SetStartOffset(long long curreftime, bool *rsync)
     { *rsync=false; return 0; };
     virtual void ResetTimeOffsets();
 
 	virtual bool supportsh264(){return true;};
 
-	int WriteOutTS(const unsigned char *buffer,int length, int type);
+	int WriteOutTS(const unsigned char *buffer,int length, int type);
 	void WriteOutPATPMT();
 
 
@@ -154,33 +162,39 @@ class VideoVPEOGL : public Video, public Thread_TYPE
     int initUsingOSDObjects();
     int shutdownUsingOSDObjects() {return shutdown();};
 
+#ifdef VPE_LIBAV_SUPPORT
+    int getlibavDecodingMode() {return decoding_mode;};
+
+
+#endif
+
   private:
 	   int EnterIframePlayback();
 	   bool iframemode;
 
 	   UINT DeliverMediaPacket(MediaPacket packet,
-	                                     const UCHAR* buffer,
+	                                     const UCHAR* buffer,
 	                                     UINT *samplepos);
-	   int decoding_backend; //1 omx, 2 ffmpeg
+	   int decoding_backend; //1 omx, 2 libav
 #define VPE_DECODER_OMX 1
-#define VPE_DECODER_FFMPEG 2
+#define VPE_DECODER_libav 2
 
 #ifdef VPE_OMX_SUPPORT
 	   static OMX_ERRORTYPE EventHandler_OMX(OMX_IN OMX_HANDLETYPE handle,OMX_IN OMX_PTR appdata,
 	   OMX_IN OMX_EVENTTYPE event_type,OMX_IN OMX_U32 data1,
 	   OMX_IN OMX_U32 data2,OMX_IN OMX_PTR event_data);
-	   static OMX_ERRORTYPE EmptyBufferDone_OMX(OMX_IN OMX_HANDLETYPE hcomp,OMX_IN OMX_PTR appdata,OMX_IN OMX_BUFFERHEADERTYPE* buffer);
-	   static OMX_ERRORTYPE FillBufferDone_OMX(OMX_IN OMX_HANDLETYPE hcomp, OMX_IN OMX_PTR appdata,OMX_IN OMX_BUFFERHEADERTYPE* buffer);
+	   static OMX_ERRORTYPE EmptyBufferDone_OMX(OMX_IN OMX_HANDLETYPE hcomp,OMX_IN OMX_PTR appdata,OMX_IN OMX_BUFFERHEADERTYPE* buffer);
+	   static OMX_ERRORTYPE FillBufferDone_OMX(OMX_IN OMX_HANDLETYPE hcomp, OMX_IN OMX_PTR appdata,OMX_IN OMX_BUFFERHEADERTYPE* buffer);
 
 	   UINT DeliverMediaPacketOMX(MediaPacket packet,
-	  	                                     const UCHAR* buffer,
+	  	                                     const UCHAR* buffer,
 	  	                                     UINT *samplepos);
 
 	   int PrepareInputBufsOMX();
 	   int DestroyInputBufsOMX();
 
 	   void AddOmxEvent(VPE_OMX_EVENT  new_event);
-	   void ReturnEmptyOMXBuffer(OMX_BUFFERHEADERTYPE* buffer);
+	   void ReturnEmptyOMXBuffer(OMX_BUFFERHEADERTYPE* buffer);
 
 	   int ChangeComponentState(OMX_HANDLETYPE handle,OMX_STATETYPE type);
 	   int CommandFinished(OMX_HANDLETYPE handle,OMX_U32 command,OMX_U32 data2);
@@ -231,30 +245,57 @@ class VideoVPEOGL : public Video, public Thread_TYPE
 
 #endif
 
-#ifdef VPE_FFMPEG_SUPPORT
-	AVCodec *mpeg2codec_ff;
-	AVCodecContext *mpeg2codec_context_ff;
-	vector<AVFrame*> dec_frame_ff_all;
-	list<AVFrame*> dec_frame_ff_free;
-	list<AVFrame*> dec_frame_ff_upload_pending;
-	AVFrame* dec_frame_ff_uploading;
-	AVFrame* dec_frame_ff_decoding;
+#ifdef VPE_LIBAV_SUPPORT
+
+	const static int VPE_NO_XVMC=0;
+	const static int VPE_XVMC_MOCOMP=1;
+	const static int VPE_XVMC_IDCT=2;
+
+	int decoding_mode;
+
+	AVCodec *mpeg2codec_libav;
+	AVCodecContext *mpeg2codec_context_libav;
+	vector<AVFrame*> dec_frame_libav_all;
+	list<AVFrame*> dec_frame_libav_free;
+	list<AVFrame*> dec_frame_libav_upload_pending;
+	AVFrame* dec_frame_libav_uploading;
+	AVFrame* dec_frame_libav_decoding;
+
+	vector<VPE_FrameBuf*> all_frame_bufs;
+	list<VPE_FrameBuf*> free_frame_bufs;
+	list<VPE_FrameBuf*> locked_libav_frame_buf;
+	list<VPE_FrameBuf*> locked_uploading_frame_buf;
+
+	VPE_FrameBuf *getFrameBuf(unsigned int *size); //for libav
+	void releaseFrameBufLibav(VPE_FrameBuf*data);
+	void lockFrameBufUpload(VPE_FrameBuf* data);
+	void releaseFrameBufUpload(VPE_FrameBuf* data);
+
+	Mutex vpe_framebuf_mutex;
+
+
+
+	AVPacket incom_packet_libav;
+	int incom_packet_libav_size;
+	Mutex dec_frame_libav_mutex;
+
 
-	AVPacket incom_packet_ff;
-	int incom_packet_ff_size;
-	Mutex dec_frame_ff_mutex;
 
+	UINT DeliverMediaPacketlibav(MediaPacket packet,const UCHAR* buffer,UINT *samplepos);
+	int AllocateCodecsLibav();
+	int DeAllocateCodecsLibav();
+	int DecodePacketlibav();
 
+	static enum PixelFormat get_format_libav(struct AVCodecContext *s, const enum PixelFormat *fmt);
+	static int get_buffer_libav(struct AVCodecContext *c, AVFrame *pic);
+	static int reget_buffer_libav(struct AVCodecContext *c, AVFrame *pic);
+	static void release_buffer_libav(struct AVCodecContext *c, AVFrame *pic);
 
-	UINT DeliverMediaPacketFFMPEG(MediaPacket packet,const UCHAR* buffer,UINT *samplepos);
-	int AllocateCodecsFFMPEG();
-	int DeAllocateCodecsFFMPEG();
-	int DecodePacketFFMPEG();
-	bool ffmpeg_running;
-	bool ffmpeg_hastime; // signals if a pts is now
-	long long ffmpeg_time;
+	bool libav_running;
+	bool libav_hastime; // signals if a pts is now
+	long long libav_time;
 
-	int ffwidth,ffheight,ffpixfmt;
+	int libavwidth,libavheight,libavpixfmt;
 
 	
 #endif
-- 
2.39.5