2 Copyright 2004-2005 Chris Tallon 2009 Marten Richter
\r
4 This file is part of VOMP.
\r
6 VOMP is free software; you can redistribute it and/or modify
\r
7 it under the terms of the GNU General Public License as published by
\r
8 the Free Software Foundation; either version 2 of the License, or
\r
9 (at your option) any later version.
\r
11 VOMP is distributed in the hope that it will be useful,
\r
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
\r
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
\r
14 GNU General Public License for more details.
\r
16 You should have received a copy of the GNU General Public License
\r
17 along with VOMP; if not, write to the Free Software
\r
18 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
\r
22 #ifndef VIDEOVPEOGL_H
\r
23 #define VIDEOVPEOGL_H
\r
31 #include <sys/ioctl.h>
\r
39 #include "defines.h"
\r
41 #include "threadsystem.h"
\r
44 //#define EGL_EGLEXT_PROTOTYPES
\r
46 #include <GLES2/gl2.h>
\r
47 #include <EGL/egl.h>
\r
48 #include <EGL/eglext.h>
\r
50 #ifdef VPE_OMX_SUPPORT
\r
52 #include <IL/OMX_Core.h>
\r
53 #include <IL/OMX_Types.h>
\r
54 #include <IL/OMX_Component.h>
\r
55 #include <IL/OMX_Broadcom.h>
\r
58 struct VPE_OMX_EVENT {
\r
59 OMX_IN OMX_HANDLETYPE handle;
\r
60 OMX_IN OMX_PTR appdata;
\r
61 OMX_IN OMX_EVENTTYPE event_type;
\r
62 OMX_IN OMX_U32 data1;
\r
63 OMX_IN OMX_U32 data2;
\r
64 OMX_IN OMX_PTR event_data;
\r
69 #if defined(VPE_LIBAV_MPEG2_TRANSCODING) || defined(VPE_LIBAV_SUPPORT)
\r
71 #include <libavcodec/avcodec.h>
\r
72 #include <libavformat/avformat.h>
\r
73 #include <libavutil/imgutils.h>
\r
77 #ifdef VPE_LIBAV_MPEG2_TRANSCODING
\r
79 #include <libavcodec/transcode.h>
\r
81 #if !defined(VPE_OMX_SUPPORT)
\r
82 #error "VPE_LIBAV_MPEG2_TRANSCODING defined, and not VPE_OMX_SUPPORT defined!"
\r
86 #ifdef VPE_LIBAV_SUPPORT
\r
90 #include <libavcodec/xvmc.h>
\r
92 #include "glmocoshader.h"
\r
97 { //This is a frame buffer, nothing fancy just plain memory
\r
99 unsigned int size[4];
\r
100 unsigned int pict_num;
\r
101 VPEOGLFrame *ogl_ref;
\r
109 struct VPEOGLFrame {
\r
110 int type; //1 = YUV400, 2 = YUV444
\r
111 GLuint textures[3]; // 0=RGB or Y, 1=U 2=V
\r
114 unsigned int pict_num;
\r
116 #ifdef VPE_OMX_SUPPORT
\r
118 EGLImageKHR khr_image;
\r
119 OMX_BUFFERHEADERTYPE *omx_buf;
\r
128 class VideoVPEOGL : public Video, public Thread_TYPE
\r
130 friend class AudioVPE;
\r
133 virtual ~VideoVPEOGL();
\r
135 int init(UCHAR format);
\r
138 int setFormat(UCHAR format);
\r
139 int setConnection(UCHAR connection);
\r
140 int setAspectRatio(UCHAR aspectRatio); // This one does the pin 8 scart widescreen switching
\r
141 int setMode(UCHAR mode);
\r
142 int setTVsize(UCHAR size); // Is the TV a widescreen?
\r
143 int setDefaultAspect();
\r
145 int setPosition(int x, int y);
\r
152 int unFastForward();
\r
157 int attachFrameBuffer(); // What does this do?
\r
158 ULONG timecodeToFrameNumber(ULLONG timecode);
\r
159 ULLONG getCurrentTimestamp();
\r
160 bool displayIFrame(const UCHAR* bulibaver, UINT length);
\r
162 virtual bool dtsTimefix(){return true;} // note: we need DTS time values here
\r
164 // Writing Data to Videodevice
\r
165 virtual void PrepareMediaSample(const MediaPacketList&, UINT samplepos);
\r
166 virtual UINT DeliverMediaSample(UCHAR* bulibaver, UINT* samplepos);
\r
169 virtual bool supportsh264(){return true;};
\r
171 int WriteOutTS(const unsigned char *bulibaver,int length, int type);
\r
172 void WriteOutPATPMT();
\r
175 VPEOGLFrame *getReadyOGLFrame();
\r
176 void returnOGLFrame(VPEOGLFrame *frame);
\r
177 void recycleOGLRefFrames();
\r
179 virtual long long SetStartOffset(long long curreftime, bool *rsync);
\r
180 long long SetStartAudioOffset(long long curreftime, bool *rsync);
\r
181 virtual void ResetTimeOffsets();
\r
189 int initUsingOSDObjects();
\r
190 int shutdownUsingOSDObjects() {return shutdown();};
\r
192 #ifdef VPE_LIBAV_SUPPORT
\r
193 int getlibavDecodingMode() {return decoding_mode;};
\r
199 int EnterIframePlayback();
\r
201 bool InIframemode() {return iframemode;};
\r
203 UINT DeliverMediaPacket(MediaPacket packet,const UCHAR* bulibaver,UINT *samplepos);
\r
205 int decoding_backend; //1 omx, 2 libav, 3 omx through lib av transcoder
\r
206 #define VPE_DECODER_OMX 1
\r
207 #define VPE_DECODER_libav 2
\r
208 #define VPE_DECODER_OMX_libav_TRANSCODE 3
\r
212 bool offsetvideonotset;
\r
213 bool offsetaudionotset;
\r
214 long long startoffset;
\r
215 long long lastrefvideotime;
\r
216 long long lastrefaudiotime;
\r
217 OMX_TICKS lastreftimeOMX;
\r
218 ULLONG lastreftimePTS;
\r
220 long long playbacktimeoffset; //this is the offset between the media time and system clock
\r
221 long long pausetimecode;
\r
224 static long long GetCurrentSystemTime();
\r
225 static void WaitUntil(long long time);
\r
226 void FrameWaitforDisplay(long long pts);
\r
227 void AdjustAudioPTS(long long pts);
\r
232 #ifdef VPE_OMX_SUPPORT
\r
233 static OMX_ERRORTYPE EventHandler_OMX(OMX_IN OMX_HANDLETYPE handle,OMX_IN OMX_PTR appdata,
\r
234 OMX_IN OMX_EVENTTYPE event_type,OMX_IN OMX_U32 data1,
\r
235 OMX_IN OMX_U32 data2,OMX_IN OMX_PTR event_data);
\r
236 static OMX_ERRORTYPE EmptyBufferDone_OMX(OMX_IN OMX_HANDLETYPE hcomp,OMX_IN OMX_PTR appdata,OMX_IN OMX_BUFFERHEADERTYPE* bulibaver);
\r
237 static OMX_ERRORTYPE FillBufferDone_OMX(OMX_IN OMX_HANDLETYPE hcomp, OMX_IN OMX_PTR appdata,OMX_IN OMX_BUFFERHEADERTYPE* bulibaver);
\r
239 UINT DeliverMediaPacketOMX(MediaPacket packet,
\r
240 const UCHAR* bulibaver,
\r
243 int PrepareInputBufsOMX();
\r
244 int DestroyInputBufsOMX();
\r
246 void AddOmxEvent(VPE_OMX_EVENT new_event);
\r
247 void ReturnEmptyOMXBuffer(OMX_BUFFERHEADERTYPE* bulibaver);
\r
249 int ChangeComponentState(OMX_HANDLETYPE handle,OMX_STATETYPE type);
\r
250 int CommandFinished(OMX_HANDLETYPE handle,OMX_U32 command,OMX_U32 data2);
\r
251 int EnablePort(OMX_HANDLETYPE handle,OMX_U32 port,bool wait);
\r
252 int DisablePort(OMX_HANDLETYPE handle,OMX_U32 port,bool wait=true);
\r
255 int setClockExecutingandRunning();
\r
257 void destroyClock();
\r
259 int getClockAudioandInit(OMX_HANDLETYPE *p_omx_clock,OMX_U32 *p_omx_clock_output_port);
\r
260 int getClockVideoandInit();
\r
261 void LockClock() {clock_mutex.Lock();};
\r
262 void UnlockClock() {clock_mutex.Unlock();};
\r
264 void clockUnpause();
\r
266 Mutex clock_mutex; //clock mutex is now responsible for all omx stuff
\r
270 OMX_HANDLETYPE omx_vid_dec;
\r
271 OMX_HANDLETYPE omx_vid_sched;
\r
272 OMX_HANDLETYPE omx_vid_rend;
\r
273 OMX_HANDLETYPE omx_clock;
\r
274 int clock_references;
\r
277 OMX_U32 omx_codec_input_port;
\r
278 OMX_U32 omx_codec_output_port;
\r
279 OMX_U32 omx_rend_input_port;
\r
280 OMX_U32 omx_shed_input_port;
\r
281 OMX_U32 omx_shed_output_port;
\r
282 OMX_U32 omx_shed_clock_port;
\r
283 OMX_U32 omx_clock_output_port;
\r
284 // OMX_NALUFORMATSTYPE omx_nalu_format;
\r
288 int AllocateCodecsOMX();
\r
289 int DeAllocateCodecsOMX();
\r
291 vector<OMX_BUFFERHEADERTYPE*> input_bufs_omx_all;
\r
292 list<OMX_BUFFERHEADERTYPE*> input_bufs_omx_free;
\r
293 Mutex input_bufs_omx_mutex;
\r
294 OMX_BUFFERHEADERTYPE* cur_input_buf_omx;
\r
298 bool omx_first_frame;
\r
300 Mutex omx_event_mutex;
\r
302 list<VPE_OMX_EVENT> omx_events;
\r
312 #ifdef VPE_LIBAV_MPEG2_TRANSCODING
\r
313 list<OMX_BUFFERHEADERTYPE*> input_bufs_omx_in_libav;
\r
314 vector<transcode_pix_fmt*> pix_fmt_omx_all;
\r
315 list<transcode_pix_fmt*> pix_fmt_omx_free;
\r
317 AVCodec *transcodecodec_libav;
\r
318 AVCodecContext *transcodecodec_context_libav;
\r
319 AVFrame transcode_frame_libav;
\r
321 static enum PixelFormat get_format_transcode(struct AVCodecContext *s, const enum PixelFormat *fmt);
\r
322 static int reget_buffer_transcode(struct AVCodecContext *c, AVFrame *pic);
\r
323 static int get_buffer_transcode(struct AVCodecContext *c, AVFrame *pic);
\r
324 OMX_BUFFERHEADERTYPE* GetFreeOMXBufferforlibav(transcode_pix_fmt **pix_trans);
\r
325 static void release_buffer_transcode(struct AVCodecContext *c, AVFrame *pic);
\r
326 void ReturnEmptyOMXBuffer_libav(OMX_BUFFERHEADERTYPE* buffer,transcode_pix_fmt *pix_fmt);
\r
329 UINT DeliverMediaPacketOMXTranscode(MediaPacket packet,const UCHAR* bulibaver,UINT *samplepos);
\r
330 int DecodePacketOMXTranscode();
\r
331 int InitTranscoderLibAV();
\r
332 int DeInitTranscoderLibAV();
\r
337 #if defined(VPE_LIBAV_MPEG2_TRANSCODING) || defined(VPE_LIBAV_SUPPORT)
\r
338 AVPacket incom_packet_libav;
\r
339 int incom_packet_libav_size;
\r
344 #ifdef VPE_LIBAV_SUPPORT // this is the data for software decoding subject to removal
\r
346 const static int VPE_NO_XVMC=0;
\r
347 const static int VPE_XVMC_MOCOMP=1;
\r
348 const static int VPE_XVMC_IDCT=2;
\r
352 AVCodec *mpeg2codec_libav;
\r
353 AVCodecContext *mpeg2codec_context_libav;
\r
354 vector<AVFrame*> dec_frame_libav_all;
\r
355 list<AVFrame*> dec_frame_libav_free;
\r
356 list<AVFrame*> dec_frame_libav_upload_and_view_pending;
\r
357 list<VPE_FrameBuf*> dec_frame_libav_upload_only_pending;
\r
358 //AVFrame* dec_frame_libav_uploading;
\r
359 VPE_FrameBuf* dec_frame_libav_uploading_framebuf;
\r
360 AVFrame* dec_frame_libav_decoding;
\r
362 void add_dec_frame_upload_only(struct AVCodecContext *s,const AVFrame* data);
\r
364 vector<VPE_FrameBuf*> all_frame_bufs;
\r
365 list<VPE_FrameBuf*> free_frame_bufs;
\r
366 list<VPE_FrameBuf*> locked_libav_frame_buf;
\r
367 list<VPE_FrameBuf*> locked_uploading_frame_buf;
\r
369 VPE_FrameBuf *getFrameBuf(unsigned int *size); //for libav
\r
370 void releaseFrameBufLibav(VPE_FrameBuf*data);
\r
371 void lockFrameBufUpload(VPE_FrameBuf* data);
\r
372 void releaseFrameBufUpload(VPE_FrameBuf* data);
\r
374 unsigned int framebuf_framenum;
\r
376 Mutex vpe_framebuf_mutex;
\r
381 Mutex dec_frame_libav_mutex;
\r
385 UINT DeliverMediaPacketlibav(MediaPacket packet,const UCHAR* bulibaver,UINT *samplepos);
\r
386 int AllocateCodecsLibav();
\r
387 int DeAllocateCodecsLibav();
\r
388 int DecodePacketlibav();
\r
390 static enum PixelFormat get_format_libav(struct AVCodecContext *s, const enum PixelFormat *fmt);
\r
391 static int get_buffer_libav(struct AVCodecContext *c, AVFrame *pic);
\r
392 static int reget_buffer_libav(struct AVCodecContext *c, AVFrame *pic);
\r
393 static void release_buffer_libav(struct AVCodecContext *c, AVFrame *pic);
\r
395 static void draw_horiz_band_libav(struct AVCodecContext *s, const AVFrame *src, int offset[4], int y, int type, int height);
\r
397 bool libav_running;
\r
398 bool libav_hastime; // signals whether a PTS is currently available — presumably paired with libav_time; verify
\r
399 long long libav_time;
\r
401 int libavwidth,libavheight,libavpixfmt;
\r
402 GLMocoShader *moco_shader;
\r
407 vector<VPEOGLFrame*> all_ogl_frames;
\r
408 list<VPEOGLFrame*> free_ogl_frames;
\r
409 list<VPEOGLFrame*> recycle_ref_ogl_frames;
\r
410 list<VPEOGLFrame*> ready_ogl_frames;
\r
411 int ogl_forward_ref_frame_num;
\r
412 int ogl_backward_ref_frame_num;
\r
413 VPEOGLFrame* ogl_forward_ref_frame;
\r
414 VPEOGLFrame* ogl_backward_ref_frame;
\r
416 bool ogl_frame_outside;
\r
417 Mutex ogl_frame_mutex;
\r
419 int AllocateYUV400OglTexture(VPEOGLFrame* outframe,int width,int height,int stride);
\r
420 int AllocateYUV444OglTexture(VPEOGLFrame* outframe,int width,int height,int stride);
\r
423 virtual void threadMethod();
\r
424 virtual void threadPostStopCleanup();
\r
430 EGLDisplay egl_display;
\r
431 EGLSurface egl_surface;
\r
432 EGLContext egl_context;
\r
433 #ifdef BENCHMARK_FPS
\r
434 unsigned int time_in_decoder;
\r
435 unsigned int num_frames;
\r
436 unsigned int time_in_decoder_gl;
\r
437 unsigned int num_frames_gl;
\r
441 MediaPacket mediapacket;
\r