Description: Replace deprecated FFmpeg API
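 Port the ffmpeg plugin to the current FFmpeg API: the PIX_FMT_*/PixelFormat
 names are replaced by AV_PIX_FMT_*/AVPixelFormat, avcodec_alloc_frame() by
 av_frame_alloc(), and the custom get_buffer()/release_buffer() callbacks are
 rewritten for the AVBufferRef-based get_buffer2() API.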
Author: Andreas Cadhalpun <Andreas.Cadhalpun@googlemail.com>
Last-Update: 2015-11-02

--- src/osgPlugins/ffmpeg/FFmpegDecoderVideo.cpp.orig
+++ src/osgPlugins/ffmpeg/FFmpegDecoderVideo.cpp
@@ -71,7 +71,7 @@ void FFmpegDecoderVideo::open(AVStream *
     findAspectRatio();
 
     // Find out whether we support Alpha channel
-    m_alpha_channel = (m_context->pix_fmt == PIX_FMT_YUVA420P);
+    m_alpha_channel = (m_context->pix_fmt == AV_PIX_FMT_YUVA420P);
 
     // Find out the framerate
     m_frame_rate = av_q2d(stream->avg_frame_rate);
@@ -91,20 +91,19 @@ void FFmpegDecoderVideo::open(AVStream *
         throw std::runtime_error("avcodec_open() failed");
 
     // Allocate video frame
-    m_frame.reset(avcodec_alloc_frame());
+    m_frame.reset(av_frame_alloc());
 
     // Allocate converted RGB frame
-    m_frame_rgba.reset(avcodec_alloc_frame());
-    m_buffer_rgba[0].resize(avpicture_get_size(PIX_FMT_RGB24, width(), height()));
+    m_frame_rgba.reset(av_frame_alloc());
+    m_buffer_rgba[0].resize(avpicture_get_size(AV_PIX_FMT_RGB24, width(), height()));
     m_buffer_rgba[1].resize(m_buffer_rgba[0].size());
 
     // Assign appropriate parts of the buffer to image planes in m_frame_rgba
-    avpicture_fill((AVPicture *) (m_frame_rgba).get(), &(m_buffer_rgba[0])[0], PIX_FMT_RGB24, width(), height());
+    avpicture_fill((AVPicture *) (m_frame_rgba).get(), &(m_buffer_rgba[0])[0], AV_PIX_FMT_RGB24, width(), height());
 
     // Override get_buffer()/release_buffer() from codec context in order to retrieve the PTS of each frame.
     m_context->opaque = this;
-    m_context->get_buffer = getBuffer;
-    m_context->release_buffer = releaseBuffer;
+    m_context->get_buffer2 = getBuffer;
 }
 
 
@@ -263,8 +262,8 @@ int FFmpegDecoderVideo::convert(AVPictur
 #ifdef USE_SWSCALE
     if (m_swscale_ctx==0)
     {
-        m_swscale_ctx = sws_getContext(src_width, src_height, (PixelFormat) src_pix_fmt,
-                                       src_width, src_height, (PixelFormat) dst_pix_fmt,
+        m_swscale_ctx = sws_getContext(src_width, src_height, (AVPixelFormat) src_pix_fmt,
+                                       src_width, src_height, (AVPixelFormat) dst_pix_fmt,
                                        /*SWS_BILINEAR*/ SWS_BICUBIC, NULL, NULL, NULL);
     }
 
@@ -311,14 +310,14 @@ void FFmpegDecoderVideo::publishFrame(co
     AVPicture * const dst = (AVPicture *) m_frame_rgba.get();
 
     // Assign appropriate parts of the buffer to image planes in m_frame_rgba
-    avpicture_fill((AVPicture *) (m_frame_rgba).get(), &(m_buffer_rgba[m_writeBuffer])[0], PIX_FMT_RGB24, width(), height());
+    avpicture_fill((AVPicture *) (m_frame_rgba).get(), &(m_buffer_rgba[m_writeBuffer])[0], AV_PIX_FMT_RGB24, width(), height());
 
     // Convert YUVA420p (i.e. YUV420p plus alpha channel) using our own routine
 
-    if (m_context->pix_fmt == PIX_FMT_YUVA420P)
+    if (m_context->pix_fmt == AV_PIX_FMT_YUVA420P)
         yuva420pToRgba(dst, src, width(), height());
     else
-        convert(dst, PIX_FMT_RGB24, src, m_context->pix_fmt, width(), height());
+        convert(dst, AV_PIX_FMT_RGB24, src, m_context->pix_fmt, width(), height());
 
     // Wait 'delay' seconds before publishing the picture.
     int i_delay = static_cast<int>(delay * 1000000 + 0.5);
@@ -345,7 +344,7 @@ void FFmpegDecoderVideo::publishFrame(co
 
 void FFmpegDecoderVideo::yuva420pToRgba(AVPicture * const dst, AVPicture * const src, int width, int height)
 {
-    convert(dst, PIX_FMT_RGB24, src, m_context->pix_fmt, width, height);
+    convert(dst, AV_PIX_FMT_RGB24, src, m_context->pix_fmt, width, height);
 
     const size_t bpp = 4;
 
@@ -363,31 +362,28 @@ void FFmpegDecoderVideo::yuva420pToRgba(
     }
 }
 
-
-
-int FFmpegDecoderVideo::getBuffer(AVCodecContext * const context, AVFrame * const picture)
+int FFmpegDecoderVideo::getBuffer(AVCodecContext * const context, AVFrame * const picture, int flags)
 {
+    AVBufferRef *ref;
     const FFmpegDecoderVideo * const this_ = reinterpret_cast<const FFmpegDecoderVideo*>(context->opaque);
 
-    const int result = avcodec_default_get_buffer(context, picture);
+    const int result = avcodec_default_get_buffer2(context, picture, flags);
     int64_t * p_pts = reinterpret_cast<int64_t*>( av_malloc(sizeof(int64_t)) );
 
     *p_pts = this_->m_packet_pts;
     picture->opaque = p_pts;
 
+    ref = av_buffer_create((uint8_t *)picture->opaque, sizeof(int64_t), FFmpegDecoderVideo::freeBuffer, picture->buf[0], flags);
+    picture->buf[0] = ref;
+
     return result;
 }
 
-
-
-void FFmpegDecoderVideo::releaseBuffer(AVCodecContext * const context, AVFrame * const picture)
+void FFmpegDecoderVideo::freeBuffer(void *opaque, uint8_t *data)
 {
-    if (picture != 0)
-        av_freep(&picture->opaque);
-
-    avcodec_default_release_buffer(context, picture);
+    AVBufferRef *ref = (AVBufferRef *)opaque;
+    av_buffer_unref(&ref);
+    av_free(data);
 }
 
-
-
 } // namespace osgFFmpeg
--- src/osgPlugins/ffmpeg/FFmpegDecoderVideo.hpp.orig
+++ src/osgPlugins/ffmpeg/FFmpegDecoderVideo.hpp
@@ -94,8 +94,8 @@ private:
                 int src_pix_fmt, int src_width, int src_height);
 
 
-    static int getBuffer(AVCodecContext * context, AVFrame * picture);
-    static void releaseBuffer(AVCodecContext * context, AVFrame * picture);
+    static int getBuffer(AVCodecContext * context, AVFrame * picture, int flags);
+    static void freeBuffer(void * opaque, uint8_t *data);
 
     PacketQueue & m_packets;
     FFmpegClocks & m_clocks;
--- src/osgPlugins/ffmpeg/FFmpegParameters.cpp.orig
+++ src/osgPlugins/ffmpeg/FFmpegParameters.cpp
@@ -19,7 +19,7 @@ extern "C"
 #include <libavutil/pixdesc.h>
 }
 
-inline PixelFormat osg_av_get_pix_fmt(const char *name) { return av_get_pix_fmt(name); }
+inline AVPixelFormat osg_av_get_pix_fmt(const char *name) { return av_get_pix_fmt(name); }
 
 
 namespace osgFFmpeg {
--- src/osgPlugins/ffmpeg/FFmpegDecoderAudio.cpp.orig 2016-02-18 21:25:39.627923629 +0000
+++ src/osgPlugins/ffmpeg/FFmpegDecoderAudio.cpp 2016-02-18 21:26:17.071140100 +0000
@@ -227,8 +227,7 @@
     if (avcodec_open2(m_context, p_codec, NULL) < 0)
         throw std::runtime_error("avcodec_open() failed");
 
-    m_context->get_buffer = avcodec_default_get_buffer;
-    m_context->release_buffer = avcodec_default_release_buffer;
+    m_context->get_buffer2 = avcodec_default_get_buffer2;
 
 }
 