#include <AvTranscoder/util.hpp>

#include <stdexcept>
+ #include <libavutil/channel_layout.h>

#ifndef FF_INPUT_BUFFER_PADDING_SIZE
#define FF_INPUT_BUFFER_PADDING_SIZE 16
@@ -35,31 +36,29 @@ IOutputStream& OutputFile::addVideoStream(const VideoCodec& videoDesc)
{
AVStream& stream = _formatContext.addAVStream(videoDesc.getAVCodec());

- stream.codec->width = videoDesc.getAVCodecContext().width;
- stream.codec->height = videoDesc.getAVCodecContext().height;
- stream.codec->bit_rate = videoDesc.getAVCodecContext().bit_rate;
- stream.codec->pix_fmt = videoDesc.getAVCodecContext().pix_fmt;
- stream.codec->profile = videoDesc.getAVCodecContext().profile;
- stream.codec->level = videoDesc.getAVCodecContext().level;
- stream.codec->field_order = videoDesc.getAVCodecContext().field_order;
+ stream.codecpar->width = videoDesc.getAVCodecContext().width;
+ stream.codecpar->height = videoDesc.getAVCodecContext().height;
+ stream.codecpar->bit_rate = videoDesc.getAVCodecContext().bit_rate;
+ stream.codecpar->format = videoDesc.getAVCodecContext().pix_fmt;
+ stream.codecpar->profile = videoDesc.getAVCodecContext().profile;
+ stream.codecpar->level = videoDesc.getAVCodecContext().level;
+ stream.codecpar->field_order = videoDesc.getAVCodecContext().field_order;

- stream.codec->colorspace = videoDesc.getAVCodecContext().colorspace;
- stream.codec->color_primaries = videoDesc.getAVCodecContext().color_primaries;
- stream.codec->color_range = videoDesc.getAVCodecContext().color_range;
- stream.codec->color_trc = videoDesc.getAVCodecContext().color_trc;
- stream.codec->chroma_sample_location = videoDesc.getAVCodecContext().chroma_sample_location;
+ stream.codecpar->color_space = videoDesc.getAVCodecContext().colorspace;
+ stream.codecpar->color_primaries = videoDesc.getAVCodecContext().color_primaries;
+ stream.codecpar->color_range = videoDesc.getAVCodecContext().color_range;
+ stream.codecpar->color_trc = videoDesc.getAVCodecContext().color_trc;
+ stream.codecpar->chroma_location = videoDesc.getAVCodecContext().chroma_sample_location;

setOutputStream(stream, videoDesc);

// need to set the time_base on the AVCodecContext and the AVStream
// compensating the frame rate with the ticks_per_frame and keeping
// a coherent reading speed.
- av_reduce(&stream.codec->time_base.num, &stream.codec->time_base.den,
+ av_reduce(&stream.time_base.num, &stream.time_base.den,
videoDesc.getAVCodecContext().time_base.num * videoDesc.getAVCodecContext().ticks_per_frame,
videoDesc.getAVCodecContext().time_base.den, INT_MAX);

- stream.time_base = stream.codec->time_base;
-
OutputStream* outputStream = new OutputStream(*this, _formatContext.getNbStreams() - 1);
_outputStreams.push_back(outputStream);
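
Copying the encoder settings field by field, as above, is easy to get out of sync with newly added AVCodecParameters fields. Since FFmpeg 3.1 the whole struct can be filled in one call; a minimal sketch, assuming the encoder context is the one returned by videoDesc.getAVCodecContext():

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// Sketch only: fill AVStream::codecpar from a configured encoder context in one
// call instead of copying width/height/format/colour fields individually.
static int fillStreamParameters(AVStream& stream, const AVCodecContext& encoderContext)
{
    // Copies width, height, format, bit_rate, profile/level, colour metadata,
    // extradata, etc. Returns a negative AVERROR code on failure.
    return avcodec_parameters_from_context(stream.codecpar, &encoderContext);
}
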
@@ -70,16 +69,16 @@ IOutputStream& OutputFile::addAudioStream(const AudioCodec& audioDesc)
{
AVStream& stream = _formatContext.addAVStream(audioDesc.getAVCodec());

- stream.codec->sample_rate = audioDesc.getAVCodecContext().sample_rate;
- stream.codec->channels = audioDesc.getAVCodecContext().channels;
- stream.codec->channel_layout = audioDesc.getAVCodecContext().channel_layout;
- stream.codec->sample_fmt = audioDesc.getAVCodecContext().sample_fmt;
- stream.codec->frame_size = audioDesc.getAVCodecContext().frame_size;
+ stream.codecpar->sample_rate = audioDesc.getAVCodecContext().sample_rate;
+ stream.codecpar->channels = audioDesc.getAVCodecContext().channels;
+ stream.codecpar->channel_layout = audioDesc.getAVCodecContext().channel_layout;
+ stream.codecpar->format = audioDesc.getAVCodecContext().sample_fmt;
+ stream.codecpar->frame_size = audioDesc.getAVCodecContext().frame_size;

setOutputStream(stream, audioDesc);

// need to set the time_base on the AVCodecContext of the AVStream
- av_reduce(&stream.codec->time_base.num, &stream.codec->time_base.den, audioDesc.getAVCodecContext().time_base.num,
+ av_reduce(&stream.time_base.num, &stream.time_base.den, audioDesc.getAVCodecContext().time_base.num,
audioDesc.getAVCodecContext().time_base.den, INT_MAX);

OutputStream* outputStream = new OutputStream(*this, _formatContext.getNbStreams() - 1);
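
Note that AVCodecParameters::channels and channel_layout are themselves deprecated in FFmpeg 5.1+ in favour of the AVChannelLayout API. A hedged sketch of a version-neutral helper, with the 59.24 cutoff taken as approximate and par/nbChannels standing in for the values copied above:

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
}

// Sketch only: describe the channel count on an AVCodecParameters across the
// old (channels/channel_layout) and new (ch_layout) FFmpeg APIs.
static void setDefaultChannelLayout(AVCodecParameters* par, int nbChannels)
{
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(59, 24, 100)
    // New API: a single AVChannelLayout value (mono, stereo, 5.1, ...).
    av_channel_layout_default(&par->ch_layout, nbChannels);
#else
    // Old API: separate count and bitmask fields.
    par->channels = nbChannels;
    par->channel_layout = av_get_default_channel_layout(nbChannels);
#endif
}
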
@@ -92,14 +91,14 @@ IOutputStream& OutputFile::addCustomStream(const ICodec& iCodecDesc)
{
AVStream& stream = _formatContext.addAVStream(iCodecDesc.getAVCodec());

- stream.codec->sample_rate = 48000;
- stream.codec->channels = 1;
- stream.codec->channel_layout = AV_CH_LAYOUT_MONO;
- stream.codec->sample_fmt = AV_SAMPLE_FMT_S32;
- stream.codec->frame_size = 1920;
+ stream.codecpar->sample_rate = 48000;
+ stream.codecpar->channels = 1;
+ stream.codecpar->channel_layout = AV_CH_LAYOUT_MONO;
+ stream.codecpar->format = AV_SAMPLE_FMT_S32;
+ stream.codecpar->frame_size = 1920;

// need to set the time_base on the AVCodecContext of the AVStream
- av_reduce(&stream.codec->time_base.num, &stream.codec->time_base.den, 1, 1, INT_MAX);
+ av_reduce(&stream.time_base.num, &stream.time_base.den, 1, 1, INT_MAX);

OutputStream* outputStream = new OutputStream(*this, _formatContext.getNbStreams() - 1);
_outputStreams.push_back(outputStream);
@@ -136,7 +135,7 @@ IOutputStream& OutputFile::getStream(const size_t streamIndex)

std::string OutputFile::getFilename() const
{
- return std::string(_formatContext.getAVFormatContext().filename);
+ return std::string(_formatContext.getAVFormatContext().url);
}

std::string OutputFile::getFormatName() const
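
AVFormatContext::filename was a fixed char[1024] buffer, deprecated in FFmpeg 4.0 and removed in FFmpeg 5.0; url is its heap-allocated replacement and may be NULL if no output name was ever set, so the plain std::string construction above assumes it is always populated. A hedged compatibility sketch (the major-version cutoff is approximate):

extern "C" {
#include <libavformat/avformat.h>
}
#include <string>

// Sketch only: read the output name back from an AVFormatContext across
// FFmpeg versions, tolerating a NULL url.
static std::string outputName(const AVFormatContext& formatContext)
{
#if LIBAVFORMAT_VERSION_MAJOR >= 58
    return formatContext.url ? std::string(formatContext.url) : std::string();
#else
    return std::string(formatContext.filename); // fixed-size buffer, always present
#endif
}
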
@@ -194,8 +193,7 @@ IOutputStream::EWrappingStatus OutputFile::wrap(const CodedData& data, const size_t streamIndex)
<< _frameCount.at(streamIndex) << ")")

// Packet to wrap
- AVPacket packet;
- av_init_packet(&packet);
+ AVPacket packet = *av_packet_alloc();
packet.stream_index = streamIndex;
packet.data = (uint8_t*)data.getData();
packet.size = data.getSize();
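
av_init_packet() is indeed deprecated, but av_packet_alloc() returns a heap-allocated AVPacket that must eventually be released with av_packet_free(); dereferencing and copying it into a stack variable, as above, leaks that allocation on every call. A sketch of the non-leaking pattern, with data/size/streamIndex standing in for the values used by wrap():

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// Sketch only: wrap one coded buffer without leaking the AVPacket allocation.
static int wrapOnce(AVFormatContext* formatContext, uint8_t* data, int size, int streamIndex)
{
    AVPacket* packet = av_packet_alloc();
    if(!packet)
        return AVERROR(ENOMEM);

    packet->stream_index = streamIndex;
    packet->data = data; // borrowed, not owned by the packet (no AVBufferRef attached)
    packet->size = size;

    const int ret = av_interleaved_write_frame(formatContext, packet);

    packet->data = NULL; // detach the borrowed buffer before releasing the packet
    packet->size = 0;
    av_packet_free(&packet);
    return ret;
}
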
@@ -348,6 +346,7 @@ void OutputFile::setupRemainingWrappingOptions()

void OutputFile::setOutputStream(AVStream& avStream, const ICodec& codec)
{
+ #if LIBAVCODEC_VERSION_MAJOR < 59
// depending on the format, place global headers in extradata instead of every keyframe
if(_formatContext.getAVOutputFormat().flags & AVFMT_GLOBALHEADER)
{
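
The #if guard compiles this avStream.codec-based block out on libavcodec 59+, where AVStream::codec no longer exists; on those versions the usual place to request global headers is the encoder's own AVCodecContext, before it is opened. A hedged sketch, with encoderContext/outputFormat as placeholders for the objects used here:

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

// Sketch only: ask the encoder to emit global headers (codec configuration)
// in extradata rather than in every keyframe, when the container wants that.
static void requestGlobalHeader(AVCodecContext& encoderContext, const AVOutputFormat& outputFormat)
{
    if(outputFormat.flags & AVFMT_GLOBALHEADER)
        encoderContext.flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
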
@@ -368,13 +367,14 @@ void OutputFile::setOutputStream(AVStream& avStream, const ICodec& codec)
LOG_WARN("This codec is considered experimental by libav/ffmpeg:" << codec.getCodecName());
avStream.codec->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
}
+ #endif

// some codecs need/can use extradata to decode
uint8_t* srcExtradata = codec.getAVCodecContext().extradata;
const int srcExtradataSize = codec.getAVCodecContext().extradata_size;
- avStream.codec->extradata = (uint8_t*)av_malloc(srcExtradataSize + FF_INPUT_BUFFER_PADDING_SIZE);
- memcpy(avStream.codec->extradata, srcExtradata, srcExtradataSize);
- memset(((uint8_t*)avStream.codec->extradata) + srcExtradataSize, 0, FF_INPUT_BUFFER_PADDING_SIZE);
- avStream.codec->extradata_size = codec.getAVCodecContext().extradata_size;
+ avStream.codecpar->extradata = (uint8_t*)av_malloc(srcExtradataSize + FF_INPUT_BUFFER_PADDING_SIZE);
+ memcpy(avStream.codecpar->extradata, srcExtradata, srcExtradataSize);
+ memset(((uint8_t*)avStream.codecpar->extradata) + srcExtradataSize, 0, FF_INPUT_BUFFER_PADDING_SIZE);
+ avStream.codecpar->extradata_size = codec.getAVCodecContext().extradata_size;
}
}
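
The extradata copy keeps the file's 16-byte FF_INPUT_BUFFER_PADDING_SIZE fallback, while current FFmpeg exposes AV_INPUT_BUFFER_PADDING_SIZE for this purpose, and memcpy with a NULL source pointer is undefined behaviour when an encoder has no extradata at all. A defensive sketch over the same inputs:

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/mem.h>
}
#include <cstring>

// Sketch only: copy encoder extradata into AVCodecParameters with padding,
// guarding against an encoder that has no extradata.
static int copyExtradata(AVCodecParameters* par, const uint8_t* src, int srcSize)
{
    if(!src || srcSize <= 0)
        return 0; // nothing to copy; leave par->extradata untouched

    // av_mallocz zero-fills, so the padding bytes are already 0.
    uint8_t* dst = (uint8_t*)av_mallocz(srcSize + AV_INPUT_BUFFER_PADDING_SIZE);
    if(!dst)
        return AVERROR(ENOMEM);

    memcpy(dst, src, srcSize);
    par->extradata = dst;
    par->extradata_size = srcSize;
    return 0;
}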