
Ich verwende derzeit ffmpeg 6.0, um H264-Raw-Streams in die Formate MP4, FLV oder RTMP zu kapseln. Wenn die Bitrate 1024 kb/s überschreitet, tritt am unteren Rand des endgültigen Bildes ein Mosaikphänomen auf. Ich vermute, dass ffmpeg einige Pufferparametereinstellungen hat, die dazu führen, dass das Bild abgeschnitten wird. Der Code, den ich geschrieben habe, lautet wie folgt:
/*
 * Set up an FLV/RTMP output for an already-encoded H.264 elementary stream
 * (stream copy — no re-encoding happens here; packets are written as-is
 * with av_interleaved_write_frame).
 *
 * NOTE(review): the "mosaic at the bottom of the picture above ~1024 kb/s"
 * described by the author is most plausibly the 0.5 s rw_timeout below —
 * see the comment at av_dict_set().
 */
ret = avformat_alloc_output_context2(&m_outputFormatContext, NULL, "flv",
                                     m_outputUrl.c_str());
if (ret < 0 || !m_outputFormatContext) {
    return ret;                       // FIX: was unchecked
}
const AVOutputFormat* ofmt = m_outputFormatContext->oformat;

AVStream* outStream = avformat_new_stream(m_outputFormatContext, NULL);
if (!outStream) {
    return AVERROR(ENOMEM);           // FIX: was unchecked
}
outStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
outStream->codecpar->codec_id   = AV_CODEC_ID_H264;
outStream->codecpar->width      = track.width;
outStream->codecpar->height     = track.height;
outStream->codecpar->profile    = track.profile;
outStream->codecpar->codec_tag  = 0;  // let the muxer choose the tag
outStream->codecpar->level      = track.level;

// BUG FIX: profile >= 100 does NOT imply 4:4:4 chroma. Profile 100 (High)
// and 110 (High 10) are 4:2:0, 122 (High 4:2:2) is 4:2:2, and only 244
// (High 4:4:4 Predictive) is 4:4:4.
if (track.profile == 244) {
    outStream->codecpar->format = AV_PIX_FMT_YUV444P;
} else if (track.profile == 122) {
    outStream->codecpar->format = AV_PIX_FMT_YUV422P;
} else {
    outStream->codecpar->format = AV_PIX_FMT_YUV420P;
}

// extradata = SPS + PPS. The FLV muxer builds the AVCDecoderConfigurationRecord
// from Annex-B extradata itself, but that requires params1/params2 to carry
// their 00 00 00 01 start codes — TODO confirm against the producer of `track`.
const size_t spsPpsLen = track.params1.size() + track.params2.size();
outStream->codecpar->extradata =
    (uint8_t*)av_malloc(spsPpsLen + AV_INPUT_BUFFER_PADDING_SIZE);
if (!outStream->codecpar->extradata) {
    return AVERROR(ENOMEM);           // FIX: av_malloc was unchecked
}
outStream->codecpar->extradata_size = (int)spsPpsLen;
memcpy(outStream->codecpar->extradata,
       track.params1.data(), track.params1.size());
memcpy(outStream->codecpar->extradata + track.params1.size(),
       track.params2.data(), track.params2.size());
// FIX: FFmpeg requires the AV_INPUT_BUFFER_PADDING_SIZE tail of extradata to
// be zeroed; av_malloc does not zero-initialize.
memset(outStream->codecpar->extradata + spsPpsLen, 0,
       AV_INPUT_BUFFER_PADDING_SIZE);

AVDictionary* formatOpts = NULL;
// I/O timeout in microseconds (0.5 s). NOTE(review): when the momentary
// bitrate exceeds what the link can absorb, a blocking write can run past
// this timeout and abort mid-packet; the truncated frame decodes as
// macroblock garbage ("mosaic") at the bottom of the picture. Raise this
// (e.g. "5000000") when diagnosing the artifact.
av_dict_set(&formatOpts, "rw_timeout", "500000", 0);
ret = avio_open2(&m_outputFormatContext->pb, m_outputUrl.c_str(),
                 AVIO_FLAG_WRITE, NULL, &formatOpts);
av_dict_free(&formatOpts);            // FIX: dictionary was leaked
if (ret < 0) {
    return ret;                       // FIX: was unchecked
}

ret = avformat_write_header(m_outputFormatContext, NULL);
if (ret < 0) {
    return ret;                       // FIX: was unchecked
}

ret = av_interleaved_write_frame(m_outputFormatContext, packet); // write video packet
if (ret < 0) {
    return ret;                       // FIX: was unchecked
}