
FFmpeg video format conversion code
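The listing below is a complete example program: it opens an input video (d:\1.asf), decodes its first video stream, re-encodes every decoded frame, and muxes the result into an MP4 file (d:\test.mp4). It is written against the legacy pre-1.0 libavformat/libavcodec API (av_open_input_file, guess_format, av_set_parameters, avcodec_encode_video, url_fopen, and so on), which has since been removed from FFmpeg; on current releases the same flow would roughly go through avformat_open_input, avformat_alloc_output_context2, and the avcodec_send_frame/avcodec_receive_packet pair instead.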

int main(int argc, char **argv)
{
const char *filename = "d:\\1.asf";
const char *outfilename = "d:\\test.mp4";

AVFormatContext *pFormatCtxDec, *pFormatCtxEnc;
AVCodecContext *pCodecCtxDec, *pCodecCtxEnc;
AVCodec *pCodecDec, *pCodecEnc;
AVFrame *pFrameDec, *pFrameEnc;
AVOutputFormat *pOutputFormat;
AVStream *video_st;

int i, videoStream;
int outbuf_size = 0;
uint8_t *outbuf = NULL; // NULL so the final av_free() is safe even if no buffer is allocated

AVPacket packet;
int frameFinished, frames = 0;
int out_size = 0; // initialized so the delayed-frame loop at the end is safe if nothing was encoded

// Register all formats and codecs
av_register_all();

// Open video file
if(av_open_input_file(&pFormatCtxDec, filename, NULL, 0, NULL)!=0)
return -1; // Couldn't open file

// Retrieve stream information
if(av_find_stream_info(pFormatCtxDec)<0)
return -1; // Couldn't find stream information

// Find the first video stream
videoStream=-1;
for(i=0; i<pFormatCtxDec->nb_streams; i++) {
if(pFormatCtxDec->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)
{
videoStream=i;
break;
}
}
if(videoStream==-1)
return -1; // Didn't find a video stream

// Get a pointer to the codec context for the video stream
pCodecCtxDec=pFormatCtxDec->streams[videoStream]->codec;

// Find the decoder for the video stream
pCodecDec=avcodec_find_decoder(pCodecCtxDec->codec_id);
if(pCodecDec==NULL)
return -1; // Codec not found

// Inform the codec that we can handle truncated bitstreams -- i.e.,
// bitstreams where frame boundaries can fall in the middle of packets
//if(pCodecDec->capabilities & CODEC_CAP_TRUNCATED)
// pCodecCtxDec->flags|=CODEC_FLAG_TRUNCATED;

// Open codec
if(avcodec_open(pCodecCtxDec, pCodecDec)<0)
return -1; // Could not open codec

// Allocate video frame
pFrameDec = avcodec_alloc_frame();
if(pFrameDec == NULL)
return -1;


// auto-detect the output format from the output file name
pOutputFormat = guess_format(NULL, outfilename, NULL);
if (pOutputFormat == NULL)
return -1;

// allocate the output media context
pFormatCtxEnc = av_alloc_format_context();
if (pFormatCtxEnc == NULL)
return -1;
pFormatCtxEnc->oformat = pOutputFormat;
snprintf(pFormatCtxEnc->filename, sizeof(pFormatCtxEnc->filename), "%s", outfilename);

video_st = av_new_stream(pFormatCtxEnc, 0); // stream id 0 for the video stream
if (video_st == NULL)
return -1;

pCodecCtxEnc = video_st->codec;

pCodecCtxEnc->codec_id = pOutputFormat->video_codec;
pCodecCtxEnc->codec_type = CODEC_TYPE_VIDEO;

// put sample parameters
pCodecCtxEnc->bit_rate = 200000;
// resolution must be a multiple of two
pCodecCtxEnc->width = pCodecCtxDec->width;
pCodecCtxEnc->height = pCodecCtxDec->height;
// frames per second
pCodecCtxEnc->time_base.den = 25;
pCodecCtxEnc->time_base.num = 1;
pCodecCtxEnc->pix_fmt = PIX_FMT_YUV420P;
pCodecCtxEnc->gop_size = 12; /* emit one intra frame at most every twelve frames */

if (pCodecCtxEnc->codec_id == CODEC_ID_MPEG1VIDEO){
/* needed to avoid using macroblocks in which some coeffs overflow;
this does not happen with normal video, it just happens here because
the motion of the chroma plane does not match the luma plane */
pCodecCtxEnc->mb_decision=2;
}
// some formats want stream headers to be separate
if(!strcmp(pFormatCtxEnc->oformat->name, "mp4") || !strcmp(pFormatCtxEnc->oformat->name, "mov") || !strcmp(pFormatCtxEnc->oformat->name, "3gp"))
pCodecCtxEnc->flags |= CODEC_FLAG_GLOBAL_HEADER;

// set the output parameters (must be done even if no parameters).
if (av_set_parameters(pFormatCtxEnc, NULL) < 0) {
return -1;
}

// find the video encoder
pCodecEnc = avcodec_find_encoder(pCodecCtxEnc->codec_id);
if (pCodecEnc == NULL)
return -1;

// open it
if (avcodec_open(pCodecCtxEnc, pCodecEnc) < 0) {
return -1;
}

if (!(pFormatCtxEnc->oformat->flags & AVFMT_RAWPICTURE)) {
/* allocate the output buffer that will receive the encoded frames */
/* XXX: API change will be done */
outbuf_size = 500000;
outbuf = av_malloc(outbuf_size);
if (outbuf == NULL)
return -1;
}

pFrameEnc = avcodec_alloc_frame();
if (pFrameEnc == NULL)
return -1;

// open the output file, if needed
if (!(pOutputFormat->flags & AVFMT_NOFILE)) {
if (url_fopen(&pFormatCtxEnc->pb, outfilename, URL_WRONLY) < 0) {
fprintf(stderr, "Could not open '%s'\n", outfilename);
return -1;
}
}

// write the stream header, if any
av_write_header(pFormatCtxEnc);

// Read frames and save frames to disk

while(av_read_frame(pFormatCtxDec, &packet)>=0)
{
// Is this a packet from the video stream?
if(packet.stream_index==videoStream)
{
// Decode video frame
avcodec_decode_video(pCodecCtxDec, pFrameDec, &frameFinished, packet.data, packet.size);

// Did we get a video frame?
if(frameFinished)
{
frames++;

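// NOTE: the decoder's frame buffers are reused directly below. This only
// works if the decoder already outputs YUV420P at the encoder's width and
// height; otherwise a pixel-format/size conversion (e.g. img_convert in
// this API generation, sws_scale later) would be needed first.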
pFrameEnc->data[0] = pFrameDec->data[0];
pFrameEnc->data[1] = pFrameDec->data[1];
pFrameEnc->data[2] = pFrameDec->data[2];
pFrameEnc->linesize[0] = pFrameDec->linesize[0];
pFrameEnc->linesize[1] = pFrameDec->linesize[1];
pFrameEnc->linesize[2] = pFrameDec->linesize[2];

if (pFormatCtxEnc->oformat->flags & AVFMT_RAWPICTURE) {
/* raw video case. The API will change slightly in the near
future for that */
AVPacket pkt;
av_init_packet(&pkt);

pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index= video_st->index;
pkt.data= (uint8_t *)pFrameEnc;
pkt.size= sizeof(AVPicture);

av_write_frame(pFormatCtxEnc, &pkt);
} else {
// encode the image
out_size = avcodec_encode_video(pCodecCtxEnc, outbuf, outbuf_size, pFrameEnc);
// if zero size, it means the image was buffered
if (out_size != 0) {
AVPacket pkt;
av_init_packet(&pkt);

pkt.pts= av_rescale_q(pCodecCtxEnc->coded_frame->pts, pCodecCtxEnc->time_base, video_st->time_base);
if(pCodecCtxEnc->coded_frame->key_frame)
pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index= video_st->index;
pkt.data= outbuf;
pkt.size= out_size;

// write the compressed frame in the media file

av_write_frame(pFormatCtxEnc, &pkt);
}
}
}
}
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}

// get the delayed frames
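// (passing NULL to avcodec_encode_video drains the frames the encoder is
// still buffering, e.g. because of B-frames)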
for(; out_size; i++) {
out_size = avcodec_encode_video(pCodecCtxEnc, outbuf, outbuf_size, NULL);
if (out_size != 0) {
AVPacket pkt;
av_init_packet(&pkt);

pkt.pts= av_rescale_q(pCodecCtxEnc->coded_frame->pts, pCodecCtxEnc->time_base, video_st->time_base);
if(pCodecCtxEnc->coded_frame->key_frame)
pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index= video_st->index;
pkt.data= outbuf;
pkt.size= out_size;

// write the compressed frame in the media file
av_write_frame(pFormatCtxEnc, &pkt);
}
}

// Close the codec
avcodec_close(pCodecCtxDec);
avcodec_close(pCodecCtxEnc);

// Free the YUV frame
av_free(pFrameDec);
av_free(pFrameEnc);

av_free(outbuf);

// write the trailer, if any
av_write_trailer(pFormatCtxEnc);

// free the streams
for(i = 0; i < pFormatCtxEnc->nb_streams; i++) {
av_freep(&pFormatCtxEnc->streams[i]->codec);
av_freep(&pFormatCtxEnc->streams[i]);
}

if (!(pOutputFormat->flags & AVFMT_NOFILE)) {
/* close the output file */
url_fclose(&pFormatCtxEnc->pb);
}

/* free the stream */
av_free(pFormatCtxEnc);

// Close the video file
av_close_input_file(pFormatCtxDec);

av_free_static();

return 0;
}
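
To try the listing, one plausible build line is shown below; it assumes the matching legacy FFmpeg development headers and libraries are installed and that the source is saved as convert.c (a file name chosen here purely for illustration):

gcc convert.c -o convert -lavformat -lavcodec -lavutil

Depending on how that FFmpeg build was configured, extra libraries such as -lm or -lz may also need to be added to the link line.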
