FFMPEG 图像拉伸缩放及数据格式转换
本文转载自 http://blog.csdn.net/li_wen01/article/details/65445743
在做摄像头数据采集处理的时候,经常会遇到摄像头输入数据格式的不同,导致编码的时候需要再进行一次数据装换。在X264的编码中,是需要将YUV的Y,U,V 三个分量的数据独立出来。遇到一些YUV交叉分布的数据就比较的麻烦,需要自己设计算法对数据进行分离。但是在FFMPEG中,它就已经包含了图像格式转换的接口,同时它还带有图像拉伸缩放的功能。下面代码实现了任意数据格式的转换和任意尺寸的缩放。
- /*=============================================================================
- # FileName: scaling_image.c
- # Desc: an example of ffmpeg scale image
- # Author: licaibiao
- # LastChange: 2017-03-23
- =============================================================================*/
- #include <stdio.h>
- #include <stdlib.h>
- #include <libavutil/imgutils.h>
- #include <libavutil/parseutils.h>
- #include <libavutil/pixdesc.h>
- #include <libswscale/swscale.h>
- /*
-  * Convert a raw YUYV422 frame read from a file to YUV420P at a new size
-  * using libswscale, and write the raw result to an output file.
-  *
-  * Usage: prog input_file WxH output_file WxH
-  * The input file's pixel format must match src_pix_fmt below; the output
-  * format is fixed by dst_pix_fmt.
-  *
-  * Returns 0 on success, non-zero on any failure.
-  */
- int main(int argc, char **argv)
- {
-     enum AVPixelFormat src_pix_fmt = AV_PIX_FMT_YUYV422;
-     enum AVPixelFormat dst_pix_fmt = AV_PIX_FMT_YUV420P;
-     struct SwsContext *sws_ctx = NULL;
-     const char *dst_filename = NULL;
-     const char *src_filename = NULL;
-     const char *dst_size = NULL;
-     const char *src_size = NULL;
-     /* Zero-initialize so cleanup at `end:` is safe even if allocation
-      * never happened (av_freep(NULL-slot) and the fclose guards below). */
-     uint8_t *src_data[4] = { NULL };
-     uint8_t *dst_data[4] = { NULL };
-     int src_linesize[4];
-     int dst_linesize[4];
-     int src_bufsize;
-     int dst_bufsize;
-     int src_w, src_h;
-     int dst_w, dst_h;
-     int ret = 0;
-     FILE *src_file = NULL;
-     FILE *dst_file = NULL;
-     if (argc != 5) {
-         fprintf(stderr, "Usage: %s input_file input_size(w x h) output_file output_size(w x h)\n"
-                 "API example program to show how to scale an image with libswscale.\n"
-                 "This program generates a pictures, rescales them to the given "
-                 "output_size and saves them to an output file named output_file\n."
-                 "\n", argv[0]);
-         exit(1);
-     }
-     /* Source file and size: parse "WxH" (or a size abbreviation). */
-     src_filename = argv[1];
-     src_size     = argv[2];
-     if (av_parse_video_size(&src_w, &src_h, src_size) < 0) {
-         /* Report the *source* size string, not the destination one. */
-         fprintf(stderr,
-                 "Invalid size '%s', must be in the form WxH or a valid size abbreviation\n",
-                 src_size);
-         exit(1);
-     }
-     src_file = fopen(src_filename, "rb");
-     if (!src_file) {
-         /* Report the *source* filename, not the destination one. */
-         fprintf(stderr, "Could not open source file %s\n", src_filename);
-         exit(1);
-     }
-     /* Destination file and size. */
-     dst_filename = argv[3];
-     dst_size     = argv[4];
-     if (av_parse_video_size(&dst_w, &dst_h, dst_size) < 0) {
-         fprintf(stderr,
-                 "Invalid size '%s', must be in the form WxH or a valid size abbreviation\n",
-                 dst_size);
-         ret = 1;
-         goto end;
-     }
-     dst_file = fopen(dst_filename, "wb");
-     if (!dst_file) {
-         fprintf(stderr, "Could not open destination file %s\n", dst_filename);
-         ret = 1;
-         goto end;
-     }
-     /* Create the scaling/conversion context (format + size in one step). */
-     sws_ctx = sws_getContext(src_w, src_h, src_pix_fmt, dst_w, dst_h, dst_pix_fmt,
-                              SWS_BILINEAR, NULL, NULL, NULL);
-     if (!sws_ctx) {
-         fprintf(stderr,
-                 "Impossible to create scale context for the conversion "
-                 "fmt:%s s:%dx%d -> fmt:%s s:%dx%d\n",
-                 av_get_pix_fmt_name(src_pix_fmt), src_w, src_h,
-                 av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h);
-         ret = AVERROR(EINVAL);
-         goto end;
-     }
-     /* Allocate the source image buffer (16-byte aligned for sws_scale). */
-     if ((ret = av_image_alloc(src_data, src_linesize, src_w, src_h, src_pix_fmt, 16)) < 0) {
-         fprintf(stderr, "Could not allocate source image\n");
-         goto end;
-     }
-     src_bufsize = ret;
-     /* Destination buffer is written to a rawvideo file, so no alignment. */
-     if ((ret = av_image_alloc(dst_data, dst_linesize, dst_w, dst_h, dst_pix_fmt, 1)) < 0) {
-         fprintf(stderr, "Could not allocate destination image\n");
-         goto end;
-     }
-     dst_bufsize = ret;
-     /* Read exactly one raw frame; a short read means a truncated/bad file. */
-     if (fread(src_data[0], 1, src_bufsize, src_file) != (size_t)src_bufsize) {
-         fprintf(stderr, "Could not read a full %d-byte frame from %s\n",
-                 src_bufsize, src_filename);
-         ret = AVERROR(EIO);
-         goto end;
-     }
-     /* Convert pixel format and rescale in one pass. */
-     sws_scale(sws_ctx, (const uint8_t * const*)src_data, src_linesize, 0, src_h, dst_data, dst_linesize);
-     fprintf(stderr, "Scaling succeeded. Play the output file with the command:\n"
-             "ffplay -f rawvideo -pix_fmt %s -video_size %dx%d %s\n",
-             av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h, dst_filename);
-     /* Write the scaled raw image to the output file. */
-     fwrite(dst_data[0], 1, dst_bufsize, dst_file);
- end:
-     /* Guarded cleanup: every resource may or may not have been acquired. */
-     if (dst_file)
-         fclose(dst_file);
-     if (src_file)
-         fclose(src_file);
-     av_freep(&src_data[0]);
-     av_freep(&dst_data[0]);
-     sws_freeContext(sws_ctx);
-     return ret < 0;
- }
- OUT_APP = test
- INCLUDE_PATH = /usr/local/include/
- INCLUDE = -I$(INCLUDE_PATH)libavutil/ -I$(INCLUDE_PATH)libavdevice/ \
- -I$(INCLUDE_PATH)libavcodec/ -I$(INCLUDE_PATH)libswresample \
- -I$(INCLUDE_PATH)libavfilter/ -I$(INCLUDE_PATH)libavformat \
- -I$(INCLUDE_PATH)libswscale/
- FFMPEG_LIBS = -lavformat -lavutil -lavdevice -lavcodec -lswresample -lavfilter -lswscale
- SDL_LIBS =
- # Note: a space is required between the two lists, otherwise the last
- # FFmpeg flag and the first SDL flag are glued into one bogus token.
- LIBS = $(FFMPEG_LIBS) $(SDL_LIBS)
- COMPILE_OPTS = $(INCLUDE)
- C = c
- OBJ = o
- C_COMPILER = cc
- C_FLAGS = $(COMPILE_OPTS) $(CPPFLAGS) $(CFLAGS)
- LINK = cc -o
- LINK_OPTS = -lz -lm -lpthread
- # Object built from the example source above (scaling_image.c).
- LINK_OBJ = scaling_image.o
- .$(C).$(OBJ):
- 	$(C_COMPILER) -c -g $(C_FLAGS) $<
- $(OUT_APP): $(LINK_OBJ)
- 	$(LINK)$@ $(LINK_OBJ) $(LIBS) $(LINK_OPTS)
- clean:
- 	-rm -rf *.$(OBJ) $(OUT_APP) core *.core *~ *.jpeg
- licaibiao@ubuntu:~/test/FFMPEG/examples$ ./test yuyv_320_240 320x240 out640_480 640x480
- Scaling succeeded. Play the output file with the command:
- ffplay -f rawvideo -pix_fmt yuv420p -video_size 640x480 out640_480
- licaibiao@ubuntu:~/test/FFMPEG/examples$
运行时输入的参数为:输入文件,输入文件的尺寸,输出文件名,输出文件尺寸。
需要注意的是: 输入文件格式要与程序中src_pix_fmt 设置一致,输出数据格式由dst_pix_fmt 设置。
原始输入数据yuyv_320_240
拉伸后的输出数据:out640_480
博客显示的问题看上去输入与输出图像差不多大,实际图片输入是输出的1/4大小。