Browse Source

增加字幕

master
JackLee_CN 1 year ago
parent
commit
86168628c0
  1. 2
      CMakeLists.txt
  2. 371
      MediaAVInfo.cpp
  3. 43
      MediaAVInfo.h
  4. 11
      MediaFFex.h
  5. 127
      SDL2RenderWidget.cpp
  6. 34
      SDL2RenderWidget.h
  7. 12
      Stb_Media.cpp
  8. 4
      Stb_Media.h
  9. 371
      mediaplayer.cpp
  10. 13
      mediaplayer.h
  11. 14
      zffmpeg.cpp
  12. 3
      zffmpeg.h

2
CMakeLists.txt

@ -96,8 +96,6 @@ set(PROJECT_SOURCES
MediaAVPacket.h
MediaAVFilter.cpp
MediaAVFilter.h
MediaAVInfo.cpp
MediaAVInfo.h
MediaPlayerPreview.cpp
MediaPlayerPreview.h
SDL2RenderWidget.cpp

371
MediaAVInfo.cpp

@ -1,371 +0,0 @@
#include "MediaAVInfo.h"
/**
 * Lookup table mapping an AVPixelFormat enum value to its textual name:
 * index = (AVPixelFormat value) + 1, because AV_PIX_FMT_NONE == -1.
 * The entry order must match libavutil/pixfmt.h exactly. Preprocessor
 * aliases (AV_PIX_FMT_Y400A, AV_PIX_FMT_GRAY8A, AV_PIX_FMT_GBR24P) are
 * NOT enum entries and are intentionally omitted — including them would
 * shift every subsequent index and return wrong names.
 * Element type is const char* because string literals are const in C++.
 */
const char* charAVPixelFormat[] {
"AV_PIX_FMT_NONE",
"AV_PIX_FMT_YUV420P", ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
"AV_PIX_FMT_YUYV422", ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
"AV_PIX_FMT_RGB24", ///< packed RGB 8:8:8, 24bpp, RGBRGB...
"AV_PIX_FMT_BGR24", ///< packed RGB 8:8:8, 24bpp, BGRBGR...
"AV_PIX_FMT_YUV422P", ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
"AV_PIX_FMT_YUV444P", ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
"AV_PIX_FMT_YUV410P", ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
"AV_PIX_FMT_YUV411P", ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
"AV_PIX_FMT_GRAY8", ///< Y , 8bpp
"AV_PIX_FMT_MONOWHITE", ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
"AV_PIX_FMT_MONOBLACK", ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
"AV_PIX_FMT_PAL8", ///< 8 bits with AV_PIX_FMT_RGB32 palette
"AV_PIX_FMT_YUVJ420P", ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range
"AV_PIX_FMT_YUVJ422P", ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range
"AV_PIX_FMT_YUVJ444P", ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range
"AV_PIX_FMT_UYVY422", ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
"AV_PIX_FMT_UYYVYY411", ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
"AV_PIX_FMT_BGR8", ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
"AV_PIX_FMT_BGR4", ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
"AV_PIX_FMT_BGR4_BYTE", ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
"AV_PIX_FMT_RGB8", ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
"AV_PIX_FMT_RGB4", ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
"AV_PIX_FMT_RGB4_BYTE", ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
"AV_PIX_FMT_NV12", ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
"AV_PIX_FMT_NV21", ///< as above, but U and V bytes are swapped
"AV_PIX_FMT_ARGB", ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
"AV_PIX_FMT_RGBA", ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
"AV_PIX_FMT_ABGR", ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
"AV_PIX_FMT_BGRA", ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
"AV_PIX_FMT_GRAY16BE", ///< Y , 16bpp, big-endian
"AV_PIX_FMT_GRAY16LE", ///< Y , 16bpp, little-endian
"AV_PIX_FMT_YUV440P", ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
"AV_PIX_FMT_YUVJ440P", ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
"AV_PIX_FMT_YUVA420P", ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
"AV_PIX_FMT_RGB48BE", ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
"AV_PIX_FMT_RGB48LE", ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
"AV_PIX_FMT_RGB565BE", ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
"AV_PIX_FMT_RGB565LE", ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
"AV_PIX_FMT_RGB555BE", ///< packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined
"AV_PIX_FMT_RGB555LE", ///< packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_BGR565BE", ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
"AV_PIX_FMT_BGR565LE", ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
"AV_PIX_FMT_BGR555BE", ///< packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined
"AV_PIX_FMT_BGR555LE", ///< packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined
/**
 * Hardware acceleration through VA-API, data[3] contains a
 * VASurfaceID.
 */
"AV_PIX_FMT_VAAPI",
"AV_PIX_FMT_YUV420P16LE", ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV420P16BE", ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV422P16LE", ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV422P16BE", ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P16LE", ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P16BE", ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_DXVA2_VLD", ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
"AV_PIX_FMT_RGB444LE", ///< packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_RGB444BE", ///< packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_BGR444LE", ///< packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_BGR444BE", ///< packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_YA8", ///< 8 bits gray, 8 bits alpha
// AV_PIX_FMT_Y400A and AV_PIX_FMT_GRAY8A are #define aliases of YA8 in
// pixfmt.h, not enum entries — deliberately omitted to keep indices aligned.
"AV_PIX_FMT_BGR48BE", ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
"AV_PIX_FMT_BGR48LE", ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
/**
 * The following 12 formats have the disadvantage of needing 1 format for each bit depth.
 * Notice that each 9/10 bits sample is stored in 16 bits with extra padding.
 * If you want to support multiple bit depths, then using AV_PIX_FMT_YUV420P16* with the bpp stored separately is better.
 */
"AV_PIX_FMT_YUV420P9BE", ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P9LE", ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV420P10BE",///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P10LE",///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV422P10BE",///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P10LE",///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P9BE", ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P9LE", ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P10BE",///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P10LE",///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV422P9BE", ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P9LE", ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_GBRP", ///< planar GBR 4:4:4 24bpp
// AV_PIX_FMT_GBR24P is a #define alias of GBRP — omitted (see note above).
"AV_PIX_FMT_GBRP9BE", ///< planar GBR 4:4:4 27bpp, big-endian
"AV_PIX_FMT_GBRP9LE", ///< planar GBR 4:4:4 27bpp, little-endian
"AV_PIX_FMT_GBRP10BE", ///< planar GBR 4:4:4 30bpp, big-endian
"AV_PIX_FMT_GBRP10LE", ///< planar GBR 4:4:4 30bpp, little-endian
"AV_PIX_FMT_GBRP16BE", ///< planar GBR 4:4:4 48bpp, big-endian
"AV_PIX_FMT_GBRP16LE", ///< planar GBR 4:4:4 48bpp, little-endian
"AV_PIX_FMT_YUVA422P", ///< planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
"AV_PIX_FMT_YUVA444P", ///< planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
"AV_PIX_FMT_YUVA420P9BE", ///< planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian
"AV_PIX_FMT_YUVA420P9LE", ///< planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian
"AV_PIX_FMT_YUVA422P9BE", ///< planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian
"AV_PIX_FMT_YUVA422P9LE", ///< planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian
"AV_PIX_FMT_YUVA444P9BE", ///< planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
"AV_PIX_FMT_YUVA444P9LE", ///< planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
"AV_PIX_FMT_YUVA420P10BE", ///< planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA420P10LE", ///< planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA422P10BE", ///< planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA422P10LE", ///< planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA444P10BE", ///< planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA444P10LE", ///< planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA420P16BE", ///< planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA420P16LE", ///< planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA422P16BE", ///< planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA422P16LE", ///< planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA444P16BE", ///< planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA444P16LE", ///< planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
"AV_PIX_FMT_VDPAU", ///< HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface
"AV_PIX_FMT_XYZ12LE", ///< packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0
"AV_PIX_FMT_XYZ12BE", ///< packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0
"AV_PIX_FMT_NV16", ///< interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
"AV_PIX_FMT_NV20LE", ///< interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_NV20BE", ///< interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_RGBA64BE", ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
"AV_PIX_FMT_RGBA64LE", ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
"AV_PIX_FMT_BGRA64BE", ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
"AV_PIX_FMT_BGRA64LE", ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
"AV_PIX_FMT_YVYU422", ///< packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb
"AV_PIX_FMT_YA16BE", ///< 16 bits gray, 16 bits alpha (big-endian)
"AV_PIX_FMT_YA16LE", ///< 16 bits gray, 16 bits alpha (little-endian)
"AV_PIX_FMT_GBRAP", ///< planar GBRA 4:4:4:4 32bpp
"AV_PIX_FMT_GBRAP16BE", ///< planar GBRA 4:4:4:4 64bpp, big-endian
"AV_PIX_FMT_GBRAP16LE", ///< planar GBRA 4:4:4:4 64bpp, little-endian
/**
 * HW acceleration through QSV, data[3] contains a pointer to the
 * mfxFrameSurface1 structure.
 */
"AV_PIX_FMT_QSV",
/**
 * HW acceleration though MMAL, data[3] contains a pointer to the
 * MMAL_BUFFER_HEADER_T structure.
 */
"AV_PIX_FMT_MMAL",
"AV_PIX_FMT_D3D11VA_VLD", ///< HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer
/**
 * HW acceleration through CUDA. data[i] contain CUdeviceptr pointers
 * exactly as for system memory frames.
 */
"AV_PIX_FMT_CUDA",
"AV_PIX_FMT_0RGB", ///< packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined
"AV_PIX_FMT_RGB0", ///< packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
"AV_PIX_FMT_0BGR", ///< packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined
"AV_PIX_FMT_BGR0", ///< packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
"AV_PIX_FMT_YUV420P12BE", ///< planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P12LE", ///< planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV420P14BE", ///< planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P14LE", ///< planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV422P12BE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P12LE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV422P14BE", ///< planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P14LE", ///< planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P12BE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P12LE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P14BE", ///< planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P14LE", ///< planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_GBRP12BE", ///< planar GBR 4:4:4 36bpp, big-endian
"AV_PIX_FMT_GBRP12LE", ///< planar GBR 4:4:4 36bpp, little-endian
"AV_PIX_FMT_GBRP14BE", ///< planar GBR 4:4:4 42bpp, big-endian
"AV_PIX_FMT_GBRP14LE", ///< planar GBR 4:4:4 42bpp, little-endian
"AV_PIX_FMT_YUVJ411P", ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range
"AV_PIX_FMT_BAYER_BGGR8", ///< bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_RGGB8", ///< bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_GBRG8", ///< bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_GRBG8", ///< bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_BGGR16LE", ///< bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_BGGR16BE", ///< bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_BAYER_RGGB16LE", ///< bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_RGGB16BE", ///< bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_BAYER_GBRG16LE", ///< bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_GBRG16BE", ///< bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_BAYER_GRBG16LE", ///< bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_GRBG16BE", ///< bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_XVMC",///< XVideo Motion Acceleration via common packet passing
"AV_PIX_FMT_YUV440P10LE", ///< planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
"AV_PIX_FMT_YUV440P10BE", ///< planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
"AV_PIX_FMT_YUV440P12LE", ///< planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
"AV_PIX_FMT_YUV440P12BE", ///< planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
"AV_PIX_FMT_AYUV64LE", ///< packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
"AV_PIX_FMT_AYUV64BE", ///< packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
"AV_PIX_FMT_VIDEOTOOLBOX", ///< hardware decoding through Videotoolbox
"AV_PIX_FMT_P010LE", ///< like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian
"AV_PIX_FMT_P010BE", ///< like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian
"AV_PIX_FMT_GBRAP12BE", ///< planar GBR 4:4:4:4 48bpp, big-endian
"AV_PIX_FMT_GBRAP12LE", ///< planar GBR 4:4:4:4 48bpp, little-endian
"AV_PIX_FMT_GBRAP10BE", ///< planar GBR 4:4:4:4 40bpp, big-endian
"AV_PIX_FMT_GBRAP10LE", ///< planar GBR 4:4:4:4 40bpp, little-endian
"AV_PIX_FMT_MEDIACODEC", ///< hardware decoding through MediaCodec
"AV_PIX_FMT_GRAY12BE", ///< Y , 12bpp, big-endian
"AV_PIX_FMT_GRAY12LE", ///< Y , 12bpp, little-endian
"AV_PIX_FMT_GRAY10BE", ///< Y , 10bpp, big-endian
"AV_PIX_FMT_GRAY10LE", ///< Y , 10bpp, little-endian
"AV_PIX_FMT_P016LE", ///< like NV12, with 16bpp per component, little-endian
"AV_PIX_FMT_P016BE", ///< like NV12, with 16bpp per component, big-endian
/**
 * Hardware surfaces for Direct3D11.
 *
 * This is preferred over the legacy AV_PIX_FMT_D3D11VA_VLD. The new D3D11
 * hwaccel API and filtering support AV_PIX_FMT_D3D11 only.
 *
 * data[0] contains a ID3D11Texture2D pointer, and data[1] contains the
 * texture array index of the frame as intptr_t if the ID3D11Texture2D is
 * an array texture (or always 0 if it's a normal texture).
 */
"AV_PIX_FMT_D3D11",
"AV_PIX_FMT_GRAY9BE", ///< Y , 9bpp, big-endian
"AV_PIX_FMT_GRAY9LE", ///< Y , 9bpp, little-endian
"AV_PIX_FMT_GBRPF32BE", ///< IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian
"AV_PIX_FMT_GBRPF32LE", ///< IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian
"AV_PIX_FMT_GBRAPF32BE", ///< IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian
"AV_PIX_FMT_GBRAPF32LE", ///< IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian
/**
 * DRM-managed buffers exposed through PRIME buffer sharing.
 *
 * data[0] points to an AVDRMFrameDescriptor.
 */
"AV_PIX_FMT_DRM_PRIME",
/**
 * Hardware surfaces for OpenCL.
 *
 * data[i] contain 2D image objects (typed in C as cl_mem, used
 * in OpenCL as image2d_t) for each plane of the surface.
 */
"AV_PIX_FMT_OPENCL",
"AV_PIX_FMT_GRAY14BE", ///< Y , 14bpp, big-endian
"AV_PIX_FMT_GRAY14LE", ///< Y , 14bpp, little-endian
"AV_PIX_FMT_GRAYF32BE", ///< IEEE-754 single precision Y, 32bpp, big-endian
"AV_PIX_FMT_GRAYF32LE", ///< IEEE-754 single precision Y, 32bpp, little-endian
"AV_PIX_FMT_YUVA422P12BE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian
"AV_PIX_FMT_YUVA422P12LE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian
"AV_PIX_FMT_YUVA444P12BE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian
"AV_PIX_FMT_YUVA444P12LE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian
"AV_PIX_FMT_NV24", ///< planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
"AV_PIX_FMT_NV42", ///< as above, but U and V bytes are swapped
/**
 * Vulkan hardware images.
 *
 * data[0] points to an AVVkFrame
 */
"AV_PIX_FMT_VULKAN",
"AV_PIX_FMT_Y210BE", ///< packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian
"AV_PIX_FMT_Y210LE", ///< packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian
"AV_PIX_FMT_X2RGB10LE", ///< packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_X2RGB10BE", ///< packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_X2BGR10LE", ///< packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_X2BGR10BE", ///< packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_P210BE", ///< interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, big-endian
"AV_PIX_FMT_P210LE", ///< interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, little-endian
"AV_PIX_FMT_P410BE", ///< interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, big-endian
"AV_PIX_FMT_P410LE", ///< interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, little-endian
"AV_PIX_FMT_P216BE", ///< interleaved chroma YUV 4:2:2, 32bpp, big-endian
"AV_PIX_FMT_P216LE", ///< interleaved chroma YUV 4:2:2, 32bpp, little-endian
"AV_PIX_FMT_P416BE", ///< interleaved chroma YUV 4:4:4, 48bpp, big-endian
"AV_PIX_FMT_P416LE", ///< interleaved chroma YUV 4:4:4, 48bpp, little-endian
"AV_PIX_FMT_NB" ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
};
// Builds the media-info dialog: allocates the layout and one QLabel per
// field, then seeds each label with its caption text. The labels are
// re-parented to this dialog later, when update_media_info() attaches them
// to the layout.
MediaAVInfo::MediaAVInfo()
{
    m_Layout = new QVBoxLayout;
    m_fmtCtx = nullptr;
    name_file_str = new QLabel;
    channels_str = new QLabel;
    sample_rate_str = new QLabel;
    bit_rate_str = new QLabel;
    duration_str = new QLabel;
    pix_fmt_str = new QLabel;
    pkg_fmt_name_str = new QLabel;

    name_file_str->setText(tr("名称:"));
    channels_str->setText(tr("音频声道数:"));
    sample_rate_str->setText(tr("音频采样率:"));
    bit_rate_str->setText(tr("码率:"));
    duration_str->setText(tr("总时长:"));
    pix_fmt_str->setText(tr("视频像素格式:"));
    pkg_fmt_name_str->setText(tr("封装格式:"));
}
// No manual cleanup here: once update_media_info() has attached the labels
// via addWidget()/setLayout(), Qt's parent-child ownership presumably frees
// them with the dialog. NOTE(review): if update_media_info() is never
// called, the labels allocated in the constructor leak — confirm intended.
MediaAVInfo::~MediaAVInfo() = default;
/**
 * Refreshes every label of the info dialog from an already-opened
 * AVFormatContext, and attaches the labels to the dialog layout on the
 * first call.
 *
 * @param _updatefmtCtx demuxer context opened by the caller (not owned here)
 * @param _filename     file name to display (assumed UTF-8; TODO confirm)
 */
void MediaAVInfo::update_media_info(AVFormatContext *_updatefmtCtx,char *_filename){
    int audioindex = -1;
    int videoindex = -1;
    // Remember the last audio and the last video stream index found
    // (preserves the original behaviour). nb_streams is unsigned, so the
    // loop counter is unsigned too to avoid a signed/unsigned comparison.
    for (unsigned int i = 0; i < _updatefmtCtx->nb_streams; i++) {
        if (_updatefmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
        }
        else if (_updatefmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            audioindex = i;
        }
    }
    m_fmtCtx = _updatefmtCtx;

    const QString noAudio = "无法获取音频信息";
    name_file_str->setText("名称:" + QString::fromUtf8(_filename));
    // NOTE(review): codecpar->channels is deprecated in newer FFmpeg
    // (replaced by ch_layout.nb_channels); kept to match this project's
    // FFmpeg version — confirm before upgrading.
    channels_str->setText("音频声道数:" + (audioindex == -1 ? noAudio : QString::number(_updatefmtCtx->streams[audioindex]->codecpar->channels)));
    sample_rate_str->setText("音频采样率:" + (audioindex == -1 ? noAudio : QString::number(_updatefmtCtx->streams[audioindex]->codecpar->sample_rate)));
    bit_rate_str->setText("码率:" + QString::number(_updatefmtCtx->bit_rate));
    // duration is in AV_TIME_BASE units; convert to whole seconds.
    duration_str->setText("总时长:" + QString::number(round(_updatefmtCtx->duration * av_q2d(AV_TIME_BASE_Q))));
    // BUG FIX: the pixel format lives in codecpar->format, not
    // codecpar->frame_size (frame_size is an audio-only field). +1 because
    // AV_PIX_FMT_NONE == -1 maps to charAVPixelFormat[0]; bounds-check the
    // table so a format newer than the table cannot read out of range.
    QString pixName = "无法获取视频信息";
    if (videoindex != -1) {
        const int fmtIdx = _updatefmtCtx->streams[videoindex]->codecpar->format + 1;
        const int tableLen = (int)(sizeof(charAVPixelFormat) / sizeof(charAVPixelFormat[0]));
        if (fmtIdx >= 0 && fmtIdx < tableLen) {
            pixName = QString(charAVPixelFormat[fmtIdx]);
        }
    }
    pix_fmt_str->setText("视频像素格式:" + pixName);
    pkg_fmt_name_str->setText("封装格式名称:" + QString(_updatefmtCtx->iformat->name));

    // Attach the widgets and layout only once; re-adding them on every
    // refresh triggers Qt "widget already in layout" warnings.
    if (layout() == nullptr) {
        m_Layout->addWidget(name_file_str);
        m_Layout->addWidget(channels_str);
        m_Layout->addWidget(sample_rate_str);
        m_Layout->addWidget(bit_rate_str);
        m_Layout->addWidget(duration_str);
        m_Layout->addWidget(pix_fmt_str);
        m_Layout->addWidget(pkg_fmt_name_str);
        setLayout(m_Layout);
    }
}

43
MediaAVInfo.h

@ -1,43 +0,0 @@
#ifndef MEDIAAVINFO_H
#define MEDIAAVINFO_H
extern "C"{
#include <libavformat/avformat.h>
}
#include <QDialog>
#include <QVBoxLayout>
#include <QLabel>
#include <QString>
// Modal-capable dialog that displays basic media information (file name,
// audio channels/sample rate, bit rate, duration, pixel format, container
// format) read from an FFmpeg AVFormatContext.
class MediaAVInfo : public QDialog
{
Q_OBJECT
public:
MediaAVInfo();
~MediaAVInfo();
// Refreshes all labels from an already-opened demuxer context.
// update_fmtCtx is borrowed, not owned; filename is the display name.
void update_media_info(AVFormatContext *update_fmtCtx,char *filename);
private:
QVBoxLayout* m_Layout;          // vertical layout holding one label per field
AVFormatContext *m_fmtCtx;      // last context passed in (borrowed pointer)
QLabel* name_file_str;          // "名称:" + file name
QLabel* channels_str;           // "音频声道数:" + audio channel count
QLabel* sample_rate_str;        // "音频采样率:" + audio sample rate
QLabel* bit_rate_str;           // "码率:" + container bit rate
QLabel* duration_str;           // "总时长:" + duration in seconds
QLabel* pix_fmt_str;            // "视频像素格式:" + pixel-format name
QLabel* pkg_fmt_name_str;       // container/demuxer format name
// Separate value labels were apparently planned but never used:
/* QLabel* name_file_data;
QLabel* channels_data;
QLabel* sample_rate_data;
QLabel* bit_rate_data;
QLabel* duration_data;
QLabel* pix_fmt_data;
QLabel* pkg_fmt_name_data; */
};
#endif //MEDIAAVINFO_H

11
MediaFFex.h

@ -5,14 +5,21 @@ extern "C"
{
#include <libavformat/avformat.h>
}
#include "MediaAVFrame.h"
/* Video output frame parameters, shared between the decoder side and the
 * renderer/overlay side. The display fields below are pre-formatted
 * strings so the renderer can concatenate them directly into overlay text.
 *
 * BUG FIX: the struct previously declared `pix_fmt` twice (a leftover
 * `AVPixelFormat pix_fmt;` next to `std::string pix_fmt;`), which is a
 * duplicate-member compile error. The std::string member is kept because
 * consumers append it to display strings. */
typedef struct
{
int width;                 // output frame width in pixels
int height;                // output frame height in pixels
int size;                  // buffer size — assumed bytes; TODO confirm with producer
std::string pix_fmt;       // pixel-format name, e.g. "AV_PIX_FMT_YUV420P"
std::string filename;      // media file name/path
std::string channels;      // audio channel count (pre-formatted for display)
std::string sample_rate;   // audio sample rate (pre-formatted for display)
std::string bit_rate;      // container bit rate (pre-formatted for display)
std::string duration;      // total duration (pre-formatted for display)
std::string pkg_fmt_name;  // container (demuxer) format name
bool info_state;           // whether the info fields above are populated
} v_sws_spec;
/*音频 参数*/

127
SDL2RenderWidget.cpp

@ -7,16 +7,17 @@ SDL2RenderWidget::SDL2RenderWidget(QWidget *parent)
{
printf("SDL could not initialize! SDL_Error: %s\n", SDL_GetError());
}
SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "1");
SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "1");
m_sdl_window = SDL_CreateWindowFrom((void *)(parent->winId()));
m_sdl_renderer = SDL_CreateRenderer(m_sdl_window, -1, SDL_RENDERER_TARGETTEXTURE);
SDL_SetRenderDrawBlendMode(m_sdl_renderer, SDL_BLENDMODE_BLEND);
if (!m_sdl_window)
{
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
}
{
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
}
SDL_CreateThread(SDL2RenderWidget::refresh_default_image, NULL, this);
}
SDL2RenderWidget::~SDL2RenderWidget()
@ -32,47 +33,52 @@ void SDL2RenderWidget::set_Size(QSize _size)
m_height = _size.height();
SDL_SetWindowSize(m_sdl_window, _size.width(), _size.height());
}
void SDL2RenderWidget::default_image()
void SDL2RenderWidget::renderer_default_image()
{
if (!m_sdl_surface)
{
m_sdl_surface = m_stb_media->default_logo_surface(logoImageStr);
m_sdl_texture = SDL_CreateTextureFromSurface(m_sdl_renderer, m_sdl_surface);
m_stb_media->setFontArgs(m_sdl_renderer);
}
int iW, iH, iWidth, iHeight;
SDL_QueryTexture(m_sdl_texture, NULL, NULL, &iW, &iH);
SDL_GetWindowSize(m_sdl_window, &iWidth, &iHeight);
m_sdl_rect.x = iWidth / 2 - iW / 2;
m_sdl_rect.y = iHeight / 2 - iH / 2;
m_sdl_rect.w = iW;
m_sdl_rect.h = iH;
if (!m_sdl_surface)
{
m_sdl_surface = m_stb_media->default_logo_surface(logoImageStr);
m_sdl_texture = SDL_CreateTextureFromSurface(m_sdl_renderer, m_sdl_surface);
m_stb_media->setFontArgs(m_sdl_renderer, 48, 12);
}
int iW, iH, iWidth, iHeight;
SDL_Rect m_image_rect, m_font_rect;
SDL_QueryTexture(m_sdl_texture, NULL, NULL, &iW, &iH);
SDL_GetWindowSize(m_sdl_window, &iWidth, &iHeight);
SDL_RenderClear(m_sdl_renderer);
SDL_RenderCopy(m_sdl_renderer, m_sdl_texture, NULL, &m_sdl_rect);
m_image_rect.x = iWidth / 2 - iW / 2;
m_image_rect.y = iHeight / 2 - iH / 2;
m_image_rect.w = iW;
m_image_rect.h = iH;
m_sdl_font_rect.x = (iWidth / 2 - iW / 2) - m_stb_media->fc.faceSize;
m_sdl_font_rect.y = iHeight - 50;
SDL_RenderClear(m_sdl_renderer);
SDL_RenderCopy(m_sdl_renderer, m_sdl_texture, NULL, &m_image_rect);
m_stb_media->fc.drawText(m_sdl_font_rect.x, m_sdl_font_rect.y, logoStr);
SDL_RenderPresent(m_sdl_renderer);
m_font_rect.x = (iWidth / 2 - iW / 2) - m_stb_media->fc.faceSize;
m_font_rect.y = iHeight - 50;
// renderer_text_info(m_font_rect,logoStr, default_logo_str);
m_stb_media->fc.drawText(m_font_rect.x, m_font_rect.y, logoStr);
SDL_RenderPresent(m_sdl_renderer);
}
void SDL2RenderWidget::update_image(std::shared_ptr<MediaAVFrame> yuv_frame)
void SDL2RenderWidget::update_image(std::shared_ptr<MediaAVFrame> v_frame, v_sws_spec *v_spec)
{
m_renderer_image_type = media_video_image;
int nTextureWidth, nTextureHeight;
// 首先查询当前纹理对象的宽高,如果不符合,那么需要重建纹理对象
SDL_QueryTexture(m_sdl_texture, nullptr, nullptr, &nTextureWidth, &nTextureHeight);
if (nTextureWidth != yuv_frame->m_frame->width || nTextureHeight != yuv_frame->m_frame->height)
if (nTextureWidth != v_frame->m_frame->width || nTextureHeight != v_frame->m_frame->height)
{
if (!m_sdl_texture)
{
SDL_DestroyTexture(m_sdl_texture);
}
m_sdl_texture = SDL_CreateTexture(m_sdl_renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
yuv_frame->m_frame->width, yuv_frame->m_frame->height);
}
v_frame->m_frame->width, v_frame->m_frame->height);
m_stb_media->setFontArgs(m_sdl_renderer, 26, 12);
}
SDL_GetWindowSize(m_sdl_window, &nTextureWidth, &nTextureHeight);
m_sdl_rect.x = 0;
m_sdl_rect.y = 0;
@ -80,17 +86,45 @@ void SDL2RenderWidget::update_image(std::shared_ptr<MediaAVFrame> yuv_frame)
m_sdl_rect.h = nTextureHeight;
SDL_UpdateYUVTexture(m_sdl_texture,
&m_sdl_rect,
yuv_frame->m_frame->data[0],
yuv_frame->m_frame->linesize[0],
yuv_frame->m_frame->data[1],
yuv_frame->m_frame->linesize[1],
yuv_frame->m_frame->data[2],
yuv_frame->m_frame->linesize[2]);
v_frame->m_frame->data[0],
v_frame->m_frame->linesize[0],
v_frame->m_frame->data[1],
v_frame->m_frame->linesize[1],
v_frame->m_frame->data[2],
v_frame->m_frame->linesize[2]);
SDL_RenderClear(m_sdl_renderer);
SDL_RenderCopy(m_sdl_renderer, m_sdl_texture, &m_sdl_rect, NULL);
SDL_Rect m_font_rect;
std::string info = "\n" + v_spec->filename + "\n" +
v_spec->channels + "\n" +
v_spec->sample_rate + "\n" +
v_spec->bit_rate + "\n" +
v_spec->duration + "\n" +
v_spec->pix_fmt + "\n" +
v_spec->pkg_fmt_name + "\n";
// renderer_text_info(m_font_rect,info, media_info_str);
m_stb_media->fc.drawText(m_font_rect.x, m_font_rect.y, info);
SDL_RenderPresent(m_sdl_renderer);
}
// Draws r_str at the origin of r_rect using the stb font cache. The
// renderer type selects the text category; attachments currently have no
// rendering path and are silently skipped (matching existing behaviour).
void SDL2RenderWidget::renderer_text_info(SDL_Rect r_rect, std::string r_str, font_renderer_type f_type)
{
    if (f_type == media_info_str || f_type == default_logo_str)
    {
        m_stb_media->fc.drawText(r_rect.x, r_rect.y, r_str);
    }
    else if (f_type == media_attachment)
    {
        // Not implemented yet — intentionally does nothing.
    }
}
SDL_AudioDeviceID SDL2RenderWidget::openAudioDevice(SDL_AudioSpec *spec)
{
SDL_AudioSpec have;
@ -109,3 +143,22 @@ SDL_AudioDeviceID SDL2RenderWidget::openAudioDevice(SDL_AudioSpec *spec)
SDL_PauseAudioDevice(dev, 0);
return dev;
}
/**
 * Worker-thread entry (started via SDL_CreateThread): while no video is
 * being rendered, periodically asks the Qt GUI thread to redraw the default
 * logo image.
 *
 * @param opaque the owning SDL2RenderWidget, passed through SDL_CreateThread
 * @return 0 (currently unreachable — the loop has no exit condition)
 */
int SDL2RenderWidget::refresh_default_image(void *opaque)
{
    SDL2RenderWidget *n_sdl = (SDL2RenderWidget *)opaque;
    while (true)
    {
        if (n_sdl->m_renderer_image_type == default_logo_image)
        {
            // Rendering must run on the Qt GUI thread, so queue it there.
            QMetaObject::invokeMethod(
                QApplication::instance(), [=]()
                { n_sdl->renderer_default_image(); },
                Qt::QueuedConnection);
        }
        // BUG FIX: sleep on every iteration, not only after drawing the
        // logo — otherwise this loop busy-spins at 100% CPU whenever
        // m_renderer_image_type == media_video_image (i.e. during playback).
        SDL_Delay(n_sdl->sd_time);
    }
    // Unreachable until a stop condition is added to the loop; kept as the
    // intended shutdown notification. TODO: add a stop flag and break.
    SDL_Event _event;
    _event.type = BREAK_EVENT;
    SDL_PushEvent(&_event);
    return 0;
}

34
SDL2RenderWidget.h

@ -2,9 +2,27 @@
#define SDL2RENDERWIDGET_H
#include <QWidget>
#include <memory>
#include "MediaAVFrame.h"
#include <MediaFFex.h>
#include "SDL2/SDL.h"
#include "Stb_Media.h"
#include <QApplication>
//Refresh Event
#define REFRESH_EVENT (SDL_USEREVENT + 1)
//Break
#define BREAK_EVENT (SDL_USEREVENT + 2)
// Category of text passed to SDL2RenderWidget::renderer_text_info().
typedef enum
{
media_info_str,   // media stream information overlay
media_attachment, // attachment text (no rendering path yet)
default_logo_str  // caption drawn beneath the default logo
} font_renderer_type;
// What the render loop is currently showing; refresh_default_image() only
// redraws while this is default_logo_image.
typedef enum
{
media_video_image,  // decoded video frames are being rendered
default_logo_image  // idle: default logo + caption are shown
} media_renderer_type;
class SDL2RenderWidget : public QWidget
{
@ -12,19 +30,25 @@ class SDL2RenderWidget : public QWidget
public:
SDL2RenderWidget(QWidget *parent);
~SDL2RenderWidget();
void update_image(std::shared_ptr<MediaAVFrame> frame);
void default_image();
void update_image(std::shared_ptr<MediaAVFrame>,v_sws_spec*);
void renderer_default_image();
void renderer_text_info(SDL_Rect m_rect,std::string str,font_renderer_type f_type);
void set_Size(QSize _size);
SDL_AudioDeviceID openAudioDevice(SDL_AudioSpec *spec);
private:
static int refresh_default_image(void *opaque);
private:
SDL_Window *m_sdl_window = nullptr;
SDL_Renderer *m_sdl_renderer = nullptr;
SDL_Texture *m_sdl_texture = nullptr;
SDL_Surface *m_sdl_surface = nullptr;
SDL_Rect m_sdl_rect, m_sdl_font_rect;
SDL_Rect m_sdl_rect;
int m_width, m_height;
Stb_Media *m_stb_media;
SDL_Event event;
std::string renderder_str;
media_renderer_type m_renderer_image_type=default_logo_image;
int sd_time=1;
// 默认字符串
std::string logoImageStr = "F:/SourceCode/VTS/ZFFmpeg/ZFFmpeg/res/img/zvo.png";
std::string logoStr = "新时代社会主义中国";

12
Stb_Media.cpp

@ -65,11 +65,17 @@ void Stb_Media::readFileRaw_toMemory (const std::string & fullPath, sttfont_memo
fs.read(mem.data, LEN);
fs.close();
}
void Stb_Media::setFontArgs(SDL_Renderer* sdl_rander){
fc.faceSize = 48;
fc.tabWidthInSpaces = 12;
void Stb_Media::setFontArgs(SDL_Renderer* sdl_rander,int f_size,int w_size){
    // Apply the requested face size and tab width, then load the bundled
    // PingFang font from disk and bind the glyph cache to the SDL renderer.
    fc.faceSize = f_size;
    fc.tabWidthInSpaces = w_size;
    sttfont_memory fontBlob;
    const std::string fontPath = (QDir::currentPath() + "/font/PingFang.ttf").toStdString();
    readFileRaw_toMemory(fontPath, fontBlob);
    fc.loadFontManaged(fontBlob); // cache takes ownership of the font memory
    fc.bindRenderer(sdl_rander);
}
// Sets the glyph face size used by the font cache for subsequent draws.
void Stb_Media::setFontSize(int f_size){
fc.faceSize=f_size;
}
// Sets the tab width, measured in spaces, used by the font cache.
void Stb_Media::setFontWidthInSpaces(int w_size){
fc.tabWidthInSpaces=w_size;
}

4
Stb_Media.h

@ -17,7 +17,9 @@ class Stb_Media{
SDL_Surface* default_logo_surface(std::string image);
void readFileRaw (const std::string & fullPath, std::string & output);
void readFileRaw_toMemory (const std::string & fullPath, sttfont_memory & mem);
void setFontArgs(SDL_Renderer* sdl_rander);
void setFontArgs(SDL_Renderer* sdl_rander,int f_size,int w_size);
void setFontSize(int f_seize);
void setFontWidthInSpaces(int f_size);
sdl_stb_font_cache fc;
};
#endif

371
mediaplayer.cpp

@ -3,13 +3,319 @@
MediaPlayer::MediaPlayer() : m_avformat_ctx(nullptr),
m_video_avdecoder(new MediaAVDecoder),
m_audio_avdecoder(new MediaAVDecoder),
m_render_receive_obj(nullptr),
m_sws_spec(new v_sws_spec)
m_render_receive_obj(nullptr)
{
}
// Destructor intentionally empty.
// NOTE(review): no threads/contexts are released here — presumably cleanup
// happens in close_file()/free_avformat_ctx(); confirm nothing leaks when a
// MediaPlayer is destroyed mid-playback.
MediaPlayer::~MediaPlayer()
{
}
/// Human-readable names for FFmpeg's AVPixelFormat enum values, in declaration
/// order. Index with `CharAVPixelFormat[fmt + 1]` because AV_PIX_FMT_NONE == -1.
/// Declared `const char*` (not `char*`): ISO C++11+ forbids binding string
/// literals to non-const char pointers. The table is read-only.
const char* CharAVPixelFormat[] {
"AV_PIX_FMT_NONE",
"AV_PIX_FMT_YUV420P", ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
"AV_PIX_FMT_YUYV422", ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
"AV_PIX_FMT_RGB24", ///< packed RGB 8:8:8, 24bpp, RGBRGB...
"AV_PIX_FMT_BGR24", ///< packed RGB 8:8:8, 24bpp, BGRBGR...
"AV_PIX_FMT_YUV422P", ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
"AV_PIX_FMT_YUV444P", ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
"AV_PIX_FMT_YUV410P", ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
"AV_PIX_FMT_YUV411P", ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
"AV_PIX_FMT_GRAY8", ///< Y , 8bpp
"AV_PIX_FMT_MONOWHITE", ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
"AV_PIX_FMT_MONOBLACK", ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
"AV_PIX_FMT_PAL8", ///< 8 bits with AV_PIX_FMT_RGB32 palette
"AV_PIX_FMT_YUVJ420P", ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range
"AV_PIX_FMT_YUVJ422P", ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range
"AV_PIX_FMT_YUVJ444P", ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range
"AV_PIX_FMT_UYVY422", ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
"AV_PIX_FMT_UYYVYY411", ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
"AV_PIX_FMT_BGR8", ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
"AV_PIX_FMT_BGR4", ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
"AV_PIX_FMT_BGR4_BYTE", ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
"AV_PIX_FMT_RGB8", ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
"AV_PIX_FMT_RGB4", ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
"AV_PIX_FMT_RGB4_BYTE", ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
"AV_PIX_FMT_NV12", ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
"AV_PIX_FMT_NV21", ///< as above, but U and V bytes are swapped
"AV_PIX_FMT_ARGB", ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
"AV_PIX_FMT_RGBA", ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
"AV_PIX_FMT_ABGR", ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
"AV_PIX_FMT_BGRA", ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
"AV_PIX_FMT_GRAY16BE", ///< Y , 16bpp, big-endian
"AV_PIX_FMT_GRAY16LE", ///< Y , 16bpp, little-endian
"AV_PIX_FMT_YUV440P", ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
"AV_PIX_FMT_YUVJ440P", ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
"AV_PIX_FMT_YUVA420P", ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
"AV_PIX_FMT_RGB48BE", ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
"AV_PIX_FMT_RGB48LE", ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
"AV_PIX_FMT_RGB565BE", ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
"AV_PIX_FMT_RGB565LE", ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
"AV_PIX_FMT_RGB555BE", ///< packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined
"AV_PIX_FMT_RGB555LE", ///< packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_BGR565BE", ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
"AV_PIX_FMT_BGR565LE", ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
"AV_PIX_FMT_BGR555BE", ///< packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined
"AV_PIX_FMT_BGR555LE", ///< packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined
/**
 * Hardware acceleration through VA-API, data[3] contains a
 * VASurfaceID.
 */
"AV_PIX_FMT_VAAPI",
"AV_PIX_FMT_YUV420P16LE", ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV420P16BE", ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV422P16LE", ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV422P16BE", ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P16LE", ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P16BE", ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_DXVA2_VLD", ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
"AV_PIX_FMT_RGB444LE", ///< packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_RGB444BE", ///< packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_BGR444LE", ///< packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_BGR444BE", ///< packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_YA8", ///< 8 bits gray, 8 bits alpha
"AV_PIX_FMT_Y400A ", ///< alias for AV_PIX_FMT_YA8
"AV_PIX_FMT_GRAY8A", ///< alias for AV_PIX_FMT_YA8
"AV_PIX_FMT_BGR48BE", ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
"AV_PIX_FMT_BGR48LE", ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
/**
 * The following 12 formats have the disadvantage of needing 1 format for each bit depth.
 * Notice that each 9/10 bits sample is stored in 16 bits with extra padding.
 * If you want to support multiple bit depths, then using AV_PIX_FMT_YUV420P16* with the bpp stored separately is better.
 */
"AV_PIX_FMT_YUV420P9BE", ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P9LE", ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV420P10BE",///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P10LE",///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV422P10BE",///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P10LE",///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P9BE", ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P9LE", ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P10BE",///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P10LE",///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV422P9BE", ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P9LE", ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_GBRP", ///< planar GBR 4:4:4 24bpp
"AV_PIX_FMT_GBR24P", // alias for #AV_PIX_FMT_GBRP
"AV_PIX_FMT_GBRP9BE", ///< planar GBR 4:4:4 27bpp, big-endian
"AV_PIX_FMT_GBRP9LE", ///< planar GBR 4:4:4 27bpp, little-endian
"AV_PIX_FMT_GBRP10BE", ///< planar GBR 4:4:4 30bpp, big-endian
"AV_PIX_FMT_GBRP10LE", ///< planar GBR 4:4:4 30bpp, little-endian
"AV_PIX_FMT_GBRP16BE", ///< planar GBR 4:4:4 48bpp, big-endian
"AV_PIX_FMT_GBRP16LE", ///< planar GBR 4:4:4 48bpp, little-endian
"AV_PIX_FMT_YUVA422P", ///< planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
"AV_PIX_FMT_YUVA444P", ///< planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
"AV_PIX_FMT_YUVA420P9BE", ///< planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian
"AV_PIX_FMT_YUVA420P9LE", ///< planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian
"AV_PIX_FMT_YUVA422P9BE", ///< planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian
"AV_PIX_FMT_YUVA422P9LE", ///< planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian
"AV_PIX_FMT_YUVA444P9BE", ///< planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
"AV_PIX_FMT_YUVA444P9LE", ///< planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
"AV_PIX_FMT_YUVA420P10BE", ///< planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA420P10LE", ///< planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA422P10BE", ///< planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA422P10LE", ///< planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA444P10BE", ///< planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA444P10LE", ///< planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA420P16BE", ///< planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA420P16LE", ///< planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA422P16BE", ///< planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA422P16LE", ///< planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
"AV_PIX_FMT_YUVA444P16BE", ///< planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
"AV_PIX_FMT_YUVA444P16LE", ///< planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
"AV_PIX_FMT_VDPAU", ///< HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface
"AV_PIX_FMT_XYZ12LE", ///< packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0
"AV_PIX_FMT_XYZ12BE", ///< packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0
"AV_PIX_FMT_NV16", ///< interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
"AV_PIX_FMT_NV20LE", ///< interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_NV20BE", ///< interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_RGBA64BE", ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
"AV_PIX_FMT_RGBA64LE", ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
"AV_PIX_FMT_BGRA64BE", ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
"AV_PIX_FMT_BGRA64LE", ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
"AV_PIX_FMT_YVYU422", ///< packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb
"AV_PIX_FMT_YA16BE", ///< 16 bits gray, 16 bits alpha (big-endian)
"AV_PIX_FMT_YA16LE", ///< 16 bits gray, 16 bits alpha (little-endian)
"AV_PIX_FMT_GBRAP", ///< planar GBRA 4:4:4:4 32bpp
"AV_PIX_FMT_GBRAP16BE", ///< planar GBRA 4:4:4:4 64bpp, big-endian
"AV_PIX_FMT_GBRAP16LE", ///< planar GBRA 4:4:4:4 64bpp, little-endian
/**
 * HW acceleration through QSV, data[3] contains a pointer to the
 * mfxFrameSurface1 structure.
 */
"AV_PIX_FMT_QSV",
/**
 * HW acceleration though MMAL, data[3] contains a pointer to the
 * MMAL_BUFFER_HEADER_T structure.
 */
"AV_PIX_FMT_MMAL",
"AV_PIX_FMT_D3D11VA_VLD", ///< HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView pointer
/**
 * HW acceleration through CUDA. data[i] contain CUdeviceptr pointers
 * exactly as for system memory frames.
 */
"AV_PIX_FMT_CUDA",
"AV_PIX_FMT_0RGB", ///< packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined
"AV_PIX_FMT_RGB0", ///< packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
"AV_PIX_FMT_0BGR", ///< packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined
"AV_PIX_FMT_BGR0", ///< packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
"AV_PIX_FMT_YUV420P12BE", ///< planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P12LE", ///< planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV420P14BE", ///< planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
"AV_PIX_FMT_YUV420P14LE", ///< planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
"AV_PIX_FMT_YUV422P12BE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P12LE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV422P14BE", ///< planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
"AV_PIX_FMT_YUV422P14LE", ///< planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P12BE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P12LE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_YUV444P14BE", ///< planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
"AV_PIX_FMT_YUV444P14LE", ///< planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
"AV_PIX_FMT_GBRP12BE", ///< planar GBR 4:4:4 36bpp, big-endian
"AV_PIX_FMT_GBRP12LE", ///< planar GBR 4:4:4 36bpp, little-endian
"AV_PIX_FMT_GBRP14BE", ///< planar GBR 4:4:4 42bpp, big-endian
"AV_PIX_FMT_GBRP14LE", ///< planar GBR 4:4:4 42bpp, little-endian
"AV_PIX_FMT_YUVJ411P", ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range
"AV_PIX_FMT_BAYER_BGGR8", ///< bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_RGGB8", ///< bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_GBRG8", ///< bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_GRBG8", ///< bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples
"AV_PIX_FMT_BAYER_BGGR16LE", ///< bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_BGGR16BE", ///< bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_BAYER_RGGB16LE", ///< bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_RGGB16BE", ///< bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_BAYER_GBRG16LE", ///< bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_GBRG16BE", ///< bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_BAYER_GRBG16LE", ///< bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian
"AV_PIX_FMT_BAYER_GRBG16BE", ///< bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian
"AV_PIX_FMT_XVMC",///< XVideo Motion Acceleration via common packet passing
"AV_PIX_FMT_YUV440P10LE", ///< planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
"AV_PIX_FMT_YUV440P10BE", ///< planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
"AV_PIX_FMT_YUV440P12LE", ///< planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
"AV_PIX_FMT_YUV440P12BE", ///< planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
"AV_PIX_FMT_AYUV64LE", ///< packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
"AV_PIX_FMT_AYUV64BE", ///< packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
"AV_PIX_FMT_VIDEOTOOLBOX", ///< hardware decoding through Videotoolbox
"AV_PIX_FMT_P010LE", ///< like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, little-endian
"AV_PIX_FMT_P010BE", ///< like NV12, with 10bpp per component, data in the high bits, zeros in the low bits, big-endian
"AV_PIX_FMT_GBRAP12BE", ///< planar GBR 4:4:4:4 48bpp, big-endian
"AV_PIX_FMT_GBRAP12LE", ///< planar GBR 4:4:4:4 48bpp, little-endian
"AV_PIX_FMT_GBRAP10BE", ///< planar GBR 4:4:4:4 40bpp, big-endian
"AV_PIX_FMT_GBRAP10LE", ///< planar GBR 4:4:4:4 40bpp, little-endian
"AV_PIX_FMT_MEDIACODEC", ///< hardware decoding through MediaCodec
"AV_PIX_FMT_GRAY12BE", ///< Y , 12bpp, big-endian
"AV_PIX_FMT_GRAY12LE", ///< Y , 12bpp, little-endian
"AV_PIX_FMT_GRAY10BE", ///< Y , 10bpp, big-endian
"AV_PIX_FMT_GRAY10LE", ///< Y , 10bpp, little-endian
"AV_PIX_FMT_P016LE", ///< like NV12, with 16bpp per component, little-endian
"AV_PIX_FMT_P016BE", ///< like NV12, with 16bpp per component, big-endian
/**
 * Hardware surfaces for Direct3D11.
 *
 * This is preferred over the legacy AV_PIX_FMT_D3D11VA_VLD. The new D3D11
 * hwaccel API and filtering support AV_PIX_FMT_D3D11 only.
 *
 * data[0] contains a ID3D11Texture2D pointer, and data[1] contains the
 * texture array index of the frame as intptr_t if the ID3D11Texture2D is
 * an array texture (or always 0 if it's a normal texture).
 */
"AV_PIX_FMT_D3D11",
"AV_PIX_FMT_GRAY9BE", ///< Y , 9bpp, big-endian
"AV_PIX_FMT_GRAY9LE", ///< Y , 9bpp, little-endian
"AV_PIX_FMT_GBRPF32BE", ///< IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian
"AV_PIX_FMT_GBRPF32LE", ///< IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian
"AV_PIX_FMT_GBRAPF32BE", ///< IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian
"AV_PIX_FMT_GBRAPF32LE", ///< IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian
/**
 * DRM-managed buffers exposed through PRIME buffer sharing.
 *
 * data[0] points to an AVDRMFrameDescriptor.
 */
"AV_PIX_FMT_DRM_PRIME",
/**
 * Hardware surfaces for OpenCL.
 *
 * data[i] contain 2D image objects (typed in C as cl_mem, used
 * in OpenCL as image2d_t) for each plane of the surface.
 */
"AV_PIX_FMT_OPENCL",
"AV_PIX_FMT_GRAY14BE", ///< Y , 14bpp, big-endian
"AV_PIX_FMT_GRAY14LE", ///< Y , 14bpp, little-endian
"AV_PIX_FMT_GRAYF32BE", ///< IEEE-754 single precision Y, 32bpp, big-endian
"AV_PIX_FMT_GRAYF32LE", ///< IEEE-754 single precision Y, 32bpp, little-endian
"AV_PIX_FMT_YUVA422P12BE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian
"AV_PIX_FMT_YUVA422P12LE", ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian
"AV_PIX_FMT_YUVA444P12BE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian
"AV_PIX_FMT_YUVA444P12LE", ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian
"AV_PIX_FMT_NV24", ///< planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
"AV_PIX_FMT_NV42", ///< as above, but U and V bytes are swapped
/**
 * Vulkan hardware images.
 *
 * data[0] points to an AVVkFrame
 */
"AV_PIX_FMT_VULKAN",
"AV_PIX_FMT_Y210BE", ///< packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian
"AV_PIX_FMT_Y210LE", ///< packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian
"AV_PIX_FMT_X2RGB10LE", ///< packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_X2RGB10BE", ///< packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_X2BGR10LE", ///< packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), little-endian, X=unused/undefined
"AV_PIX_FMT_X2BGR10BE", ///< packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), big-endian, X=unused/undefined
"AV_PIX_FMT_P210BE", ///< interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, big-endian
"AV_PIX_FMT_P210LE", ///< interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, little-endian
"AV_PIX_FMT_P410BE", ///< interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, big-endian
"AV_PIX_FMT_P410LE", ///< interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, little-endian
"AV_PIX_FMT_P216BE", ///< interleaved chroma YUV 4:2:2, 32bpp, big-endian
"AV_PIX_FMT_P216LE", ///< interleaved chroma YUV 4:2:2, 32bpp, liddle-endian
"AV_PIX_FMT_P416BE", ///< interleaved chroma YUV 4:4:4, 48bpp, big-endian
"AV_PIX_FMT_P416LE", ///< interleaved chroma YUV 4:4:4, 48bpp, little-endian
"AV_PIX_FMT_NB" ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
};
short format_convert(int format)
{
short sdl_format = false;
@ -489,10 +795,11 @@ void MediaPlayer::render_video_thread()
std::this_thread::sleep_until(wake_tp);
// 取出来类型为std::shared_ptr<MediaAVFrame>如果不需要格式转换则可以直接输出video_frame
std::shared_ptr<MediaAVFrame> yuv_frame = std::make_shared<MediaAVFrame>();
std::shared_ptr<MediaAVFrame> v_frame = std::make_shared<MediaAVFrame>();
// 将yuv_frame->m_frame->data数组指向buffer,并按照codecContext和AVPixelFormat决定data指针数组各个成员的指向
av_image_fill_arrays(yuv_frame->m_frame->data, // dst data[]
yuv_frame->m_frame->linesize, // dst linesize[]
av_image_fill_arrays(v_frame->m_frame->data, // dst data[]
v_frame->m_frame->linesize, // dst linesize[]
buffer, // src buffer
m_video_avdecoder->m_avcodec_context->pix_fmt, // pixel format
m_video_avdecoder->m_avcodec_context->width, // width
@ -500,20 +807,26 @@ void MediaPlayer::render_video_thread()
1 // align
);
yuv_frame->m_frame->width = m_video_avdecoder->m_avcodec_context->width;
yuv_frame->m_frame->height = m_video_avdecoder->m_avcodec_context->height;
v_frame->m_frame->width = m_video_avdecoder->m_avcodec_context->width;
v_frame->m_frame->height = m_video_avdecoder->m_avcodec_context->height;
// sws_scale将各种video_format转换为AV_PIX_FMT_YUV420P
sws_scale(m_sws_ctx, // sws context
video_frame->m_frame->data, // src slice
video_frame->m_frame->linesize, // src stride
0, // src slice y
m_video_avdecoder->m_avcodec_context->height, // src slice height
yuv_frame->m_frame->data, // dst planes
yuv_frame->m_frame->linesize); // dst strides
v_frame->m_frame->data, // dst planes
v_frame->m_frame->linesize); // dst strides
if(!m_sws_spec){
m_sws_spec=new v_sws_spec;
m_sws_spec->info_state=true;
out_media_info();
}
QMetaObject::invokeMethod(
QApplication::instance(), [=]()
{ m_render_receive_obj->update_image(yuv_frame); },
{ m_render_receive_obj->update_image(v_frame,m_sws_spec); },
Qt::QueuedConnection);
}
av_free(buffer);
@ -590,6 +903,26 @@ void MediaPlayer::render_audio_thread()
}
printf("[finished]:void MediaPlayer::render_audio_thread()\n");
}
void MediaPlayer::out_media_info(){
int audioindex=-1;
int videoindex = -1;
for (int i = 0; i < m_avformat_ctx->nb_streams; i++) {
if (m_avformat_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoindex = i;
}
else if (m_avformat_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
audioindex = i;
}
}
m_sws_spec->filename="媒体路径:"+std::string(file_name);
m_sws_spec->channels="音频声道数:"+(audioindex==-1?"无法获取音频信息":std::to_string(m_avformat_ctx->streams[audioindex]->codecpar->channels));
m_sws_spec->sample_rate="音频采样率:"+(audioindex==-1?"无法获取音频信息":std::to_string(m_avformat_ctx->streams[audioindex]->codecpar->sample_rate));
m_sws_spec->bit_rate="码率:"+std::to_string(m_avformat_ctx->bit_rate);
m_sws_spec->duration="总时长:"+std::to_string(round(m_avformat_ctx->duration*av_q2d(AV_TIME_BASE_Q)))+"";
m_sws_spec->pix_fmt="视频像素格式:"+(videoindex==-1?"无法获取视频信息":std::string(CharAVPixelFormat[m_avformat_ctx->streams[videoindex]->codecpar->frame_size+1]));
m_sws_spec->pkg_fmt_name="封装格式名称:"+std::string(m_avformat_ctx->iformat->name);
}
void MediaPlayer::get_video_size(int &width, int &height)
{
if (m_video_avdecoder->m_avcodec_context)
@ -612,21 +945,5 @@ char *MediaPlayer::get_file_name()
}
/// Registers the widget that will receive rendered frames and starts the
/// background thread that shows the default image while nothing is playing.
/// @param receiver  render target widget; must outlive this MediaPlayer
void MediaPlayer::register_render_windows_callback(SDL2RenderWidget *receiver)
{
    // Bug fix: the assignment was duplicated; once is enough.
    m_render_receive_obj = receiver;
    // Bug fix: a plain member-function name is not a valid std::thread
    // callable — use pointer-to-member syntax with `this`.
    m_video_default_image_thread = std::thread(&MediaPlayer::render_default_image_thread, this);
}
/// Background loop: while playback is not active, periodically asks the GUI
/// thread to paint the render widget's default (logo) image.
void MediaPlayer::render_default_image_thread()
{
    while (true)
    {
        if (m_status != playing)
        {
            // Painting must happen on the Qt GUI thread.
            QMetaObject::invokeMethod(
                QApplication::instance(), [=]()
                { m_render_receive_obj->default_image(); },
                Qt::QueuedConnection);
        }
        // Bug fix: the original loop had no delay, pinning a CPU core and
        // flooding the Qt event queue with queued invocations. ~30 fps is
        // plenty for a static image.
        std::this_thread::sleep_for(std::chrono::milliseconds(33));
    }
}

13
mediaplayer.h

@ -25,11 +25,13 @@ extern "C"
#include <future>
#include <QApplication>
#include <QThread>
#define PACKETSIZE 1024 * 5 * 10
#define AV_SYNC_THRESHOLD 0.01
#define AV_NOSYNC_THRESHOLD 10.0
class MediaPlayer : public QObject
class MediaPlayer:public QObject
{
Q_OBJECT
public:
@ -49,6 +51,7 @@ private:
void close_file();
void pause_or_resume();
void free_avformat_ctx();
void out_media_info();
void demux_thread();
void push_pkt_queue(std::shared_ptr<MediaAVPacket> pkt);
void video_decode(std::shared_ptr<MediaAVPacket> video_pkt);
@ -58,14 +61,14 @@ private:
void render_audio_thread();
void render_video_thread();
void render_default_image_thread();
private:
char *file_name;
char* file_name;
AVFormatContext *m_avformat_ctx;
MediaAVDecoder *m_video_avdecoder;
MediaAVDecoder *m_audio_avdecoder;
SwsContext *m_sws_ctx;
v_sws_spec *m_sws_spec;
SwsContext *m_sws_ctx=nullptr;
v_sws_spec *m_sws_spec=nullptr;
SDL2RenderWidget *m_render_receive_obj = nullptr;
SDL_AudioDeviceID m_current_audio_deviceId;

14
zffmpeg.cpp

@ -71,7 +71,7 @@ ZFFmpeg::ZFFmpeg(QWidget *parent)
layout_main->setStretch(2, 1);
layout_main->setSpacing(0);
setLayout(layout_main);
setMinimumSize(800, 600);
setMinimumSize(600, 400);
connect(btn_play, SIGNAL(clicked()), this, SLOT(play()));
connect(btn_play, SIGNAL(clicked()), this, SLOT(pause()));
@ -82,18 +82,6 @@ ZFFmpeg::ZFFmpeg(QWidget *parent)
// Destructor intentionally empty: child widgets are owned and deleted by the
// Qt parent/child mechanism.
// NOTE(review): m_player / m_avinfo are raw pointers created with `new`
// elsewhere — confirm Qt parenting (or another owner) frees them.
ZFFmpeg::~ZFFmpeg()
{
}
void ZFFmpeg::mousePressEvent(QMouseEvent *event)
{
    // Right-clicking during playback pops up the media-information dialog.
    const bool showInfoRequested =
        event->button() == Qt::RightButton && m_player->get_state() == playing;
    if (!showInfoRequested)
        return;

    // Lazily create the dialog on first use and reuse it afterwards.
    if (m_avinfo == nullptr)
        m_avinfo = new MediaAVInfo();

    m_avinfo->update_media_info(m_player->get_fmt_ctx(), m_player->get_file_name());
    m_avinfo->exec(); // modal until the user closes it
}
void ZFFmpeg::resizeEvent(QResizeEvent *event)
{
widget_sdl2->set_Size(size());

3
zffmpeg.h

@ -15,7 +15,6 @@
#include "MediaSlider.h"
#include "MediaPlayer.h"
#include "MediaAVInfo.h"
#include "SDL2RenderWidget.h"
class ZFFmpeg : public QWidget
@ -65,11 +64,9 @@ private:
QSpacerItem * sparcer_item = new QSpacerItem(0,0,QSizePolicy::Expanding,QSizePolicy::Fixed);
MediaPlayer *m_player=nullptr;
MediaAVInfo *m_avinfo=nullptr;
QString ms_Path;
protected:
virtual void resizeEvent(QResizeEvent *event) override;
virtual void mousePressEvent(QMouseEvent *event) override;
public slots:
void set_open_file();
void play();

Loading…
Cancel
Save