Browse Source

增加默认渲染内容,图标,右键视频内容

master
JackLee_CN 1 year ago
parent
commit
e2ea1b91d8
  1. 13
      CMakeLists.txt
  2. 77
      MediaAVFilter.cpp
  3. 20
      MediaAVFilter.h
  4. 371
      MediaAVInfo.cpp
  5. 43
      MediaAVInfo.h
  6. 12
      MediaFFex.h
  7. 8
      MediaPlayerPreview.cpp
  8. 18
      MediaPlayerPreview.h
  9. 51
      MediaSlider.cpp
  10. 30
      MediaSlider.h
  11. 134
      SDL2RenderWidget.cpp
  12. 25
      SDL2RenderWidget.h
  13. 13
      Stb_Impl.cpp
  14. 75
      Stb_Media.cpp
  15. 23
      Stb_Media.h
  16. 58
      mediaplayer.cpp
  17. 106
      mediaplayer.h
  18. 11
      res.qrc
  19. BIN
      res/btn/addFile.png
  20. BIN
      res/btn/addFolder.png
  21. BIN
      res/btn/clear.png
  22. BIN
      res/btn/fill.png
  23. BIN
      res/btn/hideList.png
  24. BIN
      res/btn/lastMedia.png
  25. BIN
      res/btn/loud.png
  26. BIN
      res/btn/mutex.png
  27. BIN
      res/btn/nextMedia.png
  28. BIN
      res/btn/pause.png
  29. BIN
      res/btn/play.png
  30. BIN
      res/btn/reverse3s.png
  31. BIN
      res/btn/showList.png
  32. BIN
      res/btn/speed3s.png
  33. BIN
      res/btn/stop.png
  34. BIN
      res/btn/zoom.png
  35. 155
      zffmpeg.cpp
  36. 58
      zffmpeg.h

13
CMakeLists.txt

@ -80,7 +80,12 @@ include_directories(${PROJECT_SOURCE_DIR}/3rdparty/stb)
set(PROJECT_SOURCES
main.cpp
ZFFmpeg.cpp
ZFFmpeg.h
ZFFmpeg.h
Stb_Impl.cpp
Stb_Media.cpp
Stb_Media.h
MediaSlider.cpp
MediaSlider.h
MediaAVDecoder.h
MediaAVDecoder.cpp
MediaAVStream.cpp
@ -89,6 +94,12 @@ set(PROJECT_SOURCES
MediaAVFrame.h
MediaAVPacket.cpp
MediaAVPacket.h
MediaAVFilter.cpp
MediaAVFilter.h
MediaAVInfo.cpp
MediaAVInfo.h
MediaPlayerPreview.cpp
MediaPlayerPreview.h
SDL2RenderWidget.cpp
SDL2RenderWidget.h
MediaPlayer.cpp

77
MediaAVFilter.cpp

@ -0,0 +1,77 @@
#include "MediaAVFilter.h"
// Construct with every filter-graph handle cleared.
// BUG FIX: the members were left uninitialized, so the destructor's
// `!= nullptr` checks read indeterminate pointers (undefined behaviour)
// whenever initAVFilter() had not been called.
MediaAVFilter::MediaAVFilter()
    : m_fraph(nullptr),
      m_src_filter_ctx(nullptr),
      m_sink_filter_ctx(nullptr)
{
}
// Tear down the filter graph.
// BUG FIX: avfilter_graph_free() releases every AVFilterContext still owned
// by the graph, so the old code that first called avfilter_free() on the
// source/sink contexts and then freed the graph was redundant and risked a
// double free.  One graph free is both sufficient and safe.
MediaAVFilter::~MediaAVFilter()
{
    if (m_fraph != nullptr) {
        avfilter_graph_free(&m_fraph);  // frees src/sink contexts as well; nulls m_fraph
    }
    // The contexts were owned by the graph — just drop the dangling handles.
    m_src_filter_ctx = nullptr;
    m_sink_filter_ctx = nullptr;
}
/**
 * Build the audio speed-change filter graph:
 *
 *     abuffer -> atempo -> aformat -> abuffersink
 *
 * @param m_avcodec_context  pointer to the opened *audio* decoder context;
 *                           supplies sample rate / sample format / channel layout.
 * @param value              playback speed as a decimal string, e.g. "1.5"
 *                           (the atempo filter accepts 0.5 .. 100.0).
 * @return 0 on success, -1 on failure (any partially built graph is freed).
 */
int MediaAVFilter::initAVFilter(AVCodecContext **m_avcodec_context,char *value)
{
    m_fraph = avfilter_graph_alloc();
    if (m_fraph == nullptr) {
        printf("alloc filter graph fail!\n");
        return -1;
    }
    // Shared failure path: free the graph (which frees all contexts) and
    // drop the now-dangling member handles.
    auto fail = [this](const char *msg) -> int {
        printf("%s\n", msg);
        avfilter_graph_free(&m_fraph);
        m_src_filter_ctx = nullptr;
        m_sink_filter_ctx = nullptr;
        return -1;
    };
    AVCodecContext *dec = *m_avcodec_context;
    // Source / output-format filter options.  abuffer additionally expects a
    // time_base; for raw audio the convention is 1/sample_rate.
    std::string src_args = "time_base=1/" + std::to_string(dec->sample_rate)
        + ":sample_rate=" + std::to_string(dec->sample_rate)
        + ":sample_fmt=" + av_get_sample_fmt_name(dec->sample_fmt)
        + ":channel_layout=" + std::to_string(dec->channel_layout);
    std::string fmt_args = "sample_rates=" + std::to_string(dec->sample_rate)
        + ":sample_fmts=" + av_get_sample_fmt_name(dec->sample_fmt)
        + ":channel_layouts=" + std::to_string(dec->channel_layout);
    // BUG FIX: this graph processes audio, so it must use the audio filters
    // "abuffer"/"atempo"/"aformat"/"abuffersink".  The previously used
    // "buffer"/"tempo"/"format"/"buffersink" names are the video variants
    // (or do not exist) and reject the audio options built above.
    const AVFilter *src_filter = avfilter_get_by_name("abuffer");
    m_src_filter_ctx = (src_filter == nullptr) ? nullptr
        : avfilter_graph_alloc_filter(m_fraph, src_filter, "src");
    if (m_src_filter_ctx == nullptr || avfilter_init_str(m_src_filter_ctx, src_args.c_str()) < 0) {
        return fail("init src filter fail!");
    }
    // Speed-change filter; its "tempo" option comes from 'value'.
    const AVFilter *tempo_filter = avfilter_get_by_name("atempo");
    AVFilterContext *tempo_filter_ctx = (tempo_filter == nullptr) ? nullptr
        : avfilter_graph_alloc_filter(m_fraph, tempo_filter, "tempo");
    AVDictionary *args = NULL;
    av_dict_set(&args, "tempo", value, 0); // playback speed factor
    int tempo_ret = (tempo_filter_ctx == nullptr) ? -1
        : avfilter_init_dict(tempo_filter_ctx, &args);
    av_dict_free(&args);  // BUG FIX: the dictionary was leaked before
    if (tempo_ret < 0) {
        return fail("init speed filter fail!");
    }
    // Convert back to the decoder's format/rate/layout before the sink, so the
    // consumer keeps receiving the frame layout it was configured for.
    const AVFilter *fmt_filter = avfilter_get_by_name("aformat");
    AVFilterContext *fmt_filter_ctx = (fmt_filter == nullptr) ? nullptr
        : avfilter_graph_alloc_filter(m_fraph, fmt_filter, "format");
    if (fmt_filter_ctx == nullptr || avfilter_init_str(fmt_filter_ctx, fmt_args.c_str()) < 0) {
        return fail("init fmt filter fail!");
    }
    const AVFilter *sink_filter = avfilter_get_by_name("abuffersink");
    m_sink_filter_ctx = (sink_filter == nullptr) ? nullptr
        : avfilter_graph_alloc_filter(m_fraph, sink_filter, "sink");
    if (m_sink_filter_ctx == nullptr || avfilter_init_dict(m_sink_filter_ctx, NULL) < 0) {
        // BUG FIX: was a copy-pasted "link filter fail!" message
        return fail("init sink filter fail!");
    }
    // Wire up src -> tempo -> format -> sink.
    if (avfilter_link(m_src_filter_ctx, 0, tempo_filter_ctx, 0) != 0 ||
        avfilter_link(tempo_filter_ctx, 0, fmt_filter_ctx, 0) != 0 ||
        avfilter_link(fmt_filter_ctx, 0, m_sink_filter_ctx, 0) != 0) {
        return fail("link filter fail!");
    }
    // Validate and finalize the whole graph.
    if (avfilter_graph_config(m_fraph, NULL) < 0) {
        return fail("config graph fail!");
    }
    return 0;
}

20
MediaAVFilter.h

@ -0,0 +1,20 @@
#ifndef MEDIAAVFILTER_H
#define MEDIAAVFILTER_H
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavfilter/avfilter.h>
}
#include<iostream>
/**
 * Owner of an FFmpeg audio filter graph used for playback speed changes.
 * initAVFilter() builds the chain; frames are pushed into m_src_filter_ctx
 * and pulled from m_sink_filter_ctx by the caller.
 */
class MediaAVFilter{
public:
    MediaAVFilter();
    ~MediaAVFilter();
    // Builds the speed-change filter chain.  'value' is the tempo factor as a
    // string (e.g. "1.5").  Returns 0 on success, -1 on failure.
    int initAVFilter(AVCodecContext** m_avcodec_context,char *value);
public:
    // BUG FIX: in-class nullptr initializers so the destructor's null checks
    // are safe even when initAVFilter() was never called.
    AVFilterGraph* m_fraph = nullptr;              // owning graph (historic "fraph" typo kept: name is used across the project)
    AVFilterContext* m_src_filter_ctx = nullptr;   // graph input; owned by m_fraph
    AVFilterContext* m_sink_filter_ctx = nullptr;  // graph output; owned by m_fraph
};
#endif //MEDIAAVFILTER_H

371
MediaAVInfo.cpp

@ -0,0 +1,371 @@
#include "MediaAVInfo.h"
/**
 * Maps AVPixelFormat enum values to their symbolic names; index with
 * (pix_fmt + 1) because AV_PIX_FMT_NONE == -1 occupies slot 0.
 *
 * BUG FIX: declared `const char* const` — binding string literals to plain
 * `char*` is ill-formed since C++11.
 *
 * NOTE(review): this table hard-codes the AVPixelFormat declaration order of
 * one specific FFmpeg release; the enum gains members between versions, so a
 * mismatch silently shifts every later name.  Prefer av_get_pix_fmt_name()
 * where the linked libavutil is authoritative — TODO confirm the pinned
 * FFmpeg version matches this table.
 */
const char* const charAVPixelFormat[] {
    "AV_PIX_FMT_NONE",
    "AV_PIX_FMT_YUV420P",    "AV_PIX_FMT_YUYV422",    "AV_PIX_FMT_RGB24",
    "AV_PIX_FMT_BGR24",      "AV_PIX_FMT_YUV422P",    "AV_PIX_FMT_YUV444P",
    "AV_PIX_FMT_YUV410P",    "AV_PIX_FMT_YUV411P",    "AV_PIX_FMT_GRAY8",
    "AV_PIX_FMT_MONOWHITE",  "AV_PIX_FMT_MONOBLACK",  "AV_PIX_FMT_PAL8",
    "AV_PIX_FMT_YUVJ420P",   "AV_PIX_FMT_YUVJ422P",   "AV_PIX_FMT_YUVJ444P",
    "AV_PIX_FMT_UYVY422",    "AV_PIX_FMT_UYYVYY411",  "AV_PIX_FMT_BGR8",
    "AV_PIX_FMT_BGR4",       "AV_PIX_FMT_BGR4_BYTE",  "AV_PIX_FMT_RGB8",
    "AV_PIX_FMT_RGB4",       "AV_PIX_FMT_RGB4_BYTE",  "AV_PIX_FMT_NV12",
    "AV_PIX_FMT_NV21",       "AV_PIX_FMT_ARGB",       "AV_PIX_FMT_RGBA",
    "AV_PIX_FMT_ABGR",       "AV_PIX_FMT_BGRA",       "AV_PIX_FMT_GRAY16BE",
    "AV_PIX_FMT_GRAY16LE",   "AV_PIX_FMT_YUV440P",    "AV_PIX_FMT_YUVJ440P",
    "AV_PIX_FMT_YUVA420P",   "AV_PIX_FMT_RGB48BE",    "AV_PIX_FMT_RGB48LE",
    "AV_PIX_FMT_RGB565BE",   "AV_PIX_FMT_RGB565LE",   "AV_PIX_FMT_RGB555BE",
    "AV_PIX_FMT_RGB555LE",   "AV_PIX_FMT_BGR565BE",   "AV_PIX_FMT_BGR565LE",
    "AV_PIX_FMT_BGR555BE",   "AV_PIX_FMT_BGR555LE",
    "AV_PIX_FMT_VAAPI",      // HW: VA-API, data[3] holds a VASurfaceID
    "AV_PIX_FMT_YUV420P16LE","AV_PIX_FMT_YUV420P16BE","AV_PIX_FMT_YUV422P16LE",
    "AV_PIX_FMT_YUV422P16BE","AV_PIX_FMT_YUV444P16LE","AV_PIX_FMT_YUV444P16BE",
    "AV_PIX_FMT_DXVA2_VLD",  "AV_PIX_FMT_RGB444LE",   "AV_PIX_FMT_RGB444BE",
    "AV_PIX_FMT_BGR444LE",   "AV_PIX_FMT_BGR444BE",   "AV_PIX_FMT_YA8",
    "AV_PIX_FMT_Y400A ",     // alias for YA8; trailing space kept byte-for-byte from the original table
    "AV_PIX_FMT_GRAY8A",     // alias for YA8
    "AV_PIX_FMT_BGR48BE",    "AV_PIX_FMT_BGR48LE",
    "AV_PIX_FMT_YUV420P9BE", "AV_PIX_FMT_YUV420P9LE", "AV_PIX_FMT_YUV420P10BE",
    "AV_PIX_FMT_YUV420P10LE","AV_PIX_FMT_YUV422P10BE","AV_PIX_FMT_YUV422P10LE",
    "AV_PIX_FMT_YUV444P9BE", "AV_PIX_FMT_YUV444P9LE", "AV_PIX_FMT_YUV444P10BE",
    "AV_PIX_FMT_YUV444P10LE","AV_PIX_FMT_YUV422P9BE", "AV_PIX_FMT_YUV422P9LE",
    "AV_PIX_FMT_GBRP",       "AV_PIX_FMT_GBR24P",     "AV_PIX_FMT_GBRP9BE",
    "AV_PIX_FMT_GBRP9LE",    "AV_PIX_FMT_GBRP10BE",   "AV_PIX_FMT_GBRP10LE",
    "AV_PIX_FMT_GBRP16BE",   "AV_PIX_FMT_GBRP16LE",   "AV_PIX_FMT_YUVA422P",
    "AV_PIX_FMT_YUVA444P",   "AV_PIX_FMT_YUVA420P9BE","AV_PIX_FMT_YUVA420P9LE",
    "AV_PIX_FMT_YUVA422P9BE","AV_PIX_FMT_YUVA422P9LE","AV_PIX_FMT_YUVA444P9BE",
    "AV_PIX_FMT_YUVA444P9LE","AV_PIX_FMT_YUVA420P10BE","AV_PIX_FMT_YUVA420P10LE",
    "AV_PIX_FMT_YUVA422P10BE","AV_PIX_FMT_YUVA422P10LE","AV_PIX_FMT_YUVA444P10BE",
    "AV_PIX_FMT_YUVA444P10LE","AV_PIX_FMT_YUVA420P16BE","AV_PIX_FMT_YUVA420P16LE",
    "AV_PIX_FMT_YUVA422P16BE","AV_PIX_FMT_YUVA422P16LE","AV_PIX_FMT_YUVA444P16BE",
    "AV_PIX_FMT_YUVA444P16LE",
    "AV_PIX_FMT_VDPAU",      // HW: VDPAU, data[3] holds a VdpVideoSurface
    "AV_PIX_FMT_XYZ12LE",    "AV_PIX_FMT_XYZ12BE",    "AV_PIX_FMT_NV16",
    "AV_PIX_FMT_NV20LE",     "AV_PIX_FMT_NV20BE",     "AV_PIX_FMT_RGBA64BE",
    "AV_PIX_FMT_RGBA64LE",   "AV_PIX_FMT_BGRA64BE",   "AV_PIX_FMT_BGRA64LE",
    "AV_PIX_FMT_YVYU422",    "AV_PIX_FMT_YA16BE",     "AV_PIX_FMT_YA16LE",
    "AV_PIX_FMT_GBRAP",      "AV_PIX_FMT_GBRAP16BE",  "AV_PIX_FMT_GBRAP16LE",
    "AV_PIX_FMT_QSV",        // HW: Intel QSV
    "AV_PIX_FMT_MMAL",       // HW: Broadcom MMAL
    "AV_PIX_FMT_D3D11VA_VLD",// HW: legacy Direct3D11 API
    "AV_PIX_FMT_CUDA",       // HW: CUDA device pointers
    "AV_PIX_FMT_0RGB",       "AV_PIX_FMT_RGB0",       "AV_PIX_FMT_0BGR",
    "AV_PIX_FMT_BGR0",       "AV_PIX_FMT_YUV420P12BE","AV_PIX_FMT_YUV420P12LE",
    "AV_PIX_FMT_YUV420P14BE","AV_PIX_FMT_YUV420P14LE","AV_PIX_FMT_YUV422P12BE",
    "AV_PIX_FMT_YUV422P12LE","AV_PIX_FMT_YUV422P14BE","AV_PIX_FMT_YUV422P14LE",
    "AV_PIX_FMT_YUV444P12BE","AV_PIX_FMT_YUV444P12LE","AV_PIX_FMT_YUV444P14BE",
    "AV_PIX_FMT_YUV444P14LE","AV_PIX_FMT_GBRP12BE",   "AV_PIX_FMT_GBRP12LE",
    "AV_PIX_FMT_GBRP14BE",   "AV_PIX_FMT_GBRP14LE",   "AV_PIX_FMT_YUVJ411P",
    "AV_PIX_FMT_BAYER_BGGR8","AV_PIX_FMT_BAYER_RGGB8","AV_PIX_FMT_BAYER_GBRG8",
    "AV_PIX_FMT_BAYER_GRBG8","AV_PIX_FMT_BAYER_BGGR16LE","AV_PIX_FMT_BAYER_BGGR16BE",
    "AV_PIX_FMT_BAYER_RGGB16LE","AV_PIX_FMT_BAYER_RGGB16BE","AV_PIX_FMT_BAYER_GBRG16LE",
    "AV_PIX_FMT_BAYER_GBRG16BE","AV_PIX_FMT_BAYER_GRBG16LE","AV_PIX_FMT_BAYER_GRBG16BE",
    "AV_PIX_FMT_XVMC",       "AV_PIX_FMT_YUV440P10LE","AV_PIX_FMT_YUV440P10BE",
    "AV_PIX_FMT_YUV440P12LE","AV_PIX_FMT_YUV440P12BE","AV_PIX_FMT_AYUV64LE",
    "AV_PIX_FMT_AYUV64BE",   "AV_PIX_FMT_VIDEOTOOLBOX","AV_PIX_FMT_P010LE",
    "AV_PIX_FMT_P010BE",     "AV_PIX_FMT_GBRAP12BE",  "AV_PIX_FMT_GBRAP12LE",
    "AV_PIX_FMT_GBRAP10BE",  "AV_PIX_FMT_GBRAP10LE",  "AV_PIX_FMT_MEDIACODEC",
    "AV_PIX_FMT_GRAY12BE",   "AV_PIX_FMT_GRAY12LE",   "AV_PIX_FMT_GRAY10BE",
    "AV_PIX_FMT_GRAY10LE",   "AV_PIX_FMT_P016LE",     "AV_PIX_FMT_P016BE",
    "AV_PIX_FMT_D3D11",      // HW: modern Direct3D11 textures
    "AV_PIX_FMT_GRAY9BE",    "AV_PIX_FMT_GRAY9LE",    "AV_PIX_FMT_GBRPF32BE",
    "AV_PIX_FMT_GBRPF32LE",  "AV_PIX_FMT_GBRAPF32BE", "AV_PIX_FMT_GBRAPF32LE",
    "AV_PIX_FMT_DRM_PRIME",  // HW: DRM PRIME buffer sharing
    "AV_PIX_FMT_OPENCL",     // HW: OpenCL image objects
    "AV_PIX_FMT_GRAY14BE",   "AV_PIX_FMT_GRAY14LE",   "AV_PIX_FMT_GRAYF32BE",
    "AV_PIX_FMT_GRAYF32LE",  "AV_PIX_FMT_YUVA422P12BE","AV_PIX_FMT_YUVA422P12LE",
    "AV_PIX_FMT_YUVA444P12BE","AV_PIX_FMT_YUVA444P12LE","AV_PIX_FMT_NV24",
    "AV_PIX_FMT_NV42",
    "AV_PIX_FMT_VULKAN",     // HW: Vulkan images
    "AV_PIX_FMT_Y210BE",     "AV_PIX_FMT_Y210LE",     "AV_PIX_FMT_X2RGB10LE",
    "AV_PIX_FMT_X2RGB10BE",  "AV_PIX_FMT_X2BGR10LE",  "AV_PIX_FMT_X2BGR10BE",
    "AV_PIX_FMT_P210BE",     "AV_PIX_FMT_P210LE",     "AV_PIX_FMT_P410BE",
    "AV_PIX_FMT_P410LE",     "AV_PIX_FMT_P216BE",     "AV_PIX_FMT_P216LE",
    "AV_PIX_FMT_P416BE",     "AV_PIX_FMT_P416LE",
    "AV_PIX_FMT_NB"          // sentinel: count of formats, not a real format
};
// Build the media-info dialog: one QLabel per metadata row, with the row
// captions set directly at construction time.  The labels show only their
// captions until update_media_info() fills in the values.
MediaAVInfo::MediaAVInfo()
    : m_Layout(new QVBoxLayout),
      m_fmtCtx(nullptr),
      name_file_str(new QLabel(tr("名称:"))),
      channels_str(new QLabel(tr("音频声道数:"))),
      sample_rate_str(new QLabel(tr("音频采样率:"))),
      bit_rate_str(new QLabel(tr("码率:"))),
      duration_str(new QLabel(tr("总时长:"))),
      pix_fmt_str(new QLabel(tr("视频像素格式:"))),
      pkg_fmt_name_str(new QLabel(tr("封装格式:")))
{
}
// Nothing explicit to release: once update_media_info() installs m_Layout via
// setLayout(), Qt reparents the layout and labels to this dialog and deletes
// them with it.
// NOTE(review): if update_media_info() is never called, the labels and layout
// are never reparented and leak — consider parenting them in the constructor.
MediaAVInfo::~MediaAVInfo()
{
}
/**
 * Refresh every label from an opened AVFormatContext and, on the first call,
 * install the layout.
 *
 * @param _updatefmtCtx  demuxer context already opened with avformat_open_input
 * @param _filename      UTF-8 file name shown in the "名称" row
 */
void MediaAVInfo::update_media_info(AVFormatContext *_updatefmtCtx,char *_filename){
    int audioindex = -1;
    int videoindex = -1;
    // Remember the last audio and video stream indices (matches the original
    // behaviour of letting later streams win).
    // nb_streams is unsigned — use an unsigned index to avoid sign-compare issues.
    for (unsigned int i = 0; i < _updatefmtCtx->nb_streams; i++) {
        if (_updatefmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
        }
        else if (_updatefmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            audioindex = i;
        }
    }
    m_fmtCtx = _updatefmtCtx;
    name_file_str->setText("名称:" + QString::fromUtf8(_filename));
    channels_str->setText("音频声道数:" + (audioindex == -1 ? "无法获取音频信息" : QString::number(_updatefmtCtx->streams[audioindex]->codecpar->channels)));
    sample_rate_str->setText("音频采样率:" + (audioindex == -1 ? "无法获取音频信息" : QString::number(_updatefmtCtx->streams[audioindex]->codecpar->sample_rate)));
    bit_rate_str->setText("码率:" + QString::number(_updatefmtCtx->bit_rate));
    // duration is expressed in AV_TIME_BASE units; av_q2d(AV_TIME_BASE_Q) converts to seconds.
    duration_str->setText("总时长:" + QString::number(round(_updatefmtCtx->duration * av_q2d(AV_TIME_BASE_Q))));
    // BUG FIX: the pixel format lives in codecpar->format; codecpar->frame_size
    // is the *audio* frame size, so the old code indexed the name table with an
    // unrelated value.  +1 skips the AV_PIX_FMT_NONE (== -1) slot, and the
    // index is bounds-checked against the table size.
    QString pixName = "无法获取视频信息";
    if (videoindex != -1) {
        const int fmt = _updatefmtCtx->streams[videoindex]->codecpar->format;
        const int tableSize = (int)(sizeof(charAVPixelFormat) / sizeof(charAVPixelFormat[0]));
        if (fmt + 1 >= 0 && fmt + 1 < tableSize) {
            pixName = QString(charAVPixelFormat[fmt + 1]);
        }
    }
    pix_fmt_str->setText("视频像素格式:" + pixName);
    pkg_fmt_name_str->setText("封装格式名称:" + QString(_updatefmtCtx->iformat->name));
    // BUG FIX: install the layout only once.  Re-adding the same widgets and
    // calling setLayout() on every update triggers Qt "already has a layout"
    // warnings.
    if (layout() == nullptr) {
        m_Layout->addWidget(name_file_str);
        m_Layout->addWidget(channels_str);
        m_Layout->addWidget(sample_rate_str);
        m_Layout->addWidget(bit_rate_str);
        m_Layout->addWidget(duration_str);
        m_Layout->addWidget(pix_fmt_str);
        m_Layout->addWidget(pkg_fmt_name_str);
        setLayout(m_Layout);
    }
}

43
MediaAVInfo.h

@ -0,0 +1,43 @@
#ifndef MEDIAAVINFO_H
#define MEDIAAVINFO_H
extern "C"{
#include <libavformat/avformat.h>
}
#include <QDialog>
#include <QVBoxLayout>
#include <QLabel>
#include <QString>
// Dialog showing basic metadata (name, channels, sample rate, bit rate,
// duration, pixel format, container format) for the currently opened media.
class MediaAVInfo : public QDialog
{
    Q_OBJECT
public:
    MediaAVInfo();
    ~MediaAVInfo();
    // Refreshes all labels from an opened demuxer context; 'filename' is the
    // UTF-8 display name.  Does not take ownership of update_fmtCtx.
    void update_media_info(AVFormatContext *update_fmtCtx,char *filename);
private:
    QVBoxLayout* m_Layout;        // vertical list of the label rows
    AVFormatContext *m_fmtCtx;    // last context passed in (not owned)
    // One label per metadata row; caption and value share a single label.
    QLabel* name_file_str;
    QLabel* channels_str;
    QLabel* sample_rate_str;
    QLabel* bit_rate_str;
    QLabel* duration_str;
    QLabel* pix_fmt_str;
    QLabel* pkg_fmt_name_str;
    // Kept from the original: a planned split into caption + value labels.
    /* QLabel* name_file_data;
    QLabel* channels_data;
    QLabel* sample_rate_data;
    QLabel* bit_rate_data;
    QLabel* duration_data;
    QLabel* pix_fmt_data;
    QLabel* pkg_fmt_name_data; */
};
#endif //MEDIAAVINFO_H

12
MediaFFex.h

@ -41,5 +41,17 @@ typedef enum
min = 0,
max = 100
} v_volumn;
/* Decode path selection: software (sw) or hardware (hw) decoding. */
typedef enum{
sw,
hw
}decoder_type;
// Placeholder for user-level options; no fields yet.
// TODO(review): populate or remove this empty struct.
typedef struct{
}u_opt;
// Session options.  NOTE(review): the member is named 'sw' but its type is the
// decoder_type enum — presumably it selects sw/hw decoding; a name like 'type'
// would be clearer.
typedef struct{
decoder_type sw;
}s_opt;
#endif

8
MediaPlayerPreview.cpp

@ -0,0 +1,8 @@
#include "MediaPlayerPreview.h"
// Skeleton implementation only — no state to set up or tear down yet.
MediaPlayerPreview::MediaPlayerPreview(){
}
MediaPlayerPreview::~MediaPlayerPreview(){
}

18
MediaPlayerPreview.h

@ -0,0 +1,18 @@
#ifndef MEDIAPLAYERPREVIEW_H
#define MEDIAPLAYERPREVIEW_H
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
};
// Empty skeleton class.  Given the FFmpeg includes and the slider's
// preview() signal, presumably intended to decode thumbnail frames for the
// seek-bar hover preview — TODO confirm once implemented.
class MediaPlayerPreview {
public:
    MediaPlayerPreview();
    ~MediaPlayerPreview();
};
#endif //MEDIAPLAYERPREVIEW_H  (guard comment fixed; previously said MEDIASTREAM_H)

51
MediaSlider.cpp

@ -0,0 +1,51 @@
#include "MediaSlider.h"
#include <QMouseEvent>
#include <QStyle>
// Enable mouse tracking so mouseMoveEvent() fires while merely hovering
// (needed for the seek preview), and seed the throttling state it uses.
MediaSlider::MediaSlider(QWidget *parent) : QSlider(parent) {
setMouseTracking(true); // receive move events without a pressed button
start=clock();          // timestamp of the last accepted move event
x=0;                    // cursor x position at the last accepted move event
}
// Click-to-seek: jump the handle to the released position, let QSlider finish
// its normal release handling, then tell listeners a value was picked.
void MediaSlider::mouseReleaseEvent(QMouseEvent *ev) {
    const int clickX = ev->pos().x();
    setValue(QStyle::sliderValueFromPosition(minimum(), maximum(), clickX, width()));
    QSlider::mouseReleaseEvent(ev);
    emit clicked(this);
}
// While hovering, emit preview(value, x) for small cursor movements,
// throttled to at most once per 300 ms.
void MediaSlider::mouseMoveEvent(QMouseEvent *event){
    // BUG FIX: clock() counts in CLOCKS_PER_SEC units (1,000,000 on POSIX,
    // 1,000 on Windows).  Comparing the raw tick difference with 300 only
    // meant "300 ms" on Windows; convert to milliseconds explicitly so the
    // throttle is portable.
    long elapsed_ms = (long)((clock() - start) * 1000 / CLOCKS_PER_SEC);
    if (elapsed_ms > 300)
    {
        int temp = event->pos().x();
        // Only fire when the cursor moved just 1-2 px since the last accepted
        // event, i.e. it is lingering over a position rather than sweeping.
        int dx = x - temp;
        if (dx < 0) dx = -dx;
        if (dx > 0 && dx < 3)
        {
            int value = QStyle::sliderValueFromPosition(minimum(),
                                                        maximum(),
                                                        temp,
                                                        width());
            emit preview(value, temp);
        }
        start = clock();  // restart the throttle window
        x = temp;
    }
    QSlider::mouseMoveEvent(event);
}
// Notify listeners (e.g. to hide the preview popup) when the cursor leaves
// the slider.
void MediaSlider::leaveEvent(QEvent *event){
    emit mouseleave();
    // BUG FIX: forward to the base class so QSlider's own leave handling
    // (hover state reset, repaint) still runs.
    QSlider::leaveEvent(event);
}
void MediaSlider::changeValue(int n){
int valuenow = value();
setValue(valuenow+n);
emit clicked(this);
}

30
MediaSlider.h

@ -0,0 +1,30 @@
#ifndef MEDIASLIDER_H
#define MEDIASLIDER_H
#include <QSlider>
#include <time.h>
// QSlider subclass adding click-to-seek, throttled hover preview and
// programmatic stepping for the player's progress bar.
class MediaSlider : public QSlider {
    Q_OBJECT
public:
    explicit MediaSlider(QWidget *parent = nullptr);
    // Moves the slider by n steps and emits clicked(), as a user click would.
    void changeValue(int n);
signals:
    /** Emitted after a mouse release repositioned the slider (click-to-seek). */
    void clicked(MediaSlider *slider);
    /** Emitted (throttled) while hovering; seektime is the slider value under x. */
    void preview(int seektime, int x);
    /** Emitted when the cursor leaves the slider, e.g. to hide the preview. */
    void mouseleave();
private:
    void mouseReleaseEvent(QMouseEvent *event) override;
    void mouseMoveEvent(QMouseEvent *event) override;
    void leaveEvent(QEvent *event) override;
    clock_t start; // time of the last accepted move event (throttle window)
    int x;         // cursor x position at the last accepted move event
};
#endif // MEDIASLIDER_H

134
SDL2RenderWidget.cpp

@ -1,94 +1,108 @@
#include "SDL2RenderWidget.h"
SDL2RenderWidget::SDL2RenderWidget(QWidget *parent /*= nullptr*/)
:QWidget(parent)
SDL2RenderWidget::SDL2RenderWidget(QWidget *parent)
: m_stb_media(new Stb_Media)
{
//setUpdatesEnabled(false);
char winID[32] = { 0 };
QSize size = this->baseSize();
//B1. 初始化SDL子系统:缺省(事件处理、文件IO、线程)、视频、音频、定时器
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER) < 0)
{
printf("SDL could not initialize! SDL_Error: %s\n", SDL_GetError());
}
SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "1");
// B2. 创建SDL窗口,SDL 2.0支持多窗口
m_sdl_window = SDL_CreateWindowFrom((void*)(this->winId()));
// B3. 创建SDL_Renderer
// SDL_Renderer:渲染器
m_sdl_renderer = SDL_CreateRenderer(m_sdl_window, -1, 0);
m_sdl_window = SDL_CreateWindowFrom((void *)(parent->winId()));
m_sdl_renderer = SDL_CreateRenderer(m_sdl_window, -1, SDL_RENDERER_TARGETTEXTURE);
SDL_SetRenderDrawBlendMode(m_sdl_renderer, SDL_BLENDMODE_BLEND);
if (!m_sdl_window)
{
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
}
}
SDL2RenderWidget::~SDL2RenderWidget()
{
    // Destroy the SDL window that wraps this widget's native handle.
    SDL_DestroyWindow(m_sdl_window);
    // Shut down all SDL subsystems initialised in the constructor.
    SDL_Quit();
}
void SDL2RenderWidget::updateImage(std::shared_ptr<MediaAVFrame> yuv_frame)
void SDL2RenderWidget::set_Size(QSize _size)
{
int nTextureWidth = 0, nTextureHeight = 0;
//首先查询当前纹理对象的宽高,如果不符合,那么需要重建纹理对象
SDL_QueryTexture(m_sdl_texture, nullptr, nullptr, &nTextureWidth, &nTextureHeight);
setMinimumSize(nTextureWidth,nTextureHeight);
//B4 SDL_CreateTexture
if (nTextureWidth != yuv_frame->m_frame->width || nTextureHeight != yuv_frame->m_frame->height) {
if (m_sdl_texture){
SDL_DestroyTexture(m_sdl_texture);
}
//这里指定了渲染的数据格式,访问方式和宽高大小
m_sdl_texture = SDL_CreateTexture(m_sdl_renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
yuv_frame->m_frame->width, yuv_frame->m_frame->height);
m_width = _size.width();
m_height = _size.height();
SDL_SetWindowSize(m_sdl_window, _size.width(), _size.height());
}
void SDL2RenderWidget::default_image()
{
if (!m_sdl_surface)
{
m_sdl_surface = m_stb_media->default_logo_surface(logoImageStr);
m_sdl_texture = SDL_CreateTextureFromSurface(m_sdl_renderer, m_sdl_surface);
m_stb_media->setFontArgs(m_sdl_renderer);
}
m_sdl_rect.x=0;
m_sdl_rect.y=0;
m_sdl_rect.w=nTextureWidth;
m_sdl_rect.h=nTextureHeight;
//B5.使用新的YUV像素数据更新SDL_Rect
SDL_UpdateYUVTexture(m_sdl_texture, // sdl texture
&m_sdl_rect, // sdl rect
yuv_frame->m_frame->data[0], // y plane
yuv_frame->m_frame->linesize[0], // y pitch
yuv_frame->m_frame->data[1], // u plane
yuv_frame->m_frame->linesize[1], // u pitch
yuv_frame->m_frame->data[2], // v plane
yuv_frame->m_frame->linesize[2] // v pitch
);
int iW, iH, iWidth, iHeight;
SDL_QueryTexture(m_sdl_texture, NULL, NULL, &iW, &iH);
SDL_GetWindowSize(m_sdl_window, &iWidth, &iHeight);
m_sdl_rect.x = iWidth / 2 - iW / 2;
m_sdl_rect.y = iHeight / 2 - iH / 2;
m_sdl_rect.w = iW;
m_sdl_rect.h = iH;
// B6. 使用特定颜色清空当前渲染目标
SDL_RenderClear(m_sdl_renderer);
SDL_RenderCopy(m_sdl_renderer, m_sdl_texture, NULL, &m_sdl_rect);
// B7. 使用部分图像数据(texture)更新当前渲染目标
SDL_RenderCopy(m_sdl_renderer, // sdl renderer
m_sdl_texture, // sdl texture
&m_sdl_rect, // src rect, if NULL copy texture
NULL // dst rect
);
m_sdl_font_rect.x = (iWidth / 2 - iW / 2) - m_stb_media->fc.faceSize;
m_sdl_font_rect.y = iHeight - 50;
// B8. 执行渲染,更新屏幕显示
m_stb_media->fc.drawText(m_sdl_font_rect.x, m_sdl_font_rect.y, logoStr);
SDL_RenderPresent(m_sdl_renderer);
}
void SDL2RenderWidget::update_image(std::shared_ptr<MediaAVFrame> yuv_frame)
{
    // Upload one decoded YUV frame into the streaming texture and present it,
    // scaled to the current window size.
    int nTextureWidth, nTextureHeight;
    // Query the current texture size; if the incoming frame has a different
    // size the texture must be recreated.
    SDL_QueryTexture(m_sdl_texture, nullptr, nullptr, &nTextureWidth, &nTextureHeight);
    if (nTextureWidth != yuv_frame->m_frame->width || nTextureHeight != yuv_frame->m_frame->height)
    {
        // BUG FIX: the previous check was `if (!m_sdl_texture)`, which called
        // SDL_DestroyTexture only on a null texture (a no-op) and leaked any
        // existing texture of the wrong size on every resolution change.
        if (m_sdl_texture)
        {
            SDL_DestroyTexture(m_sdl_texture);
        }
        m_sdl_texture = SDL_CreateTexture(m_sdl_renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
                                          yuv_frame->m_frame->width, yuv_frame->m_frame->height);
    }
    // Stretch the frame to fill the whole window.
    SDL_GetWindowSize(m_sdl_window, &nTextureWidth, &nTextureHeight);
    m_sdl_rect.x = 0;
    m_sdl_rect.y = 0;
    m_sdl_rect.w = nTextureWidth;
    m_sdl_rect.h = nTextureHeight;
    // Copy the three YUV planes (with their own strides) into the texture.
    SDL_UpdateYUVTexture(m_sdl_texture,
                         &m_sdl_rect,
                         yuv_frame->m_frame->data[0],
                         yuv_frame->m_frame->linesize[0],
                         yuv_frame->m_frame->data[1],
                         yuv_frame->m_frame->linesize[1],
                         yuv_frame->m_frame->data[2],
                         yuv_frame->m_frame->linesize[2]);
    SDL_RenderClear(m_sdl_renderer);
    SDL_RenderCopy(m_sdl_renderer, m_sdl_texture, &m_sdl_rect, NULL);
    SDL_RenderPresent(m_sdl_renderer);
}
SDL_AudioDeviceID SDL2RenderWidget::openAudioDevice(SDL_AudioSpec *spec)
{
SDL_AudioSpec have;
SDL_AudioSpec have;
SDL_AudioDeviceID dev = SDL_OpenAudioDevice(NULL, 0, spec, &have, SDL_AUDIO_ALLOW_FORMAT_CHANGE);
if (dev == 0) {
if (dev == 0)
{
SDL_Log("Failed to open audio: %s", SDL_GetError());
}
else {
if (have.format != spec->format) { /* we let this one thing change. */
else
{
if (have.format != spec->format)
{ /* we let this one thing change. */
SDL_Log("We didn't get Float32 audio format.");
}
}

25
SDL2RenderWidget.h

@ -4,21 +4,30 @@
#include <memory>
#include "MediaAVFrame.h"
#include "SDL2/SDL.h"
#include "Stb_Media.h"
class SDL2RenderWidget : public QWidget
{
Q_OBJECT
public:
SDL2RenderWidget(QWidget *parent = nullptr);
SDL2RenderWidget(QWidget *parent);
~SDL2RenderWidget();
void updateImage(std::shared_ptr<MediaAVFrame> frame);
SDL_AudioDeviceID openAudioDevice(SDL_AudioSpec * spec);
void update_image(std::shared_ptr<MediaAVFrame> frame);
void default_image();
void set_Size(QSize _size);
SDL_AudioDeviceID openAudioDevice(SDL_AudioSpec *spec);
private:
SDL_Window* m_sdl_window = nullptr;
SDL_Renderer* m_sdl_renderer = nullptr;
SDL_Texture* m_sdl_texture = nullptr;
SDL_Rect m_sdl_rect;
SDL_Window *m_sdl_window = nullptr;
SDL_Renderer *m_sdl_renderer = nullptr;
SDL_Texture *m_sdl_texture = nullptr;
SDL_Surface *m_sdl_surface = nullptr;
SDL_Rect m_sdl_rect, m_sdl_font_rect;
int m_width, m_height;
Stb_Media *m_stb_media;
// 默认字符串
std::string logoImageStr = "F:/SourceCode/VTS/ZFFmpeg/ZFFmpeg/res/img/zvo.png";
std::string logoStr = "新时代社会主义中国";
};
#endif

13
Stb_Impl.cpp

@ -0,0 +1,13 @@
// Dedicated translation unit that instantiates the stb single-header
// libraries exactly once: their *_IMPLEMENTATION / *_IMPL macros must be
// defined in a single .cpp, otherwise every includer would emit duplicate
// symbol definitions. The #undef guards make the file safe even if a build
// flag already defined the macro.
#ifdef STB_IMAGE_IMPLEMENTATION
#undef STB_IMAGE_IMPLEMENTATION
#endif
#define STB_IMAGE_IMPLEMENTATION
extern "C"{
#include "stb_image.h"
}
#ifdef SDL_STB_FONT_IMPL
#undef SDL_STB_FONT_IMPL
#endif
#define SDL_STB_FONT_IMPL
#include "stb_font.h"

75
Stb_Media.cpp

@ -0,0 +1,75 @@
#include "Stb_Media.h"
// Default-construct; the font cache `fc` is configured later via setFontArgs().
Stb_Media::Stb_Media(){
}
// Nothing to release explicitly; `fc` cleans itself up in its own destructor.
Stb_Media::~Stb_Media(){
}
// Decode the logo file at `image` with stb_image (forced to RGBA) and wrap
// the pixels in an SDL surface. Returns nullptr if decoding or surface
// creation fails. The returned surface does NOT own the pixel memory; the
// stb buffer must stay alive as long as the surface is used.
SDL_Surface* Stb_Media::default_logo_surface(std::string image){
    int req_format = STBI_rgb_alpha;
    int width, height, orig_format;
    unsigned char* data = stbi_load(image.c_str(), &width, &height, &orig_format, req_format);
    if(data==NULL){
        return nullptr;
    }
    // Map the requested stb layout onto an SDL pixel format. req_format is
    // fixed to STBI_rgb_alpha above, so the RGB branch is only kept for the
    // case where req_format is ever changed.
    int depth, pitch;
    Uint32 pixel_format;
    if (req_format == STBI_rgb) {
        depth = 24;
        pitch = 3*width;
        pixel_format = SDL_PIXELFORMAT_RGB24;
    } else {
        depth = 32;
        pitch = 4*width;
        pixel_format = SDL_PIXELFORMAT_RGBA32;
    }
    SDL_Surface* surface = SDL_CreateRGBSurfaceWithFormatFrom((void*)data, width, height, depth, pitch, pixel_format);
    if (surface == nullptr) {
        // BUG FIX: the decoded pixel buffer leaked when SDL failed to create
        // the surface; on success it is intentionally kept alive because the
        // surface references it without copying.
        stbi_image_free(data);
    }
    return surface;
}
}
// Read the entire binary file at fullPath into `output`. On open failure a
// warning is logged and `output` is left untouched.
void Stb_Media::readFileRaw (const std::string & fullPath, std::string & output) {
    std::ifstream fs(fullPath.c_str(), std::ios::in | std::ios::binary);
    if (fs.is_open()) {
        std::cout << "Opened! " << fullPath << std::endl;
    }
    else {
        std::cout << "readFileRaw: " << fullPath << " -- " << "WARNING: Could not open file." << std::endl;
        return;
    }
    // Measure the file by seeking to the end, then read it back in one call.
    fs.seekg (0, std::ios::end);
    const size_t byteCount = fs.tellg();
    fs.seekg (0, std::ios::beg);
    output.resize(byteCount);
    fs.read(&output[0], byteCount);
    fs.close();
}
// Read the entire binary file at fullPath into `mem`, allocating the buffer
// via sttfont_memory::alloc. On open failure a warning is logged and `mem`
// is left untouched.
void Stb_Media::readFileRaw_toMemory (const std::string & fullPath, sttfont_memory & mem) {
    std::ifstream fs(fullPath.c_str(), std::ios::in | std::ios::binary);
    if (fs.is_open()) {
        std::cout << "Opened! " << fullPath << std::endl;
    }
    else {
        std::cout << "readFileRaw: " << fullPath << " -- " << "WARNING: Could not open file." << std::endl;
        return;
    }
    // Measure the file by seeking to the end, then read it into the buffer.
    fs.seekg (0, std::ios::end);
    const size_t byteCount = fs.tellg();
    fs.seekg (0, std::ios::beg);
    mem.alloc(byteCount);
    fs.read(mem.data, byteCount);
    fs.close();
}
// Configure the stb font cache (face size, tab width), load the font file
// from <cwd>/font/PingFang.ttf into cache-owned memory, and bind the cache
// to the given SDL renderer.
void Stb_Media::setFontArgs(SDL_Renderer* sdl_rander){
    fc.faceSize = 48;
    fc.tabWidthInSpaces = 12;
    const std::string fontPath = (QDir::currentPath() + "/font/PingFang.ttf").toStdString();
    sttfont_memory fontData;
    readFileRaw_toMemory(fontPath, fontData);
    // loadFontManaged takes ownership of fontData's buffer.
    fc.loadFontManaged(fontData);
    fc.bindRenderer(sdl_rander);
}

23
Stb_Media.h

@ -0,0 +1,23 @@
#ifndef STB_MEDIA_H
#define STB_MEDIA_H
#include "SDL2/SDL.h"
extern "C"{
#include "stb_image.h"
}
#include "stb_font.h"
#include <iostream>
#include <fstream>
#include <QDir>
// Helper bundling stb_image decoding and stb-based font rendering for SDL:
// decodes the idle-logo image and manages the on-screen text font cache.
class Stb_Media{
public:
    Stb_Media();
    ~Stb_Media();
    // Decode `image` into an SDL surface (RGBA). Returns nullptr on failure.
    // The surface references pixel memory owned by stb_image (not copied).
    SDL_Surface* default_logo_surface(std::string image);
    // Read the whole binary file at fullPath into `output`; logs a warning
    // and leaves `output` untouched if the file cannot be opened.
    void readFileRaw (const std::string & fullPath, std::string & output);
    // Same as readFileRaw but fills an sttfont_memory buffer (allocated here).
    void readFileRaw_toMemory (const std::string & fullPath, sttfont_memory & mem);
    // Configure the font cache, load <cwd>/font/PingFang.ttf, bind renderer.
    void setFontArgs(SDL_Renderer* sdl_rander);
    sdl_stb_font_cache fc;  // stb font cache used to draw overlay text
};
#endif

58
mediaplayer.cpp

@ -152,9 +152,10 @@ void MediaPlayer::close_file()
{
avformat_close_input(&m_avformat_ctx);
}
bool MediaPlayer::start_play(const char *file_name)
bool MediaPlayer::start_play(const char *_file_name)
{
stop_play();
file_name = const_cast<char *>(_file_name);
if (!open_file(file_name))
{
return false;
@ -167,6 +168,7 @@ bool MediaPlayer::start_play(const char *file_name)
m_video_render_thread = std::thread(&MediaPlayer::render_audio_thread, this);
m_audio_render_thread = std::thread(&MediaPlayer::render_video_thread, this);
m_theoretical_render_video_time = m_theoretical_render_audio_time = (double)av_gettime() / 1000000.0; // 初始化时钟(获取当前系统时间,然后根据pts叠加到该时间上的方式进行同步)
m_status = v_state::playing;
return true;
}
bool MediaPlayer::stop_play()
@ -222,18 +224,6 @@ void MediaPlayer::pause_resume_play()
SDL_PauseAudioDevice(m_current_audio_deviceId, 1);
}
}
void MediaPlayer::get_video_size(int &width, int &height)
{
if (m_video_avdecoder->m_avcodec_context)
{
width = m_video_avdecoder->m_avcodec_context->width;
height = m_video_avdecoder->m_avcodec_context->height;
}
}
void MediaPlayer::register_render_windows_callback(SDL2RenderWidget *receiver)
{
m_render_receive_obj = receiver;
}
void MediaPlayer::pause_or_resume()
{
std::unique_lock<std::mutex> locker(m_pause_mutex);
@ -523,7 +513,7 @@ void MediaPlayer::render_video_thread()
QMetaObject::invokeMethod(
QApplication::instance(), [=]()
{ m_render_receive_obj->updateImage(yuv_frame); },
{ m_render_receive_obj->update_image(yuv_frame); },
Qt::QueuedConnection);
}
av_free(buffer);
@ -600,3 +590,43 @@ void MediaPlayer::render_audio_thread()
}
printf("[finished]:void MediaPlayer::render_audio_thread()\n");
}
// Report the decoded video dimensions via the out-parameters.
// BUG FIX: previously the out-params were left unmodified when no video
// codec context was open, so callers could read stale or uninitialized
// values; they are now zeroed in that case.
void MediaPlayer::get_video_size(int &width, int &height)
{
    if (m_video_avdecoder->m_avcodec_context)
    {
        width = m_video_avdecoder->m_avcodec_context->width;
        height = m_video_avdecoder->m_avcodec_context->height;
    }
    else
    {
        width = 0;
        height = 0;
    }
}
// Current playback state (e.g. none / playing), as tracked in m_status.
v_state MediaPlayer::get_state()
{
    return m_status;
}
// Expose the demuxer context (e.g. for the media-info dialog).
// NOTE(review): the pointer is owned by MediaPlayer and becomes invalid
// after close_file(); callers must not hold it across a stop.
AVFormatContext *MediaPlayer::get_fmt_ctx()
{
    return m_avformat_ctx;
}
// Path of the currently playing file, as passed to start_play().
// NOTE(review): this returns the internal pointer stored by start_play —
// its lifetime is controlled by the caller of start_play; verify callers
// do not keep it after the source string is freed.
char *MediaPlayer::get_file_name()
{
    return file_name;
}
// Remember the widget that receives decoded frames and start the background
// thread that keeps the idle/logo image painted while nothing is playing.
void MediaPlayer::register_render_windows_callback(SDL2RenderWidget *receiver)
{
    m_render_receive_obj = receiver;
    // BUG FIX: a pointer to a non-static member function must be formed with
    // &ClassName:: syntax; `std::thread(render_default_image_thread, this)`
    // is ill-formed and does not compile.
    m_video_default_image_thread = std::thread(&MediaPlayer::render_default_image_thread, this);
}
void MediaPlayer::render_default_image_thread()
{
while (true)
{
if (m_status != playing)
{
QMetaObject::invokeMethod(
QApplication::instance(), [=]()
{ m_render_receive_obj->default_image(); },
Qt::QueuedConnection);
}
}
}

106
mediaplayer.h

@ -4,12 +4,12 @@
#include <stdio.h>
extern "C"
{
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
#include <libavutil/time.h>
#include <libavutil/imgutils.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
#include <libavutil/time.h>
#include <libavutil/imgutils.h>
};
#include "MediaAVDecoder.h"
#include "MediaAVStream.h"
@ -21,15 +21,17 @@ extern "C"
#include <condition_variable>
#include <mutex>
#include <deque>
#include <iostream>
#include <future>
#include <QApplication>
#define PACKETSIZE 1024*5*10
#define PACKETSIZE 1024 * 5 * 10
#define AV_SYNC_THRESHOLD 0.01
#define AV_NOSYNC_THRESHOLD 10.0
class MediaPlayer
class MediaPlayer : public QObject
{
Q_OBJECT
public:
MediaPlayer();
~MediaPlayer();
@ -37,8 +39,11 @@ public:
bool stop_play();
void pause_resume_play();
void get_video_size(int &width, int &height);
v_state get_state();
AVFormatContext *get_fmt_ctx();
char *get_file_name();
public:
void register_render_windows_callback(SDL2RenderWidget *receiver);
void register_render_windows_callback(SDL2RenderWidget *receiver);
private:
bool open_file(const char *filename);
void close_file();
@ -48,54 +53,57 @@ private:
void push_pkt_queue(std::shared_ptr<MediaAVPacket> pkt);
void video_decode(std::shared_ptr<MediaAVPacket> video_pkt);
void audio_decode(std::shared_ptr<MediaAVPacket> audio_pkt);
void audio_decode_thread();
void video_decode_thread();
void render_audio_thread();
void render_video_thread();
void audio_decode_thread();
void video_decode_thread();
void render_audio_thread();
void render_video_thread();
void render_default_image_thread();
private:
AVFormatContext* m_avformat_ctx;
MediaAVDecoder* m_video_avdecoder;
MediaAVDecoder* m_audio_avdecoder;
SwsContext* m_sws_ctx;
v_sws_spec* m_sws_spec;
SDL2RenderWidget* m_render_receive_obj = nullptr;
SDL_AudioDeviceID m_current_audio_deviceId;
char *file_name;
AVFormatContext *m_avformat_ctx;
MediaAVDecoder *m_video_avdecoder;
MediaAVDecoder *m_audio_avdecoder;
SwsContext *m_sws_ctx;
v_sws_spec *m_sws_spec;
SDL2RenderWidget *m_render_receive_obj = nullptr;
SDL_AudioDeviceID m_current_audio_deviceId;
std::thread m_demux_thread;
std::thread m_video_decode_thread;
std::thread m_audio_decode_thread;
std::thread m_video_render_thread;
std::thread m_video_default_image_thread;
std::thread m_audio_render_thread;
std::mutex m_pause_mutex;
std::mutex m_pkt_audio_queue_mutex;
std::mutex m_pkt_video_queue_mutex;
std::mutex m_audio_frame_queue_mutex;
std::mutex m_video_frame_queue_mutex;
std::thread m_demux_thread;
std::thread m_video_decode_thread;
std::thread m_audio_decode_thread;
std::thread m_video_render_thread;
std::thread m_audio_render_thread;
std::mutex m_pause_mutex;
std::mutex m_pkt_audio_queue_mutex;
std::mutex m_pkt_video_queue_mutex;
std::mutex m_audio_frame_queue_mutex;
std::mutex m_video_frame_queue_mutex;
std::condition_variable m_pause_condition_variable;
std::condition_variable m_demuex_condition_variable;
std::condition_variable m_pkt_audio_condition_variable;
std::condition_variable m_pkt_vidoe_condition_variable;
std::condition_variable m_demuex_condition_variable;
std::condition_variable m_pkt_audio_condition_variable;
std::condition_variable m_pkt_vidoe_condition_variable;
std::condition_variable m_frame_video_condition_varible;
std::deque<std::shared_ptr<MediaAVPacket>> m_video_packet_queue;
std::deque<std::shared_ptr<MediaAVFrame>> m_video_frame_queue;
std::deque<std::shared_ptr<MediaAVPacket>> m_audio_packet_queue;
std::deque<std::shared_ptr<MediaAVFrame>> m_audio_frame_queue;
std::deque<std::shared_ptr<MediaAVFrame>> m_video_frame_queue;
std::deque<std::shared_ptr<MediaAVPacket>> m_audio_packet_queue;
std::deque<std::shared_ptr<MediaAVFrame>> m_audio_frame_queue;
bool m_demux_finish = false;
bool m_audio_decode_finish = false;
bool m_video_decode_finish = false;
bool m_audio_decode_finish = false;
bool m_video_decode_finish = false;
bool m_stop = false;
v_state m_status = v_state::none;
double m_theoretical_render_audio_time;
double m_theoretical_render_video_time;
double m_current_aduio_render_time;
double m_previous_audio_pts = 0;
double m_audio_current_pts = 0;
v_state m_status = v_state::none;
double m_theoretical_render_audio_time;
double m_theoretical_render_video_time;
double m_current_aduio_render_time;
double m_previous_audio_pts = 0;
double m_audio_current_pts = 0;
};
#endif

11
res.qrc

@ -1,5 +1,16 @@
<RCC>
<qresource prefix="/">
<file>res/img/zvo.png</file>
<file>res/btn/loud.png</file>
<file>res/btn/lastMedia.png</file>
<file>res/btn/mutex.png</file>
<file>res/btn/nextMedia.png</file>
<file>res/btn/pause.png</file>
<file>res/btn/play.png</file>
<file>res/btn/reverse3s.png</file>
<file>res/btn/stop.png</file>
<file>res/btn/addFile.png</file>
<file>res/btn/addFolder.png</file>
<file>res/btn/speed3s.png</file>
</qresource>
</RCC>

BIN
res/btn/addFile.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

BIN
res/btn/addFolder.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

BIN
res/btn/clear.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 442 B

BIN
res/btn/fill.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

BIN
res/btn/hideList.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

BIN
res/btn/lastMedia.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

BIN
res/btn/loud.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

BIN
res/btn/mutex.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

BIN
res/btn/nextMedia.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

BIN
res/btn/pause.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 898 B

BIN
res/btn/play.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

BIN
res/btn/reverse3s.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.0 KiB

BIN
res/btn/showList.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

BIN
res/btn/speed3s.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.9 KiB

BIN
res/btn/stop.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 422 B

BIN
res/btn/zoom.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

155
zffmpeg.cpp

@ -2,70 +2,119 @@
ZFFmpeg::ZFFmpeg(QWidget *parent)
: QWidget(parent),
mLayout(new QVBoxLayout),
cLayout(new QHBoxLayout),
vLayout(new QHBoxLayout),
l_path(new QLabel),
m_Widget(new QWidget),
e_edit(new QLineEdit),
t_play(new QPushButton),
t_pause(new QPushButton),
t_stop(new QPushButton),
t_exit(new QPushButton),
t_open_file(new QPushButton)
widget_player(new QWidget),
layout_main(new QVBoxLayout),
layout_player(new QVBoxLayout),
layout_player_state(new QHBoxLayout),
layout_player_tool(new QHBoxLayout),
slider_time(new MediaSlider),
label_current_time(new QLabel),
label_n(new QLabel),
label_duration_time(new QLabel),
btn_play_reverse3s(new QPushButton),
btn_play_last(new QPushButton),
btn_play(new QPushButton),
btn_play_next(new QPushButton),
btn_play_speed3s(new QPushButton),
btn_stop(new QPushButton),
btn_open_file(new QPushButton),
btn_open_dir(new QPushButton),
mutipleSpeed(new QComboBox),
btn_mutex_volumn(new QPushButton),
slider_volumn(new MediaSlider)
{
l_path->setText(tr("播放地址:"));
t_play->setText(tr("播放"));
t_pause->setText(tr("暂停"));
t_stop->setText(tr("停止"));
t_exit->setText(tr("退出"));
t_open_file->setText(tr("打开文件"));
setMinimumSize(800,600);
cLayout->addWidget(l_path);
cLayout->addWidget(e_edit);
cLayout->addWidget(t_open_file);
cLayout->addWidget(t_play);
cLayout->addWidget(t_pause);
cLayout->addWidget(t_stop);
cLayout->addWidget(t_exit);
btn_play_reverse3s->setIcon(QIcon(tr(":/res/btn/reverse3s.png")));
btn_play_last->setIcon(QIcon(tr(":/res/btn/lastMedia.png")));
btn_play->setIcon(QIcon(tr(":/res/btn/play.png")));
btn_play_next->setIcon(QIcon(tr(":/res/btn/nextMedia.png")));
btn_play_speed3s->setIcon(QIcon(tr(":/res/btn/speed3s.png")));
btn_stop->setIcon(QIcon(tr(":/res/btn/stop.png")));
btn_open_file->setIcon(QIcon(tr(":/res/btn/addFile.png")));
btn_open_dir->setIcon(QIcon(tr(":/res/btn/addFolder.png")));
btn_mutex_volumn->setIcon(QIcon(tr(":/res/btn/loud.png")));
slider_volumn->setOrientation(Qt::Orientation::Horizontal);
m_SDL2Widget=new SDL2RenderWidget(m_Widget);
m_Player=new MediaPlayer;
m_Player->register_render_windows_callback(m_SDL2Widget);
vLayout->addWidget(m_Widget);
mLayout->addLayout(cLayout);
mLayout->addLayout(vLayout);
setLayout(mLayout);
e_edit->setText(QDir::currentPath()+tr("/Av7046974.mp4"));
ms_Path=QDir::currentPath()+tr("/Av7046974.mp4");
connect(t_play, SIGNAL(clicked()), this, SLOT(play()));
connect(t_pause, SIGNAL(clicked()), this, SLOT(pause()));
connect(t_stop, SIGNAL(clicked()), this, SLOT(stop()),Qt::QueuedConnection);
connect(t_open_file,SIGNAL(clicked()),this,SLOT(set_open_file()));
//connect(t_exit, SIGNAL(clicked()), this, SIGNAL(QWidget::close()));
layout_player_tool->addWidget(btn_play_reverse3s);
layout_player_tool->addWidget(btn_play_last);
layout_player_tool->addWidget(btn_play);
layout_player_tool->addWidget(btn_play_next);
layout_player_tool->addWidget(btn_play_speed3s);
layout_player_tool->addWidget(btn_stop);
layout_player_tool->addWidget(btn_open_file);
layout_player_tool->addWidget(btn_open_dir);
layout_player_tool->addSpacerItem(sparcer_item);
layout_player_tool->addWidget(btn_mutex_volumn);
layout_player_tool->addWidget(slider_volumn);
layout_player_tool->setSpacing(0);
label_current_time->setText(tr("00:00:00"));
label_n->setText(tr("/"));
label_duration_time->setText(tr("00:00:00"));
slider_time->setOrientation(Qt::Orientation::Horizontal);
layout_player_state->addWidget(slider_time);
layout_player_state->addWidget(label_current_time);
layout_player_state->addWidget(label_n);
layout_player_state->addWidget(label_duration_time);
layout_player_state->setSpacing(0);
widget_player->setContentsMargins(0, 0, 0, 0);
widget_sdl2 = new SDL2RenderWidget(widget_player);
m_player = new MediaPlayer;
m_player->register_render_windows_callback(widget_sdl2);
layout_player->addWidget(widget_player);
layout_main->addLayout(layout_player);
layout_main->addLayout(layout_player_state);
layout_main->addLayout(layout_player_tool);
layout_main->setStretch(0, 8);
layout_main->setStretch(1, 1);
layout_main->setStretch(2, 1);
layout_main->setSpacing(0);
setLayout(layout_main);
setMinimumSize(800, 600);
connect(btn_play, SIGNAL(clicked()), this, SLOT(play()));
connect(btn_play, SIGNAL(clicked()), this, SLOT(pause()));
connect(btn_stop, SIGNAL(clicked()), this, SLOT(stop()), Qt::QueuedConnection);
connect(btn_open_file, SIGNAL(clicked()), this, SLOT(set_open_file()));
}
ZFFmpeg::~ZFFmpeg()
{
    // BUG FIX: m_avinfo and m_player are created with `new` and no Qt parent,
    // so they were leaked. `delete` on a nullptr member is a safe no-op.
    // NOTE(review): assumes MediaPlayer's destructor stops its worker
    // threads cleanly — confirm before release.
    delete m_avinfo;
    m_avinfo = nullptr;
    delete m_player;
    m_player = nullptr;
}
void ZFFmpeg::set_open_file(){
ms_Path=QFileDialog::getOpenFileName(this,tr("选择要播放的文件"),tr(""));
if(ms_Path!=nullptr){
e_edit->setText(ms_Path);
void ZFFmpeg::mousePressEvent(QMouseEvent *event)
{
if (event->button() == Qt::RightButton && m_player->get_state() == playing)
{
if (!m_avinfo)
{
m_avinfo = new MediaAVInfo();
}
m_avinfo->update_media_info(m_player->get_fmt_ctx(), m_player->get_file_name());
m_avinfo->exec();
}
m_Player->start_play(ms_Path.toUtf8().data());
}
void ZFFmpeg::play(){
m_Player->start_play(ms_Path.toUtf8().data());
void ZFFmpeg::resizeEvent(QResizeEvent *event)
{
    // Keep the SDL render window in sync with the widget's new geometry;
    // the size is taken from size() directly, so the event is unused.
    (void)event;
    widget_sdl2->set_Size(size());
}
void ZFFmpeg::set_open_file()
{
ms_Path = QFileDialog::getOpenFileName(this, tr("选择要播放的文件"), tr(""));
play();
}
// Start (or restart) playback of the currently selected file, if any.
void ZFFmpeg::play()
{
    // BUG FIX: `ms_Path != nullptr` only detects a *null* QString; an
    // empty-but-non-null path slipped through and called start_play("").
    // isEmpty() covers both the null and the empty case.
    if (!ms_Path.isEmpty())
    {
        m_player->start_play(ms_Path.toUtf8().data());
    }
}
void ZFFmpeg::pause(){
// TODO: not implemented yet — should forward to the player's
// pause/resume toggle.
void ZFFmpeg::pause()
{
}
void ZFFmpeg::stop(){
// TODO: not implemented yet — should stop playback via the player.
void ZFFmpeg::stop()
{
}

58
zffmpeg.h

@ -10,8 +10,12 @@
#include <QFileDialog>
#include <QtConcurrent/QtConcurrent>
#include <QMessageBox>
#include <QComboBox>
#include <QMouseEvent>
#include "MediaSlider.h"
#include "MediaPlayer.h"
#include "MediaAVInfo.h"
#include "SDL2RenderWidget.h"
class ZFFmpeg : public QWidget
@ -20,24 +24,52 @@ class ZFFmpeg : public QWidget
public:
ZFFmpeg(QWidget *parent = nullptr);
~ZFFmpeg();
private:
QVBoxLayout* mLayout;
QHBoxLayout* cLayout;
QHBoxLayout* vLayout;
/*主Layout*/
QVBoxLayout* layout_main;
/*播放器Layout*/
QVBoxLayout* layout_player;
QWidget* widget_player;
SDL2RenderWidget* widget_sdl2;
QLabel* l_path;
QLineEdit* e_edit;
QWidget* m_Widget;
/*进度条和时间Layout*/
QHBoxLayout* layout_player_state;
MediaSlider* slider_time;
QLabel* label_current_time;
QLabel* label_n;
QLabel* label_duration_time;
/*播放器操作Layout*/
QHBoxLayout* layout_player_tool;
/*回退3s*/
QPushButton* btn_play_reverse3s;
/*上一个*/
QPushButton* btn_play_last;
/*播放or暂停*/
QPushButton* btn_play;
/*下一个*/
QPushButton* btn_play_next;
/*快进3s*/
QPushButton* btn_play_speed3s;
/*停止*/
QPushButton* btn_stop;
/*打开文件*/
QPushButton* btn_open_file;
QPushButton* btn_open_dir;
/*变速*/
QComboBox* mutipleSpeed;
/*音量*/
QPushButton* btn_mutex_volumn;
MediaSlider* slider_volumn;
QPushButton* t_play;
QPushButton* t_pause;
QPushButton* t_stop;
QPushButton* t_exit;
QPushButton* t_open_file;
QSpacerItem * sparcer_item = new QSpacerItem(0,0,QSizePolicy::Expanding,QSizePolicy::Fixed);
MediaPlayer* m_Player;
SDL2RenderWidget* m_SDL2Widget;
MediaPlayer *m_player=nullptr;
MediaAVInfo *m_avinfo=nullptr;
QString ms_Path;
protected:
virtual void resizeEvent(QResizeEvent *event) override;
virtual void mousePressEvent(QMouseEvent *event) override;
public slots:
void set_open_file();
void play();

Loading…
Cancel
Save