SDL (Simple DirectMedia Layer) 和 FFmpeg (Fast Forward MPEG) 是兩個獨立的函式庫,可以結合使用以實現視訊播放。
以下是一個簡單的例子,展示如何使用 SDL 和 FFmpeg 來播放 H.264 編碼的影片檔案(例如xxx.264)。
需要注意,這只是一個簡單的起始點,實際專案中可能需要更多的錯誤檢查和處理。必須確保已經安裝了 SDL 和 FFmpeg,並將對應的函式庫和頭檔包含到專案中。
#include <SDL2/SDL.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}
int screen_width = 1280;   // SDL window width in pixels
int screen_height = 720;   // SDL window height in pixels
int main(int argc, char *argv[]) {
// Initialize SDL
if (SDL_Init(SDL_INIT_VIDEO) < 0) {
fprintf(stderr, "SDL initialization failed: %s\n", SDL_GetError());
return -1;
}
// Open the video file
const char *filename = "/Users/happyman/Work/TestSDL/TestSDL/sample.264";
AVFormatContext *formatContext = avformat_alloc_context();
if (avformat_open_input(&formatContext, filename, NULL, NULL) != 0) {
fprintf(stderr, "Failed to open video file\n");
return -1;
}
// Find the video stream information
if (avformat_find_stream_info(formatContext, NULL) < 0) {
fprintf(stderr, "Failed to find stream information\n");
return -1;
}
// Find the video stream
int videoStream = -1;
for (int i = 0; i < formatContext->nb_streams; i++) {
if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStream = i;
break;
}
}
if (videoStream == -1) {
fprintf(stderr, "No video stream found\n");
return -1;
}
// Get the codec parameters
AVCodecParameters *codecParameters = formatContext->streams[videoStream]->codecpar;
// Find the decoder
AVCodec *codec = (AVCodec *)avcodec_find_decoder(codecParameters->codec_id);
if (codec == NULL) {
fprintf(stderr, "Unsupported codec\n");
return -1;
}
// Create a codec context
AVCodecContext *codecContext = avcodec_alloc_context3(codec);
if (avcodec_parameters_to_context(codecContext, codecParameters) < 0) {
fprintf(stderr, "Failed to copy codec parameters to codec context\n");
return -1;
}
// Open the codec
if (avcodec_open2(codecContext, codec, NULL) < 0) {
fprintf(stderr, "Failed to open codec\n");
return -1;
}
// Create an SDL window
SDL_Window *window = SDL_CreateWindow("Video Player", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, screen_width, screen_height, SDL_WINDOW_SHOWN);
if (window == NULL) {
fprintf(stderr, "Failed to create SDL window: %s\n", SDL_GetError());
return -1;
}
// Create an SDL renderer
SDL_Renderer *renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
if (renderer == NULL) {
fprintf(stderr, "Failed to create SDL renderer: %s\n", SDL_GetError());
return -1;
}
// Create an SDL texture
SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, codecContext->width, codecContext->height);
if (texture == NULL) {
fprintf(stderr, "Failed to create SDL texture: %s\n", SDL_GetError());
return -1;
}
// Allocate video frame
AVFrame *frame = av_frame_alloc();
if (frame == NULL) {
fprintf(stderr, "Failed to allocate video frame\n");
return -1;
}
// Allocate an AVPacket
AVPacket *packet = av_packet_alloc();
if (packet == NULL) {
fprintf(stderr, "Failed to allocate packet\n");
return -1;
}
// Initialize SWS context for software scaling
struct SwsContext *swsContext = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt, codecContext->width, codecContext->height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);
// 顯示位置與大小
int width = 1000;
SDL_Rect sdl_rect;
sdl_rect.x = 0;
sdl_rect.y = 0;
sdl_rect.w = width;
sdl_rect.h = codecContext->height*width/codecContext->width;
// Event loop
SDL_Event event;
Uint32 targetFramerate = 30;
Uint32 frameDelay = 1000 / targetFramerate; // Delay between frames in milliseconds
Uint32 frameStart, frameTime;
while (1) {
frameStart = SDL_GetTicks();
// Read frame
if (av_read_frame(formatContext, packet) >= 0) {
if (SDL_PollEvent(&event)) {
if (event.type == SDL_MOUSEBUTTONDOWN) {
}
if (event.type == SDL_QUIT) {
break;
}
}
if (packet->stream_index == videoStream) {
// Send packet to decoder
if (avcodec_send_packet(codecContext, packet) == 0) {
// Receive frame from decoder
if (avcodec_receive_frame(codecContext, frame) == 0) {
// Convert the image from its native format to YUV420
sws_scale(swsContext, frame->data, frame->linesize, 0, codecContext->height,
frame->data, frame->linesize);
// Update texture
SDL_UpdateYUVTexture(texture, // sdl texture
NULL, // sdl rect
frame->data[0], // y plane
frame->linesize[0], // y pitch
frame->data[1], // u plane
frame->linesize[1], // u pitch
frame->data[2], // v plane
frame->linesize[2] // v pitch
);
// Clear the screen
SDL_RenderClear(renderer);
// Copy the texture to the renderer
SDL_RenderCopy(renderer, texture, NULL, &sdl_rect);
// Render the frame
SDL_RenderPresent(renderer);
// Delay to control frame rate
frameTime = SDL_GetTicks() - frameStart;
if (frameDelay > frameTime) {
SDL_Delay(frameDelay - frameTime);
}
}
}
}
// Free the packet that was allocated by av_read_frame
av_packet_unref(packet);
}
}
// Clean up
SDL_DestroyTexture(texture);
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
sws_freeContext(swsContext);
av_frame_free(&frame);
av_packet_free(&packet);
avcodec_free_context(&codecContext);
avformat_close_input(&formatContext);
SDL_Quit();
return 0;
}
可以播放 .264 或 mp4 等影片檔。
targetFramerate 為每秒播放的幀數,此例設為 30。若設為 15,表示以半速(慢動作)播放。
此程式碼是跑在 MacOS 上,影片位置規則須留意。

若想要同時播放影片,還能偵測滑鼠點擊事件,可參考文章:SDL 非同步執行任務。
參考:ChatGPT、WiKi – FFmpeg、WiKi – SDL。

隨意留個言吧:)~