Skip to content

Instantly share code, notes, and snippets.

@pkupper
Created December 20, 2022 14:40
Show Gist options
  • Save pkupper/108eb8a712f479ecfdb1eaf9b86cd128 to your computer and use it in GitHub Desktop.
Basic video playback in Bevy using ffmpeg-next
# Manifest for a minimal demo: video playback in Bevy using ffmpeg-next.
[package]
name = "bevy_video_demo_ffmpeg"
version = "0.1.0"
edition = "2021"
[dependencies]
# Game engine providing the app loop, ECS, UI, and texture handling.
bevy = "0.9"
# Safe-ish Rust bindings to ffmpeg, used for demuxing/decoding/scaling.
ffmpeg-next = "5.1.1"
use std::path::Path;
use bevy::prelude::*;
use bevy::render::render_resource::{TextureDimension, TextureFormat, TextureUsages};
use bevy::utils::HashMap;
use ffmpeg_next as ffmpeg;
use ffmpeg::format::{input, Pixel};
use ffmpeg::frame::Video;
use ffmpeg::media::Type;
use ffmpeg::software::scaling::{context::Context, flag::Flags};
fn main() {
App::new()
.add_plugins(DefaultPlugins)
.init_non_send_resource::<VideoResource>()
.add_startup_system(init_ui)
.add_startup_system(initialize_ffmpeg)
.add_system(play_video)
.run();
}
/// Startup system: spawns the 2D camera and a centered UI image that
/// displays the video texture.
///
/// Builds a [`VideoPlayer`] for `./assets/sample.mp4`, attaches it to an
/// `ImageBundle` entity, and stashes the non-`Send` decoder half in the
/// `VideoResource`, keyed by that entity.
fn init_ui(
    mut commands: Commands,
    images: ResMut<Assets<Image>>,
    mut video_resource: NonSendMut<VideoResource>,
) {
    let (player, player_non_send) = VideoPlayer::new("./assets/sample.mp4", images).unwrap();

    commands.spawn(Camera2dBundle::default());

    // Full-screen root node that centers its single child.
    let root_style = Style {
        size: Size::new(Val::Percent(100.0), Val::Percent(100.0)),
        position_type: PositionType::Absolute,
        justify_content: JustifyContent::Center,
        align_items: AlignItems::Center,
        ..default()
    };

    commands
        .spawn(NodeBundle {
            style: root_style,
            ..default()
        })
        .with_children(|parent| {
            // The image entity carries the Send half of the player; the
            // non-Send half is registered in the resource under the same id.
            let image_entity = parent
                .spawn(ImageBundle {
                    style: Style {
                        size: Size::new(Val::Px(500.0), Val::Auto),
                        ..default()
                    },
                    image: player.image_handle.clone().into(),
                    ..default()
                })
                .insert(player)
                .id();
            video_resource
                .video_players
                .insert(image_entity, player_non_send);
        });
}
/// Startup system: initializes the ffmpeg library.
///
/// Must complete before any other ffmpeg call (demuxing, decoding, scaling).
fn initialize_ffmpeg() {
    // Failure here means a broken ffmpeg installation; nothing downstream can
    // work without it, so abort with an explicit message.
    ffmpeg::init().expect("failed to initialize ffmpeg");
}
// workaround non-send data not being allowed in components by using non-send resource instead
/// Maps each video-displaying entity to its non-`Send` ffmpeg state.
///
/// The ffmpeg decoder types are not `Send`, so they cannot live in a
/// component; they are stored here in a non-send resource instead, keyed by
/// the entity that carries the matching `VideoPlayer` component.
#[derive(Default)]
struct VideoResource {
    video_players: HashMap<Entity, VideoPlayerNonSendData>,
}
/// Non-`Send` decoding state for a single video.
struct VideoPlayerNonSendData {
    // Decodes packets of the selected video stream into raw frames.
    decoder: ffmpeg::decoder::Video,
    // Demuxer over the opened media file; source of packets.
    input_context: ffmpeg::format::context::Input,
    // Converts decoded frames to RGBA for upload into the Bevy texture.
    scaler_context: Context,
}
/// `Send` half of a video player, attached to the UI image entity.
#[derive(Component)]
struct VideoPlayer {
    // Handle of the Bevy image that decoded frames are written into.
    image_handle: Handle<Image>,
    // Index of the chosen video stream within the input container.
    video_stream_index: usize,
}
impl VideoPlayer {
    /// Opens the media file at `path` and prepares everything needed to
    /// decode its best video stream into a Bevy image.
    ///
    /// Returns the `Send` component half ([`VideoPlayer`]) and the non-`Send`
    /// decoder state ([`VideoPlayerNonSendData`]) separately, so the latter
    /// can be stored in a non-send resource.
    ///
    /// # Errors
    /// Returns an [`ffmpeg::Error`] if the file cannot be opened, contains no
    /// video stream, or the decoder/scaler cannot be created.
    //
    // NOTE: the original signature declared an unused lifetime parameter
    // (`fn new<'a, P>`); it has been removed.
    fn new<P>(
        path: P,
        mut images: ResMut<Assets<Image>>,
    ) -> Result<(VideoPlayer, VideoPlayerNonSendData), ffmpeg::Error>
    where
        P: AsRef<Path>,
    {
        let input_context = input(&path)?;

        // Pick the "best" video stream according to ffmpeg's own heuristics.
        let input_stream = input_context
            .streams()
            .best(Type::Video)
            .ok_or(ffmpeg::Error::StreamNotFound)?;
        let video_stream_index = input_stream.index();
        let context_decoder =
            ffmpeg::codec::context::Context::from_parameters(input_stream.parameters())?;
        let decoder = context_decoder.decoder().video()?;

        // Scaler converts decoded frames from the stream's native pixel
        // format to RGBA (matching the texture format below); dimensions are
        // unchanged (1:1 scale).
        let scaler_context = Context::get(
            decoder.format(),
            decoder.width(),
            decoder.height(),
            Pixel::RGBA,
            decoder.width(),
            decoder.height(),
            Flags::BILINEAR,
        )?;

        // Texture the video frames are uploaded into; filled solid pink so it
        // is visually obvious if no frame ever arrives.
        let mut image = Image::new_fill(
            bevy::render::render_resource::Extent3d {
                width: decoder.width(),
                height: decoder.height(),
                depth_or_array_layers: 1,
            },
            TextureDimension::D2,
            &Color::PINK.as_rgba_u32().to_le_bytes(),
            TextureFormat::Rgba8UnormSrgb,
        );
        // COPY_DST so frame data can be written each update; TEXTURE_BINDING
        // so the UI shader can sample it.
        image.texture_descriptor.usage = TextureUsages::COPY_DST | TextureUsages::TEXTURE_BINDING;
        let image_handle = images.add(image);

        Ok((
            VideoPlayer {
                image_handle,
                video_stream_index,
            },
            VideoPlayerNonSendData {
                decoder,
                input_context,
                scaler_context,
            },
        ))
    }
}
fn play_video(
mut video_player_query: Query<(&mut VideoPlayer, Entity)>,
mut video_resource: NonSendMut<VideoResource>,
mut images: ResMut<Assets<Image>>,
) {
for (video_player, entity) in video_player_query.iter_mut() {
let video_player_non_send = video_resource.video_players.get_mut(&entity).unwrap();
// read packets from stream until complete frame received
while let Some((stream, packet)) = video_player_non_send.input_context.packets().next() {
// check if packets is for the selected video stream
if stream.index() == video_player.video_stream_index {
// pass packet to decoder
video_player_non_send.decoder.send_packet(&packet).unwrap();
let mut decoded = Video::empty();
// check if complete frame was received
if let Ok(()) = video_player_non_send.decoder.receive_frame(&mut decoded) {
let mut rgb_frame = Video::empty();
// run frame through scaler for color space conversion
video_player_non_send
.scaler_context
.run(&decoded, &mut rgb_frame)
.unwrap();
// update data of image texture
let image = images.get_mut(&video_player.image_handle).unwrap();
image.data.copy_from_slice(rgb_frame.data(0));
return;
}
}
}
// no frame received
// signal end of playback to decoder
match video_player_non_send.decoder.send_eof() {
Err(ffmpeg::Error::Eof) => {}
other => other.unwrap(),
}
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment