gtk4-rs
gtk4-rs copied to clipboard
[HELP] Drawing OpenGL textures is not working
I have been creating a few OpenGL widgets in the same way as the gtk4-rs example glium_gl_area. They have worked well up until when I wanted to draw a texture. I can't for the life of me get it to render the texture. To load and render textures I started from the image example in the glium crate.
I have created a "minimal" example of my issue where I have factored out the texture loading and GL drawing code to a module and then use the exact same code to draw a texture in a winit window and a gtk4 window. It works as expected in the winit window but nothing is drawn in the gtk4 window. My code is available here: https://github.com/faern/gtk_glium_texture/. I'm also copying all the code here, so it can be read without leaving the issue:
main.rs
use std::ptr;
use gtk::{glib, prelude::*};
mod glium_gl_area;
use glium_gl_area::GliumGLArea;
pub mod gl;
mod glium;
/// Resolve every OpenGL entry point through libepoxy — the GL dispatch
/// library GTK itself links against — so glium and GTK agree on one loader.
///
/// Symbols that cannot be resolved fall back to a null pointer, which is
/// what `epoxy::load_with` expects for unavailable functions.
fn load_gl_pointers() {
    #[cfg(target_os = "macos")]
    let lib = unsafe { libloading::os::unix::Library::new("libepoxy.0.dylib") }.unwrap();
    #[cfg(all(unix, not(target_os = "macos")))]
    let lib = unsafe { libloading::os::unix::Library::new("libepoxy.so.0") }.unwrap();
    #[cfg(windows)]
    let lib = libloading::os::windows::Library::open_already_loaded("libepoxy-0.dll")
        .or_else(|_| libloading::os::windows::Library::open_already_loaded("epoxy-0.dll"))
        .unwrap();

    epoxy::load_with(|symbol| {
        unsafe { lib.get::<_>(symbol.as_bytes()) }
            .map(|ptr| *ptr)
            .unwrap_or(ptr::null())
    });
}

fn main() -> glib::ExitCode {
    // Load GL pointers from epoxy (GL context management library used by GTK).
    load_gl_pointers();

    // Spawn the glium/winit window (where texture drawing works) on a
    // background thread...
    std::thread::spawn(|| {
        glium::State::run_loop();
    });

    // ...and run the GTK window (where textures don't render) on the main
    // thread, as GTK requires.
    let app = gtk::Application::builder()
        .application_id("com.example.not-working-gtk-textures")
        .build();
    app.connect_activate(build_ui);
    app.run()
}
fn build_ui(application: >k::Application) {
let window = gtk::ApplicationWindow::new(application);
window.set_title(Some("Glium in GLArea"));
let widget = GliumGLArea::default();
window.set_child(Some(&widget));
window.present();
}
gl.rs
use std::io::Cursor;
use glium::backend::Facade;
use glium::program::Program;
use glium::texture::{CompressedTexture2d, RawImage2d};
use glium::uniform;
use glium::Surface;
use glium::{implement_vertex, index::PrimitiveType, program};
use glium::{Frame, IndexBuffer, VertexBuffer};
/// One corner of the textured quad: a 2D clip-space position plus the
/// texture coordinate sampled at that corner.
#[derive(Copy, Clone)]
pub struct Vertex {
    // Clip-space XY; the quad built in `rectangle_vertices` spans [-1, 1].
    position: [f32; 2],
    // UV coordinate into the bound texture.
    tex_coords: [f32; 2],
}
// Generates glium's vertex-format metadata. The field names listed here must
// match the `in` attribute names declared in the vertex shader.
implement_vertex!(Vertex, position, tex_coords);
/// Compiles the GLSL 1.40 shader pair used to draw the textured quad.
///
/// The vertex shader transforms positions by a caller-supplied `matrix`
/// uniform and forwards texture coordinates; the fragment shader samples
/// the `tex` sampler at the interpolated coordinate.
///
/// Panics if the program fails to compile or link on the given context.
fn compile_program<F: Facade>(context: &F) -> Program {
    program!(context,
        140 => {
            vertex: "
                #version 140
                uniform mat4 matrix;
                in vec2 position;
                in vec2 tex_coords;
                out vec2 v_tex_coords;
                void main() {
                    gl_Position = matrix * vec4(position, 0.0, 1.0);
                    v_tex_coords = tex_coords;
                }
            ",
            fragment: "
                #version 140
                uniform sampler2D tex;
                in vec2 v_tex_coords;
                out vec4 f_color;
                void main() {
                    f_color = texture(tex, v_tex_coords);
                }
            "
        },
    )
    .unwrap()
}
/// Uploads a full-viewport quad: four corners in clip space, with texture
/// coordinates mapping the whole texture onto it.
///
/// Returns the vertex buffer and a triangle-strip index buffer covering
/// the quad with two triangles.
fn rectangle_vertices<F: Facade>(context: &F) -> (VertexBuffer<Vertex>, IndexBuffer<u16>) {
    let corners = [
        Vertex {
            position: [-1.0, -1.0],
            tex_coords: [0.0, 0.0],
        },
        Vertex {
            position: [-1.0, 1.0],
            tex_coords: [0.0, 1.0],
        },
        Vertex {
            position: [1.0, 1.0],
            tex_coords: [1.0, 1.0],
        },
        Vertex {
            position: [1.0, -1.0],
            tex_coords: [1.0, 0.0],
        },
    ];
    let vertices = glium::VertexBuffer::new(context, &corners).unwrap();
    // Strip order 1,2,0,3 walks the corners so the two generated triangles
    // tile the quad.
    let indices =
        glium::IndexBuffer::new(context, PrimitiveType::TriangleStrip, &[1u16, 2, 0, 3]).unwrap();
    (vertices, indices)
}
/// Decodes the bundled `opengl.png` and uploads it as a compressed 2D
/// texture on the given glium context.
///
/// Panics if the embedded PNG fails to decode or the upload fails.
fn load_texture<F: Facade>(context: &F) -> CompressedTexture2d {
    // building a texture with "OpenGL" drawn on it
    let image = image::load(
        Cursor::new(&include_bytes!("../opengl.png")[..]),
        image::ImageFormat::Png,
    )
    .unwrap()
    .to_rgba8();
    let image_dimensions = image.dimensions();
    // `from_raw_rgba_reversed` flips the rows: image crates store pixels
    // top-to-bottom, while GL texture coordinates start at the bottom.
    let image = RawImage2d::from_raw_rgba_reversed(&image.into_raw(), image_dimensions);
    CompressedTexture2d::new(context, image).unwrap()
}
/// All GL state needed to render one textured full-viewport quad:
/// geometry, the texture itself, and the compiled shader program.
pub struct DrawTexture {
    vertex_buffer: glium::VertexBuffer<crate::gl::Vertex>,
    index_buffer: glium::IndexBuffer<u16>,
    opengl_texture: glium::texture::CompressedTexture2d,
    program: glium::Program,
}
impl DrawTexture {
    /// Loads the texture, builds the quad geometry, and compiles the shader
    /// program on the given glium context.
    pub fn new<F: Facade>(context: &F) -> Self {
        let opengl_texture = load_texture(context);
        let (vertex_buffer, index_buffer) = rectangle_vertices(context);
        let program = compile_program(context);
        Self {
            vertex_buffer,
            index_buffer,
            opengl_texture,
            program,
        }
    }

    /// Clears the frame, draws the textured quad, and finishes the frame.
    ///
    /// Takes `frame` by value so `Frame::finish()` is guaranteed to run
    /// exactly once, here.
    pub fn draw(&self, mut frame: Frame) {
        let uniforms = uniform! {
            // Identity transform: the quad's clip-space coordinates are
            // used unchanged, so it covers the whole viewport.
            matrix: [
                [1.0, 0.0, 0.0, 0.0],
                [0.0, 1.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0],
                [0.0, 0.0, 0.0, 1.0f32]
            ],
            tex: &self.opengl_texture
        };
        // NOTE(review): clearing with alpha = 0.0 produces a fully
        // transparent background. A host that composites with alpha (as a
        // GTK GLArea may) can render such output differently from a plain
        // swapchain window — worth confirming when the two hosts disagree.
        frame.clear_color(0.0, 0.0, 0.0, 0.0);
        frame
            .draw(
                &self.vertex_buffer,
                &self.index_buffer,
                &self.program,
                &uniforms,
                &Default::default(),
            )
            .unwrap();
        frame.finish().unwrap();
    }
}
glium_gl_area/mod.rs
mod imp;
use gtk::{gdk, glib, prelude::*};
// Public wrapper type generated by glib: exposes the `imp::GliumGLArea`
// subclass implementation as a first-class widget deriving from GtkGLArea.
glib::wrapper! {
    pub struct GliumGLArea(ObjectSubclass<imp::GliumGLArea>)
        @extends gtk::GLArea, gtk::Widget;
}
impl Default for GliumGLArea {
    /// Constructs the widget through the GObject machinery with default
    /// property values.
    fn default() -> Self {
        glib::Object::new()
    }
}
// Bridges glium onto the GdkGLContext owned by this GLArea.
//
// `Backend` is an unsafe trait: by implementing it we promise glium that the
// context management functions below behave as documented (correct
// current-context reporting, valid proc addresses, etc.).
unsafe impl glium::backend::Backend for GliumGLArea {
    fn swap_buffers(&self) -> Result<(), glium::SwapBuffersError> {
        // We're supposed to draw (and hence swap buffers) only inside the `render()`
        // vfunc or signal, which means that GLArea will handle buffer swaps for
        // us.
        Ok(())
    }

    unsafe fn get_proc_address(&self, symbol: &str) -> *const std::ffi::c_void {
        // Resolve GL symbols through epoxy — the same loader GTK uses; the
        // pointers were loaded at startup in main().
        epoxy::get_proc_addr(symbol)
    }

    fn get_framebuffer_dimensions(&self) -> (u32, u32) {
        // Report the drawable size in device pixels: logical widget size
        // multiplied by the HiDPI scale factor.
        let scale = self.scale_factor();
        let width = self.width();
        let height = self.height();
        ((width * scale) as u32, (height * scale) as u32)
    }

    fn is_current(&self) -> bool {
        // Compare GDK's notion of the current GL context with this widget's
        // own context; no context at all means "not current".
        match self.context() {
            Some(context) => gdk::GLContext::current() == Some(context),
            None => false,
        }
    }

    unsafe fn make_current(&self) {
        // Delegate to GLArea, which makes the underlying GdkGLContext current.
        GLAreaExt::make_current(self);
    }

    fn resize(&self, size: (u32, u32)) {
        // NOTE(review): `set_size_request` sets a *minimum* widget size; it
        // does not resize a GL surface. Presumably glium rarely (or never)
        // calls this for a windowless backend — verify, since a size request
        // here could fight GTK's own layout.
        self.set_size_request(size.0 as i32, size.1 as i32);
    }
}
glium_gl_area/imp.rs
use std::{cell::RefCell, rc::Rc};
use glium::Frame;
use gtk::{glib, prelude::*, subclass::prelude::*};
/// Owns the glium context created on top of the GTK `GLArea` backend,
/// together with the texture-drawing resources built on that context.
struct Renderer {
    context: Rc<glium::backend::Context>,
    draw_texture: crate::gl::DrawTexture,
}
impl Renderer {
    /// Builds the texture-drawing resources on the supplied glium context
    /// and takes ownership of that context.
    fn new(context: Rc<glium::backend::Context>) -> Self {
        Self {
            draw_texture: crate::gl::DrawTexture::new(&context),
            context,
        }
    }

    /// Renders one frame sized to the backend's current framebuffer
    /// dimensions.
    fn draw(&self) {
        let dimensions = self.context.get_framebuffer_dimensions();
        let frame = Frame::new(self.context.clone(), dimensions);
        self.draw_texture.draw(frame);
    }
}
/// Private subclass state. The renderer exists only while the widget is
/// realized: it is created in `realize()` and dropped in `unrealize()`.
#[derive(Default)]
pub struct GliumGLArea {
    renderer: RefCell<Option<Renderer>>,
}
// Boilerplate registering `GliumGLArea` with the GObject type system as a
// subclass of GtkGLArea.
#[glib::object_subclass]
impl ObjectSubclass for GliumGLArea {
    const NAME: &'static str = "GliumGLArea";
    type Type = super::GliumGLArea;
    type ParentType = gtk::GLArea;
}
// No GObject properties or signals to override; the defaults suffice.
impl ObjectImpl for GliumGLArea {}
impl WidgetImpl for GliumGLArea {
    /// Creates the glium context as soon as GTK has realized the widget and
    /// its GdkGLContext.
    fn realize(&self) {
        self.parent_realize();
        let widget = self.obj();
        // If GLArea failed to create its GL context there is nothing we can
        // render into, so bail out early.
        if widget.error().is_some() {
            return;
        }
        // SAFETY: we know the GdkGLContext exists as we checked for errors above, and
        // we haven't done any operations on it which could lead to glium's
        // state mismatch. (In theory, GTK doesn't do any state-breaking
        // operations on the context either.)
        //
        // We will also ensure glium's context does not outlive the GdkGLContext by
        // destroying it in `unrealize()`.
        let context = unsafe {
            glium::backend::Context::new(
                widget.clone(),
                true,
                // Print every GL debug message — useful while diagnosing why
                // the texture does not render.
                glium::debug::DebugCallbackBehavior::PrintAll,
            )
        }
        .unwrap();
        *self.renderer.borrow_mut() = Some(Renderer::new(context));
    }

    /// Drops the glium context before the underlying GdkGLContext goes away.
    fn unrealize(&self) {
        *self.renderer.borrow_mut() = None;
        self.parent_unrealize();
    }
}
impl GLAreaImpl for GliumGLArea {
fn render(&self, _context: >k::gdk::GLContext) -> glib::Propagation {
self.renderer.borrow().as_ref().unwrap().draw();
glib::Propagation::Stop
}
}
glium.rs
use glium::Display;
use glutin::display::GetGlDisplay;
use glutin::prelude::*;
use glutin::surface::WindowSurface;
use raw_window_handle::HasRawWindowHandle;
use std::num::NonZeroU32;
use winit::platform::wayland::EventLoopBuilderExtWayland;
/// Rendering state for the winit/glutin window: just the shared
/// texture-drawing resources from `crate::gl`.
pub struct Application {
    draw_texture: crate::gl::DrawTexture,
}
impl Application {
    /// Builds the texture-drawing resources against the given glium display.
    fn new(display: &Display<WindowSurface>) -> Self {
        let draw_texture = crate::gl::DrawTexture::new(display);
        Self { draw_texture }
    }

    /// Renders a single frame into the display's back buffer.
    fn draw_frame(&mut self, display: &Display<WindowSurface>) {
        let target = display.draw();
        self.draw_texture.draw(target);
    }
}
/// Everything needed to keep the winit window rendering: the glium display,
/// the underlying window, and the per-frame drawing state.
pub struct State {
    pub display: glium::Display<WindowSurface>,
    pub window: winit::window::Window,
    pub context: Application,
}
impl State {
    /// Creates a winit window plus a glutin GL context and surface, and
    /// wraps them in a glium `Display`.
    ///
    /// `visible` controls window visibility; invisible windows fall back to
    /// an 800x600 framebuffer since their reported inner size may be unusable.
    ///
    /// Panics on any window/context/surface creation failure.
    pub fn new<W>(event_loop: &winit::event_loop::EventLoopWindowTarget<W>, visible: bool) -> Self {
        let window_builder = winit::window::WindowBuilder::new()
            .with_title("Glium image example")
            .with_visible(visible);
        let config_template_builder = glutin::config::ConfigTemplateBuilder::new();
        let display_builder =
            glutin_winit::DisplayBuilder::new().with_window_builder(Some(window_builder));
        // First we create a window
        let (window, gl_config) = display_builder
            .build(event_loop, config_template_builder, |mut configs| {
                // Just use the first configuration since we don't have any special preferences here
                configs.next().unwrap()
            })
            .unwrap();
        let window = window.unwrap();
        // Then the configuration which decides which OpenGL version we'll end up using, here we just use the default which is currently 3.3 core
        // When this fails we'll try and create an ES context, this is mainly used on mobile devices or various ARM SBC's
        // If you depend on features available in modern OpenGL Versions you need to request a specific, modern, version. Otherwise things will very likely fail.
        let raw_window_handle = window.raw_window_handle();
        let context_attributes =
            glutin::context::ContextAttributesBuilder::new().build(Some(raw_window_handle));
        let fallback_context_attributes = glutin::context::ContextAttributesBuilder::new()
            .with_context_api(glutin::context::ContextApi::Gles(None))
            .build(Some(raw_window_handle));
        let not_current_gl_context = Some(unsafe {
            gl_config
                .display()
                .create_context(&gl_config, &context_attributes)
                .unwrap_or_else(|_| {
                    gl_config
                        .display()
                        .create_context(&gl_config, &fallback_context_attributes)
                        .expect("failed to create context")
                })
        });
        // Determine our framebuffer size based on the window size, or default to 800x600 if it's invisible
        let (width, height): (u32, u32) = if visible {
            window.inner_size().into()
        } else {
            (800, 600)
        };
        let attrs = glutin::surface::SurfaceAttributesBuilder::<WindowSurface>::new().build(
            raw_window_handle,
            NonZeroU32::new(width).unwrap(),
            NonZeroU32::new(height).unwrap(),
        );
        // Now we can create our surface, use it to make our context current and finally create our display
        let surface = unsafe {
            gl_config
                .display()
                .create_window_surface(&gl_config, &attrs)
                .unwrap()
        };
        let current_context = not_current_gl_context
            .unwrap()
            .make_current(&surface)
            .unwrap();
        let display = glium::Display::from_context_surface(current_context, surface).unwrap();
        Self::from_display_window(display, window)
    }

    /// Assembles a `State` from an already-created display and window,
    /// building the drawing resources on the display.
    pub fn from_display_window(
        display: glium::Display<WindowSurface>,
        window: winit::window::Window,
    ) -> Self {
        let context = Application::new(&display);
        Self {
            display,
            window,
            context,
        }
    }

    /// Start the event_loop and keep rendering frames until the program is closed
    pub fn run_loop() {
        // `with_any_thread(true)` lets this loop run off the main thread
        // (main.rs spawns it on a background thread while GTK owns main).
        let event_loop = winit::event_loop::EventLoopBuilder::new()
            .with_any_thread(true)
            .build()
            .expect("event loop building");
        // Window/GL state is created lazily on Resumed and torn down on
        // Suspended, hence the Option.
        let mut state: Option<State> = None;
        let result = event_loop.run(move |event, window_target| {
            match event {
                // The Resumed/Suspended events are mostly for Android compatibility since the context can get lost there at any point.
                // For convenience's sake the Resumed event is also delivered on other platforms on program startup.
                winit::event::Event::Resumed => {
                    state = Some(State::new(window_target, true));
                }
                winit::event::Event::Suspended => state = None,
                // By requesting a redraw in response to a AboutToWait event we get continuous rendering.
                // For applications that only change due to user input you could remove this handler.
                winit::event::Event::AboutToWait => {
                    if let Some(state) = &state {
                        state.window.request_redraw();
                    }
                }
                winit::event::Event::WindowEvent { event, .. } => match event {
                    winit::event::WindowEvent::Resized(new_size) => {
                        if let Some(state) = &state {
                            state.display.resize(new_size.into());
                        }
                    }
                    winit::event::WindowEvent::RedrawRequested => {
                        if let Some(state) = &mut state {
                            state.context.draw_frame(&state.display);
                        }
                    }
                    // Exit the event loop when requested (by closing the window for example) or when
                    // pressing the Esc key.
                    winit::event::WindowEvent::CloseRequested
                    | winit::event::WindowEvent::KeyboardInput {
                        event:
                            winit::event::KeyEvent {
                                state: winit::event::ElementState::Pressed,
                                logical_key:
                                    winit::keyboard::Key::Named(winit::keyboard::NamedKey::Escape),
                                ..
                            },
                        ..
                    } => window_target.exit(),
                    // Every other event
                    _ev => {}
                },
                _ => (),
            };
        });
        result.unwrap();
    }
}
Here is a screenshot of the two windows side by side, winit/glium window to the left and gtk to the right:
I run this on Fedora Linux 39 under sway (wayland). But I have tried it under XWayland also and I get the same result. So I don't think it's an X11/Wayland issue.
Since all the texture loading and drawing code is exactly the same, I know the texture loads properly and should be bound properly before the draw call(?), etc. The only difference I can think of is that the initialization of the two GL contexts differs in a way that causes this. Or is it a bug in gtk4-rs?