Compare commits

...

1 Commit

Author: copygirl
SHA1: 8caf363d22
Message: WIP flecs-zig-ble new system init
Date: 7 months ago
Changed files:
  1. .gitignore (5 changed lines)
  2. build.zig (4 changed lines)
  3. src/App.zig (75 changed lines)
  4. src/Renderer.zig (211 changed lines)
  5. src/primitives.zig (2 changed lines)

.gitignore (vendored, 5 changed lines)

@@ -3,5 +3,6 @@
 /zig-out/
 # Dependencies (cloned manually)
-/libs/flecs-zig-ble/
-/libs/zig-gamedev/
+# Could be symlinks, so not including trailing slash.
+/libs/flecs-zig-ble
+/libs/zig-gamedev

build.zig (4 changed lines)

@@ -18,7 +18,7 @@ pub fn build(b: *std.Build) !void {
     const app = try mach.CoreApp.init(b, mach_dep.builder, .{
         .name = "zig-bloxel-game",
-        .src = "src/main.zig",
+        .src = "src/App.zig",
         .target = target,
         .optimize = optimize,
         .deps = &.{
@@ -36,7 +36,7 @@ pub fn build(b: *std.Build) !void {
     run_step.dependOn(&app.run.step);
     const unit_tests = b.addTest(.{
-        .root_source_file = .{ .path = "src/main.zig" },
+        .root_source_file = .{ .path = "src/App.zig" },
         .target = target,
         .optimize = optimize,
     });

src/App.zig (75 changed lines)

@@ -2,18 +2,15 @@ const std = @import("std");
 const GeneralPurposeAllocator = std.heap.GeneralPurposeAllocator(.{});
 const core = @import("mach").core;
-const Renderer = @import("./renderer.zig");
+const Renderer = @import("./Renderer.zig");
 const flecszigble = @import("flecs-zig-ble");
+const flecs = flecszigble.flecs;
 const Context = flecszigble.Context(void);
 const World = Context.World;
 const Iter = Context.Iter;
-const flecs = flecszigble.flecs;
-const OnLoad = flecs.pipeline.OnLoad;
-const OnUpdate = flecs.pipeline.OnUpdate;
-const OnStore = flecs.pipeline.OnStore;
 pub const App = @This();
 gpa: GeneralPurposeAllocator,
@@ -57,14 +54,10 @@ pub fn init(app: *App) !void {
     app.world = world;
     // Create a singleton component for accessing the `App` from ECS.
-    _ = try world.singleton("App", *App, app);
-    // TODO: The way we register systems using flecs-zig-ble is still very WIP.
-    _ = try world.system("PollEvents", pollEvents, OnLoad, "App");
+    _ = try world.singleton(*App, app);
-    const s = try world.system("UpdateWindowTitle", updateWindowTitle, OnStore, "");
-    // Set the update interval of the `UpdateWindowTitle` system to 1 second.
-    _ = flecszigble.c.ecs_set_interval(world.raw, s.raw, 1.0);
+    _ = try world.system(PollEvents);
+    _ = try world.system(UpdateWindowTitle);
     app.renderer = try Renderer.init(app);
 }
@@ -84,30 +77,36 @@ pub fn update(app: *App) !bool {
     return !app.world.progress(0.0);
 }
-/// Read events from the OS such as input.
-pub fn pollEvents(it: Iter) void {
-    const app = it.field(*App, 1)[0];
-    var pollIter = core.pollEvents();
-    while (pollIter.next()) |event| {
-        switch (event) {
-            // Allow the renderer to act on the window being resized.
-            // This is required so we can resize necessary buffers.
-            .framebuffer_resize => |_| app.renderer.resize(),
-            // Close the window when requested, such as when
-            // pressing the X button in the window title bar.
-            .close => it.world.quit(),
-            else => {},
+/// System that reads events from the OS such as input.
+pub const PollEvents = struct {
+    pub const phase = flecs.pipeline.OnLoad;
+    pub const expr = "App($)";
+    pub fn callback(world: *World, app: *const *App) void {
+        var pollIter = core.pollEvents();
+        while (pollIter.next()) |event| {
+            switch (event) {
+                // Allow the renderer to act on the window being resized.
+                // This is required so we can resize necessary buffers.
+                .framebuffer_resize => |_| app.*.renderer.resize(),
+                // Close the window when requested, such as when
+                // pressing the X button in the window title bar.
+                .close => world.quit(),
+                else => {},
+            }
         }
     }
-}
-/// Update the window title to show FPS and input frequency.
-pub fn updateWindowTitle(_: Iter) void {
-    core.printTitle(
-        "Triangle [ {d}fps ] [ Input {d}hz ]",
-        .{ core.frameRate(), core.inputRate() },
-    ) catch @panic("Title too long!");
-}
+};
+/// System that updates the window title to show FPS and input frequency.
+pub const UpdateWindowTitle = struct {
+    pub const phase = flecs.pipeline.OnStore;
+    pub const interval = 1.0; // Run only once a second.
+    pub fn callback(_: Iter) void {
+        core.printTitle(
+            "Triangle [ {d}fps ] [ Input {d}hz ]",
+            .{ core.frameRate(), core.inputRate() },
+        ) catch @panic("Title too long!");
+    }
+};
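
For reference, the convention the new registration API appears to rely on, judging from this diff, is that `world.system(T)` reads a system's pipeline phase, optional query expression or run interval, and `callback` function from public declarations on the struct `T`. A hypothetical extra system in the same style is sketched below; the `LogFrameTime` name and its body are invented for illustration and are not part of this commit.

/// Hypothetical example (not part of this commit): one more system declared
/// in the same style as PollEvents and UpdateWindowTitle above, assuming
/// flecs-zig-ble picks up the `phase`, `interval` and `callback` declarations.
pub const LogFrameTime = struct {
    // Run during the regular update phase of the pipeline.
    pub const phase = flecs.pipeline.OnUpdate;
    // Run once every five seconds instead of every frame.
    pub const interval = 5.0;

    pub fn callback(it: Iter) void {
        // `deltaTime` is the time elapsed since this system last ran.
        std.log.info("frame delta: {d:.3}s", .{it.deltaTime()});
    }
};

It would then presumably be registered from `init` with `_ = try world.system(LogFrameTime);`, exactly like the two systems above.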

src/Renderer.zig (211 changed lines)

@@ -11,20 +11,19 @@ const zm = @import("zmath");
 const vec = zm.f32x4;
 const Mat = zm.Mat;
-const App = @import("./main.zig");
+const App = @import("./App.zig");
 const primitives = @import("./primitives.zig");
 const VertexData = primitives.VertexData;
 const PrimitiveData = primitives.PrimitiveData;
 const flecszigble = @import("flecs-zig-ble");
+const flecs = flecszigble.flecs;
 const Context = flecszigble.Context(void);
 const Entity = Context.Entity;
 const Iter = Context.Iter;
-const flecs = flecszigble.flecs;
-const OnStore = flecs.pipeline.OnStore;
 const Transform = struct { value: Mat };
 const CameraPerspective = struct {
     /// Vertical field of view (in degrees).
@@ -231,8 +230,8 @@ pub fn init(app: *App) !*Renderer {
     }
     // Register components necessary for the camera.
-    _ = try app.world.component("Transform", Transform);
-    _ = try app.world.component("CameraPerspective", CameraPerspective);
+    _ = try app.world.component(Transform);
+    _ = try app.world.component(CameraPerspective);
     const camera_entity = try app.world.entity(
         .{ .name = "Camera", .symbol = "Camera" },
@@ -244,8 +243,7 @@ pub fn init(app: *App) !*Renderer {
         .far_plane = 80.0,
     });
-    const render_expr = "App, [in] CameraPerspective(Camera), [out] Transform(Camera)";
-    _ = try app.world.system("Render", render, OnStore, render_expr);
+    _ = try app.world.system(Render);
     const result = try app.allocator.create(Renderer);
     result.* = .{
@@ -293,105 +291,110 @@ pub fn resize(self: *Renderer) void {
     self.recreateDepthTexture();
 }
-pub fn render(it: Iter) void {
-    const app = it.field(*App, 1)[0];
-    const camera_perspective = it.field(CameraPerspective, 2)[0];
-    const camera_transform = &it.field(Transform, 3)[0];
-    const self = app.renderer;
-    self.time += it.deltaTime();
-    // Set up a view matrix from the camera transform.
-    // This moves everything to be relative to the camera.
-    // TODO: Actually implement camera transform instead of hardcoding a look-at matrix.
-    // const view_matrix = zm.inverse(app.camera_transform);
-    const camera_distance = 8.0;
-    const x = @cos(self.time * std.math.tau / 20) * camera_distance;
-    const z = @sin(self.time * std.math.tau / 20) * camera_distance;
-    const camera_pos = vec(x, 2.0, z, 1.0);
-    const view_matrix = zm.lookAtLh(camera_pos, vec(0, 0, 0, 1), vec(0, 1, 0, 1));
-    // Setting the transform here doesn't do anything because it's not used
-    // anywhere. In the future we would want to set the camera transform
-    // outside of the rendering step, and then get and use it here, instead.
-    camera_transform.* = .{ .value = view_matrix };
-    // TODO: Not sure if this is the proper transform, or actually inverted.
-    // Set up a projection matrix using the size of the window.
-    // The perspective projection will make things further away appear smaller.
-    const width: f32 = @floatFromInt(core.descriptor.width);
-    const height: f32 = @floatFromInt(core.descriptor.height);
-    const proj_matrix = zm.perspectiveFovLh(
-        std.math.degreesToRadians(f32, camera_perspective.field_of_view),
-        width / height,
-        camera_perspective.near_plane,
-        camera_perspective.far_plane,
-    );
-    const view_proj_matrix = zm.mul(view_matrix, proj_matrix);
-    // Get back buffer texture to render to.
-    const back_buffer_view = core.swap_chain.getCurrentTextureView().?;
-    defer back_buffer_view.release();
-    // Once rendering is done (hence `defer`), swap back buffer to the front to display.
-    defer core.swap_chain.present();
-    const render_pass_info = gpu.RenderPassDescriptor.init(.{
-        .color_attachments = &.{.{
-            .view = back_buffer_view,
-            .clear_value = std.mem.zeroes(gpu.Color),
-            .load_op = .clear,
-            .store_op = .store,
-        }},
-        .depth_stencil_attachment = &.{
-            .view = self.depth_texture_view.?,
-            .depth_load_op = .clear,
-            .depth_store_op = .store,
-            .depth_clear_value = 1.0,
-        },
-    });
-    // Create a `WGPUCommandEncoder` which provides an interface for recording GPU commands.
-    const encoder = core.device.createCommandEncoder(null);
-    defer encoder.release();
-    // Write to the scene uniform buffer for this set of commands.
-    encoder.writeBuffer(self.view_proj_buffer, 0, &[_]zm.Mat{
-        // All matrices the GPU has to work with need to be transposed,
-        // because WebGPU uses column-major matrices while zmath is row-major.
-        zm.transpose(view_proj_matrix),
-    });
-    {
-        const pass = encoder.beginRenderPass(&render_pass_info);
-        defer pass.release();
-        defer pass.end();
-        pass.setPipeline(self.pipeline);
-        pass.setBindGroup(0, self.camera_bind_group, &.{});
-        for (self.object_data) |object| {
-            // Set the vertex and index buffer used to render this
-            // object to the ones from the primitive it wants to use.
-            const prim = object.primitive;
-            pass.setVertexBuffer(0, prim.vertex_buffer, 0, prim.vertex_count * @sizeOf(VertexData));
-            pass.setIndexBuffer(prim.index_buffer, .uint32, 0, prim.index_count * @sizeOf(u32));
-            // Set the bind group for the object we want to render.
-            pass.setBindGroup(1, object.model_bind_group, &.{});
-            // Draw a number of triangles as specified in the index buffer.
-            pass.drawIndexed(prim.index_count, 1, 0, 0, 0);
+/// System which renders the game world from the camera entity's perspective.
+pub const Render = struct {
+    pub const phase = flecs.pipeline.OnStore;
+    pub const expr = "App($), [in] CameraPerspective(Camera), [out] Transform(Camera)";
+    pub fn callback(it: Iter) void {
+        const app = it.field(*App, 1)[0];
+        const camera_perspective = it.field(CameraPerspective, 2)[0];
+        const camera_transform = &it.field(Transform, 3)[0];
+        const self = app.renderer;
+        self.time += it.deltaTime();
+        // Set up a view matrix from the camera transform.
+        // This moves everything to be relative to the camera.
+        // TODO: Actually implement camera transform instead of hardcoding a look-at matrix.
+        // const view_matrix = zm.inverse(app.camera_transform);
+        const camera_distance = 8.0;
+        const x = @cos(self.time * std.math.tau / 20) * camera_distance;
+        const z = @sin(self.time * std.math.tau / 20) * camera_distance;
+        const camera_pos = vec(x, 2.0, z, 1.0);
+        const view_matrix = zm.lookAtLh(camera_pos, vec(0, 0, 0, 1), vec(0, 1, 0, 1));
+        // Setting the transform here doesn't do anything because it's not used
+        // anywhere. In the future we would want to set the camera transform
+        // outside of the rendering step, and then get and use it here, instead.
+        camera_transform.* = .{ .value = view_matrix };
+        // TODO: Not sure if this is the proper transform, or actually inverted.
+        // Set up a projection matrix using the size of the window.
+        // The perspective projection will make things further away appear smaller.
+        const width: f32 = @floatFromInt(core.descriptor.width);
+        const height: f32 = @floatFromInt(core.descriptor.height);
+        const proj_matrix = zm.perspectiveFovLh(
+            std.math.degreesToRadians(f32, camera_perspective.field_of_view),
+            width / height,
+            camera_perspective.near_plane,
+            camera_perspective.far_plane,
+        );
+        const view_proj_matrix = zm.mul(view_matrix, proj_matrix);
+        // Get back buffer texture to render to.
+        const back_buffer_view = core.swap_chain.getCurrentTextureView().?;
+        defer back_buffer_view.release();
+        // Once rendering is done (hence `defer`), swap back buffer to the front to display.
+        defer core.swap_chain.present();
+        const render_pass_info = gpu.RenderPassDescriptor.init(.{
+            .color_attachments = &.{.{
+                .view = back_buffer_view,
+                .clear_value = std.mem.zeroes(gpu.Color),
+                .load_op = .clear,
+                .store_op = .store,
+            }},
+            .depth_stencil_attachment = &.{
+                .view = self.depth_texture_view.?,
+                .depth_load_op = .clear,
+                .depth_store_op = .store,
+                .depth_clear_value = 1.0,
+            },
+        });
+        // Create a `WGPUCommandEncoder` which provides an interface for recording GPU commands.
+        const encoder = core.device.createCommandEncoder(null);
+        defer encoder.release();
+        // Write to the scene uniform buffer for this set of commands.
+        encoder.writeBuffer(self.view_proj_buffer, 0, &[_]zm.Mat{
+            // All matrices the GPU has to work with need to be transposed,
+            // because WebGPU uses column-major matrices while zmath is row-major.
+            zm.transpose(view_proj_matrix),
+        });
+        {
+            const pass = encoder.beginRenderPass(&render_pass_info);
+            defer pass.release();
+            defer pass.end();
+            pass.setPipeline(self.pipeline);
+            pass.setBindGroup(0, self.camera_bind_group, &.{});
+            for (self.object_data) |object| {
+                // Set the vertex and index buffer used to render this
+                // object to the ones from the primitive it wants to use.
+                const prim = object.primitive;
+                pass.setVertexBuffer(0, prim.vertex_buffer, 0, prim.vertex_count * @sizeOf(VertexData));
+                pass.setIndexBuffer(prim.index_buffer, .uint32, 0, prim.index_count * @sizeOf(u32));
+                // Set the bind group for the object we want to render.
+                pass.setBindGroup(1, object.model_bind_group, &.{});
+                // Draw a number of triangles as specified in the index buffer.
+                pass.drawIndexed(prim.index_count, 1, 0, 0, 0);
+            }
         }
-    }
-    // Finish recording commands, creating a `WGPUCommandBuffer`.
-    var command = encoder.finish(null);
-    defer command.release();
+        // Finish recording commands, creating a `WGPUCommandBuffer`.
+        var command = encoder.finish(null);
+        defer command.release();
-    // Submit the command(s) to the GPU.
-    core.queue.submit(&.{command});
-}
+        // Submit the command(s) to the GPU.
+        core.queue.submit(&.{command});
+    }
+};
 /// Loads a texture from the provided buffer and uploads it to the GPU.
 pub fn loadTexture(allocator: std.mem.Allocator, buffer: []const u8) !*gpu.TextureView {

src/primitives.zig (2 changed lines)

@@ -3,7 +3,7 @@ const tau = std.math.tau;
 const gpu = @import("mach").core.gpu;
-const Renderer = @import("./renderer.zig");
+const Renderer = @import("./Renderer.zig");
 const createAndWriteBuffer = Renderer.createAndWriteBuffer;
 /// Describes the layout of each vertex that a primitive is made of.
