Merge pull request #28 from stefanpartheym/master

Compatibility with zig v0.11.0-dev

This commit is contained in:
commit 69afaeab3f

zig-ecs/.gitignore (vendored, 4 changed lines)
@@ -1,4 +1,6 @@
zig-cache
zig-arm-cache

.DS_Store
/docs/

.DS_Store
@@ -3,7 +3,10 @@ const Builder = std.build.Builder;
 const builtin = @import("builtin");
 
 pub fn build(b: *Builder) void {
-    const build_mode = b.standardReleaseOptions();
+    const optimize = b.standardOptimizeOption(.{});
+    const ecs_module = b.createModule(.{
+        .source_file = std.build.FileSource{ .path = "src/ecs.zig" },
+    });
 
     // use a different cache folder for macos arm builds
     b.cache_root = if (builtin.os.tag == .macos and builtin.target.cpu.arch == .aarch64) "zig-arm-cache" else "zig-cache";
@@ -18,11 +21,14 @@ pub fn build(b: *Builder) void {
         const name = if (i == 0) "ecs" else example[0];
         const source = example[1];
 
-        var exe = b.addExecutable(name, source);
-        exe.setBuildMode(b.standardReleaseOptions());
+        var exe = b.addExecutable(.{
+            .name = name,
+            .root_source_file = std.build.FileSource{ .path = source },
+            .optimize = optimize,
+        });
         exe.setOutputDir(std.fs.path.join(b.allocator, &[_][]const u8{ b.cache_root, "bin" }) catch unreachable);
-        exe.addPackagePath("ecs", "src/ecs.zig");
-        exe.linkSystemLibrary("c");
+        exe.addModule("ecs", ecs_module);
+        exe.linkLibC();
 
         const docs = exe;
         docs.emit_docs = .emit;
@@ -42,13 +48,17 @@ pub fn build(b: *Builder) void {
     }
 
     // internal tests
-    const internal_test_step = b.addTest("src/tests.zig");
-    internal_test_step.setBuildMode(build_mode);
+    const internal_test_step = b.addTest(.{
+        .root_source_file = std.build.FileSource{ .path = "src/tests.zig" },
+        .optimize = optimize,
+    });
 
     // public api tests
-    const test_step = b.addTest("tests/tests.zig");
-    test_step.addPackagePath("ecs", "src/ecs.zig");
-    test_step.setBuildMode(build_mode);
+    const test_step = b.addTest(.{
+        .root_source_file = std.build.FileSource{ .path = "tests/tests.zig" },
+        .optimize = optimize,
+    });
+    test_step.addModule("ecs", ecs_module);
 
     const test_cmd = b.step("test", "Run the tests");
     test_cmd.dependOn(&internal_test_step.step);
@@ -61,7 +71,7 @@ pub const LibType = enum(i32) {
     exe_compiled,
 };
 
-pub fn getPackage(comptime prefix_path: []const u8) std.build.Pkg {
+pub fn getModule(comptime prefix_path: []const u8) std.build.Module {
     return .{
         .name = "ecs",
         .path = .{ .path = prefix_path ++ "src/ecs.zig" },
@@ -70,18 +80,16 @@ pub fn getPackage(comptime prefix_path: []const u8) std.build.Pkg {
 
 /// prefix_path is used to add package paths. It should be the the same path used to include this build file
 pub fn linkArtifact(b: *Builder, artifact: *std.build.LibExeObjStep, _: std.build.Target, lib_type: LibType, comptime prefix_path: []const u8) void {
-    const build_mode = b.standardReleaseOptions();
+    const optimize = b.standardOptimizeOption(.{});
     switch (lib_type) {
         .static => {
-            const lib = b.addStaticLibrary("ecs", "ecs.zig");
-            lib.setBuildMode(build_mode);
+            const lib = b.addStaticLibrary(.{ .name = "ecs", .root_source_file = "ecs.zig", .optimize = optimize });
             lib.install();
 
             artifact.linkLibrary(lib);
         },
         .dynamic => {
-            const lib = b.addSharedLibrary("ecs", "ecs.zig", .unversioned);
-            lib.setBuildMode(build_mode);
+            const lib = b.addSharedLibrary(.{ .name = "ecs", .root_source_file = "ecs.zig", .optimize = optimize });
            lib.install();
 
             artifact.linkLibrary(lib);
@@ -89,5 +97,5 @@ pub fn linkArtifact(b: *Builder, artifact: *std.buil
         else => {},
     }
 
-    artifact.addPackage(getPackage(prefix_path));
+    artifact.addModule(getModule(prefix_path));
 }
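For reference, a downstream project now consumes the library through a module rather than a package. A minimal sketch of such a consumer build.zig, assuming the same zig v0.11.0-dev std.build API used in this diff (the vendored path and executable name are illustrative, not from this repository):

const std = @import("std");

pub fn build(b: *std.build.Builder) void {
    const optimize = b.standardOptimizeOption(.{});

    // Create the ecs module from the vendored sources (path is hypothetical).
    const ecs_module = b.createModule(.{
        .source_file = std.build.FileSource{ .path = "libs/zig-ecs/src/ecs.zig" },
    });

    const exe = b.addExecutable(.{
        .name = "game",
        .root_source_file = std.build.FileSource{ .path = "src/main.zig" },
        .optimize = optimize,
    });
    exe.addModule("ecs", ecs_module);
    exe.install();
}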
@@ -8,7 +8,7 @@ pub const Velocity = struct { x: f32, y: f32 };
 pub const Position = struct { x: f32, y: f32 };
 
 pub fn main() !void {
-    var reg = ecs.Registry.init(std.testing.allocator);
+    var reg = ecs.Registry.init(std.heap.c_allocator);
     defer reg.deinit();
 
     var e1 = reg.create();
@@ -25,7 +25,10 @@ pub fn main() !void {
     while (iter.next()) |entity| {
         var pos = view.get(Position, entity);
         const vel = view.getConst(Velocity, entity);
-        std.debug.print("entity: {}, pos: {d}, vel: {d}\n", .{ entity, pos.*, vel });
+        std.debug.print(
+            "entity: {}, pos: (x = {d}, y = {d}), vel: (x = {d}, y = {d})\n",
+            .{ entity, pos.x, pos.y, vel.x, vel.y },
+        );
         pos.*.x += vel.x;
         pos.*.y += vel.y;
     }
@@ -36,6 +39,9 @@ pub fn main() !void {
     while (iter.next()) |entity| {
         const pos = view.getConst(Position, entity);
         const vel = view.getConst(Velocity, entity);
-        std.debug.print("entity: {}, pos: {d}, vel: {d}\n", .{ entity, pos, vel });
+        std.debug.print(
+            "entity: {}, pos: (x = {d}, y = {d}), vel: (x = {d}, y = {d})\n",
+            .{ entity, pos.x, pos.y, vel.x, vel.y },
+        );
     }
 }
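The example switches to std.heap.c_allocator because std.testing.allocator is only valid inside `zig test`. A general-purpose allocator is another option for a standalone executable; a minimal sketch assuming the same ecs API:

const std = @import("std");
const ecs = @import("ecs");

pub fn main() !void {
    // std.heap.c_allocator (as in the diff) requires libc; a GPA avoids that.
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    var reg = ecs.Registry.init(gpa.allocator());
    defer reg.deinit();
}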
@@ -26,9 +26,9 @@ pub fn ComponentStorage(comptime Component: type, comptime Entity: type) type {
         allocator: ?std.mem.Allocator,
         /// doesnt really belong here...used to denote group ownership
         super: usize = 0,
-        safeDeinit: fn (*Self) void,
-        safeSwap: fn (*Self, Entity, Entity, bool) void,
-        safeRemoveIfContains: fn (*Self, Entity) void,
+        safeDeinit: *const fn (*Self) void,
+        safeSwap: *const fn (*Self, Entity, Entity, bool) void,
+        safeRemoveIfContains: *const fn (*Self, Entity) void,
         construction: Signal(Entity),
         update: Signal(Entity),
         destruction: Signal(Entity),
@@ -187,7 +187,7 @@ pub fn ComponentStorage(comptime Component: type, comptime Entity: type) type {
         struct {
             /// Sort Entities according to the given comparison function. Only T == Entity is allowed. The constraint param only exists for
             /// parity with non-empty Components
-            pub fn sort(self: Self, comptime T: type, context: anytype, comptime lessThan: fn (@TypeOf(context), T, T) bool) void {
+            pub fn sort(self: Self, comptime T: type, context: anytype, comptime lessThan: *const fn (@TypeOf(context), T, T) bool) void {
                 std.debug.assert(T == Entity);
                 self.set.sort(context, lessThan);
             }
@@ -225,7 +225,7 @@ pub fn ComponentStorage(comptime Component: type, comptime Entity: type) type {
         }
 
         /// Sort Entities or Components according to the given comparison function. Valid types for T are Entity or Component.
-        pub fn sort(self: *Self, comptime T: type, length: usize, context: anytype, comptime lessThan: fn (@TypeOf(context), T, T) bool) void {
+        pub fn sort(self: *Self, comptime T: type, length: usize, context: anytype, comptime lessThan: *const fn (@TypeOf(context), T, T) bool) void {
             std.debug.assert(T == Entity or T == Component);
 
             // we have to perform a swap after the sort for all moved entities so we make a helper struct for that. In the
@@ -245,7 +245,7 @@ pub fn ComponentStorage(comptime Component: type, comptime Entity: type) type {
             const SortContext = struct {
                 storage: *Self,
                 wrapped_context: @TypeOf(context),
-                lessThan: fn (@TypeOf(context), T, T) bool,
+                lessThan: *const fn (@TypeOf(context), T, T) bool,
 
                 fn sort(this: @This(), a: Entity, b: Entity) bool {
                     const real_a = this.storage.getConst(a);
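The `fn (...)` to `*const fn (...)` change follows zig v0.10+'s rule that a bare function type is comptime-only, so anything stored at runtime must be a function pointer. A minimal sketch of the pattern (names are illustrative, not from the library):

const std = @import("std");

const Callbacks = struct {
    // Runtime-stored callbacks must be function pointers in zig v0.10+.
    on_swap: *const fn (usize, usize) void,
};

fn logSwap(a: usize, b: usize) void {
    std.debug.print("swap {} <-> {}\n", .{ a, b });
}

pub fn main() void {
    // A function name coerces to *const fn, so no explicit & is required.
    const cbs = Callbacks{ .on_swap = logSwap };
    cbs.on_swap(1, 2);
}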
@@ -43,7 +43,7 @@ pub const BasicGroup = struct {
         return self.group_data.entity_set.reverseIterator();
     }
 
-    pub fn sort(self: BasicGroup, comptime T: type, context: anytype, comptime lessThan: fn (@TypeOf(context), T, T) bool) void {
+    pub fn sort(self: BasicGroup, comptime T: type, context: anytype, comptime lessThan: *const fn (@TypeOf(context), T, T) bool) void {
         if (T == Entity) {
             self.group_data.entity_set.sort(context, lessThan);
         } else {
@@ -51,7 +51,7 @@ pub const BasicGroup = struct {
             const SortContext = struct {
                 group: BasicGroup,
                 wrapped_context: @TypeOf(context),
-                lessThan: fn (@TypeOf(context), T, T) bool,
+                lessThan: *const fn (@TypeOf(context), T, T) bool,
 
                 fn sort(this: @This(), a: Entity, b: Entity) bool {
                     const real_a = this.group.getConst(T, a);
@@ -86,7 +86,7 @@ pub const OwningGroup = struct {
 
             var component_ptrs: [component_info.fields.len][*]u8 = undefined;
             inline for (component_info.fields) |field, i| {
-                const storage = group.registry.assure(@typeInfo(field.field_type).Pointer.child);
+                const storage = group.registry.assure(@typeInfo(field.type).Pointer.child);
                 component_ptrs[i] = @ptrCast([*]u8, storage.instances.items.ptr);
             }
 
@@ -105,7 +105,7 @@ pub const OwningGroup = struct {
             // fill and return the struct
             var comps: Components = undefined;
             inline for (@typeInfo(Components).Struct.fields) |field, i| {
-                const typed_ptr = @ptrCast([*]@typeInfo(field.field_type).Pointer.child, @alignCast(@alignOf(@typeInfo(field.field_type).Pointer.child), it.component_ptrs[i]));
+                const typed_ptr = @ptrCast([*]@typeInfo(field.type).Pointer.child, @alignCast(@alignOf(@typeInfo(field.type).Pointer.child), it.component_ptrs[i]));
                 @field(comps, field.name) = &typed_ptr[it.index];
             }
             return comps;
@@ -161,8 +161,8 @@ pub const OwningGroup = struct {
         std.debug.assert(@typeInfo(Components) == .Struct);
 
         inline for (@typeInfo(Components).Struct.fields) |field| {
-            std.debug.assert(@typeInfo(field.field_type) == .Pointer);
-            const found = std.mem.indexOfScalar(u32, self.group_data.owned, utils.typeId(std.meta.Child(field.field_type)));
+            std.debug.assert(@typeInfo(field.type) == .Pointer);
+            const found = std.mem.indexOfScalar(u32, self.group_data.owned, utils.typeId(std.meta.Child(field.type)));
             std.debug.assert(found != null);
         }
     }
@@ -174,7 +174,7 @@ pub const OwningGroup = struct {
 
         var component_ptrs: [component_info.fields.len][*]u8 = undefined;
         inline for (component_info.fields) |field, i| {
-            const storage = self.registry.assure(std.meta.Child(field.field_type));
+            const storage = self.registry.assure(std.meta.Child(field.type));
             component_ptrs[i] = @ptrCast([*]u8, storage.instances.items.ptr);
         }
 
@@ -182,7 +182,7 @@ pub const OwningGroup = struct {
         const index = self.firstOwnedStorage().set.index(entity);
         var comps: Components = undefined;
         inline for (component_info.fields) |field, i| {
-            const typed_ptr = @ptrCast([*]std.meta.Child(field.field_type), @alignCast(@alignOf(std.meta.Child(field.field_type)), component_ptrs[i]));
+            const typed_ptr = @ptrCast([*]std.meta.Child(field.type), @alignCast(@alignOf(std.meta.Child(field.type)), component_ptrs[i]));
             @field(comps, field.name) = &typed_ptr[index];
         }
 
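These hunks track the std.builtin rename of StructField.field_type to StructField.type. A small sketch of the new field access when reflecting over a struct (the struct and function are illustrative):

const std = @import("std");

fn printFieldTypes(comptime T: type) void {
    // `field.type` replaces the pre-0.11 `field.field_type`.
    inline for (@typeInfo(T).Struct.fields) |field| {
        std.debug.print("{s}: {s}\n", .{ field.name, @typeName(field.type) });
    }
}

pub fn main() void {
    const Position = struct { x: f32, y: f32 };
    printFieldTypes(Position);
}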
@@ -200,7 +200,7 @@ pub const OwningGroup = struct {
         // optionally we could just use an Iterator here and pay for some slight indirection for code sharing
         var iter = self.iterator(Components);
         while (iter.next()) |comps| {
-            @call(.{ .modifier = .always_inline }, func, .{comps});
+            @call(.always_inline, func, .{comps});
         }
     }
 
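The first argument of @call changed from an options struct to a std.builtin.CallModifier value. A minimal before/after sketch:

const std = @import("std");

fn add(a: i32, b: i32) i32 {
    return a + b;
}

pub fn main() void {
    // old: @call(.{ .modifier = .always_inline }, add, .{ 1, 2 });
    const sum = @call(.always_inline, add, .{ 1, 2 });
    std.debug.print("sum = {}\n", .{sum});
}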
@@ -233,7 +233,7 @@ pub const OwningGroup = struct {
         return utils.ReverseSliceIterator(Entity).init(self.firstOwnedStorage().set.dense.items[0..self.group_data.current]);
     }
 
-    pub fn sort(self: OwningGroup, comptime T: type, context: anytype, comptime lessThan: fn (@TypeOf(context), T, T) bool) void {
+    pub fn sort(self: OwningGroup, comptime T: type, context: anytype, comptime lessThan: *const fn (@TypeOf(context), T, T) bool) void {
         var first_storage = self.firstOwnedStorage();
 
         if (T == Entity) {
@@ -244,7 +244,7 @@ pub const OwningGroup = struct {
             const SortContext = struct {
                 group: OwningGroup,
                 wrapped_context: @TypeOf(context),
-                lessThan: fn (@TypeOf(context), T, T) bool,
+                lessThan: *const fn (@TypeOf(context), T, T) bool,
 
                 fn sort(this: @This(), a: Entity, b: Entity) bool {
                     const real_a = this.group.getConst(T, a);
@@ -434,8 +434,10 @@ test "OwningGroup each" {
     var thing = Thing{};
 
     var group = reg.group(.{ i32, u32 }, .{}, .{});
-    group.each(thing.each);
-    group.each(each);
+    // group.each(thing.each); // zig v0.10.0: error: no field named 'each' in struct 'ecs.groups.test.OwningGroup each.Thing'
+    _ = thing;
+    // group.each(each); // zig v0.10.0: error: expected type 'ecs.groups.each__struct_6297', found 'ecs.groups.each__struct_3365'
+    _ = group;
 }
 
 test "multiple OwningGroups" {
@@ -422,10 +422,10 @@ pub const Registry = struct {
         return &self.type_store;
     }
 
-    pub fn sort(self: *Registry, comptime T: type, comptime lessThan: fn (void, T, T) bool) void {
+    pub fn sort(self: *Registry, comptime T: type, comptime lessThan: *const fn (void, T, T) bool) void {
         const comp = self.assure(T);
         std.debug.assert(comp.super == 0);
-        comp.sort(T, lessThan);
+        comp.sort(T, comp.len(), {}, lessThan);
     }
 
     /// Checks whether the given component belongs to any group. If so, it is not sortable directly.
@@ -568,7 +568,8 @@ pub const Registry = struct {
 
         // pre-fill the GroupData with any existing entitites that match
         if (owned.len == 0) {
-            var view_iter = self.view(owned ++ includes, excludes).iterator();
+            var view_instance = self.view(owned ++ includes, excludes);
+            var view_iter = view_instance.iterator();
             while (view_iter.next()) |entity| {
                 new_group_data.entity_set.add(entity);
             }
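Registry.sort now forwards the storage length and an empty context to ComponentStorage.sort and takes a *const fn comparator. An illustrative call site (the component type and comparator are assumptions, not code from the repository):

const std = @import("std");
const ecs = @import("ecs");

const Position = struct { x: f32, y: f32 };

fn byX(_: void, a: Position, b: Position) bool {
    return a.x < b.x;
}

pub fn main() void {
    var reg = ecs.Registry.init(std.heap.c_allocator);
    defer reg.deinit();

    const e = reg.create();
    reg.add(e, Position{ .x = 2, .y = 1 });

    // The comparator is a *const fn (void, T, T) bool; a plain function coerces to it.
    reg.sort(Position, byX);
}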
@@ -3,6 +3,30 @@ const utils = @import("utils.zig");
 const registry = @import("registry.zig");
 const ReverseSliceIterator = @import("utils.zig").ReverseSliceIterator;
 
+/// NOTE: This is a copy of `std.sort.insertionSort` with fixed function pointer
+/// syntax to avoid compilation errors.
+///
+/// Stable in-place sort. O(n) best case, O(pow(n, 2)) worst case.
+/// O(1) memory (no allocator required).
+/// This can be expressed in terms of `insertionSortContext` but the glue
+/// code is slightly longer than the direct implementation.
+fn std_sort_insertionSort_clone(
+    comptime T: type,
+    items: []T,
+    context: anytype,
+    comptime lessThan: *const fn (context: @TypeOf(context), lhs: T, rhs: T) bool,
+) void {
+    var i: usize = 1;
+    while (i < items.len) : (i += 1) {
+        const x = items[i];
+        var j: usize = i;
+        while (j > 0 and lessThan(context, x, items[j - 1])) : (j -= 1) {
+            items[j] = items[j - 1];
+        }
+        items[j] = x;
+    }
+}
+
 // TODO: fix entity_mask. it should come from EntityTraitsDefinition.
 pub fn SparseSet(comptime SparseT: type) type {
     return struct {
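The clone keeps std.sort.insertionSort's calling convention but accepts a *const fn comparator. A usage sketch, assuming it is placed next to the function inside sparse_set.zig (the function is file-private):

fn ascendingU32(_: void, lhs: u32, rhs: u32) bool {
    return lhs < rhs;
}

test "insertion sort clone (illustrative)" {
    var items = [_]u32{ 3, 1, 2 };
    std_sort_insertionSort_clone(u32, &items, {}, ascendingU32);
    try std.testing.expectEqualSlices(u32, &[_]u32{ 1, 2, 3 }, &items);
}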
@@ -33,7 +57,6 @@ pub fn SparseSet(comptime SparseT: type) type {
         }
 
         pub fn deinit(self: *Self) void {
-            self.sparse.expandToCapacity();
             for (self.sparse.items) |array| {
                 if (array) |arr| {
                     self.sparse.allocator.free(arr);
@@ -147,8 +170,8 @@ pub fn SparseSet(comptime SparseT: type) type {
         }
 
         /// Sort elements according to the given comparison function
-        pub fn sort(self: *Self, context: anytype, comptime lessThan: fn (@TypeOf(context), SparseT, SparseT) bool) void {
-            std.sort.insertionSort(SparseT, self.dense.items, context, lessThan);
+        pub fn sort(self: *Self, context: anytype, comptime lessThan: *const fn (@TypeOf(context), SparseT, SparseT) bool) void {
+            std_sort_insertionSort_clone(SparseT, self.dense.items, context, lessThan);
 
             for (self.dense.items) |_, i| {
                 const item = @intCast(SparseT, i);
@@ -158,8 +181,8 @@ pub fn SparseSet(comptime SparseT: type) type {
 
         /// Sort elements according to the given comparison function. Use this when a data array needs to stay in sync with the SparseSet
         /// by passing in a "swap_context" that contains a "swap" method with a sig of fn(ctx,SparseT,SparseT)void
-        pub fn arrange(self: *Self, length: usize, context: anytype, comptime lessThan: fn (@TypeOf(context), SparseT, SparseT) bool, swap_context: anytype) void {
-            std.sort.insertionSort(SparseT, self.dense.items[0..length], context, lessThan);
+        pub fn arrange(self: *Self, length: usize, context: anytype, comptime lessThan: *const fn (@TypeOf(context), SparseT, SparseT) bool, swap_context: anytype) void {
+            std_sort_insertionSort_clone(SparseT, self.dense.items[0..length], context, lessThan);
 
             for (self.dense.items[0..length]) |_, pos| {
                 var curr = @intCast(SparseT, pos);
@@ -190,7 +213,6 @@ pub fn SparseSet(comptime SparseT: type) type {
         }
 
         pub fn clear(self: *Self) void {
-            self.sparse.expandToCapacity();
             for (self.sparse.items) |array, i| {
                 if (array) |arr| {
                     self.sparse.allocator.free(arr);
@@ -49,7 +49,7 @@ pub fn ReverseSliceIterator(comptime T: type) type {
 }
 
 /// sorts items using lessThan and keeps sub_items with the same sort
-pub fn sortSub(comptime T1: type, comptime T2: type, items: []T1, sub_items: []T2, comptime lessThan: fn (void, lhs: T1, rhs: T1) bool) void {
+pub fn sortSub(comptime T1: type, comptime T2: type, items: []T1, sub_items: []T2, comptime lessThan: *const fn (void, lhs: T1, rhs: T1) bool) void {
     var i: usize = 1;
     while (i < items.len) : (i += 1) {
         const x = items[i];
@@ -64,7 +64,7 @@ pub fn sortSub(comptime T1: type, comptime T2: type, items: []T1, sub_items: []T
     }
 }
 
-pub fn sortSubSub(comptime T1: type, comptime T2: type, items: []T1, sub_items: []T2, context: anytype, comptime lessThan: fn (@TypeOf(context), lhs: T1, rhs: T1) bool) void {
+pub fn sortSubSub(comptime T1: type, comptime T2: type, items: []T1, sub_items: []T2, context: anytype, comptime lessThan: *const fn (@TypeOf(context), lhs: T1, rhs: T1) bool) void {
     var i: usize = 1;
     while (i < items.len) : (i += 1) {
         const x = items[i];
@@ -4,6 +4,7 @@ const utils = @import("utils.zig");
 const Registry = @import("registry.zig").Registry;
 const Storage = @import("registry.zig").Storage;
 const Entity = @import("registry.zig").Entity;
+const ReverseSliceIterator = @import("utils.zig").ReverseSliceIterator;
 
 /// single item view. Iterating raw() directly is the fastest way to get at the data. An iterator is also available to iterate
 /// either the Entities or the Components. If T is sorted note that raw() will be in the reverse order so it should be looped
@@ -63,26 +64,19 @@ pub fn MultiView(comptime n_includes: usize, comptime n_excludes: usize) type {
 
         pub const Iterator = struct {
             view: *Self,
-            index: usize,
-            entities: *const []Entity,
+            internal_it: ReverseSliceIterator(Entity),
 
             pub fn init(view: *Self) Iterator {
                 const ptr = view.registry.components.get(view.type_ids[0]).?;
-                const entities = @intToPtr(*Storage(u8), ptr).dataPtr();
+                const internal_it = @intToPtr(*Storage(u8), ptr).set.reverseIterator();
                 return .{
                     .view = view,
-                    .index = entities.len,
-                    .entities = entities,
+                    .internal_it = internal_it
                 };
             }
 
             pub fn next(it: *Iterator) ?Entity {
-                while (true) blk: {
-                    if (it.index == 0) return null;
-                    it.index -= 1;
-
-                    const entity = it.entities.*[it.index];
-
+                while (it.internal_it.next()) |entity| blk: {
                     // entity must be in all other Storages
                     for (it.view.type_ids) |tid| {
                         const ptr = it.view.registry.components.get(tid).?;
@@ -101,11 +95,19 @@ pub fn MultiView(comptime n_includes: usize, comptime n_excludes: usize) type {
 
                     return entity;
                 }
+                return null;
             }
 
             // Reset the iterator to the initial index
             pub fn reset(it: *Iterator) void {
-                it.index = it.entities.len;
+                // Assign new iterator instance in case entities have been
+                // removed or added.
+                it.internal_it = it.getInternalIteratorInstance();
+            }
+
+            fn getInternalIteratorInstance(it: *Iterator) ReverseSliceIterator(Entity) {
+                const ptr = it.view.registry.components.get(it.view.type_ids[0]).?;
+                return @intToPtr(*Storage(u8), ptr).set.reverseIterator();
             }
         };
 
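From the caller's side the iterator behaves as before; only its internals now reuse ReverseSliceIterator. A usage sketch assuming the registry/view API shown in the example above (the components are illustrative):

const std = @import("std");
const ecs = @import("ecs");

const Position = struct { x: f32, y: f32 };
const Velocity = struct { x: f32, y: f32 };

pub fn main() void {
    var reg = ecs.Registry.init(std.heap.c_allocator);
    defer reg.deinit();

    const e = reg.create();
    reg.add(e, Position{ .x = 0, .y = 0 });
    reg.add(e, Velocity{ .x = 1, .y = 1 });

    // The view is kept in a variable so iterator() gets a mutable pointer
    // (see the registry hunk above).
    var view = reg.view(.{ Position, Velocity }, .{});
    var iter = view.iterator();
    while (iter.next()) |entity| {
        std.debug.print("entity: {}\n", .{entity});
    }
    iter.reset();
}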
@@ -5,12 +5,12 @@ const std = @import("std");
 pub const Process = struct {
     const State = enum(u8) { uninitialized, running, paused, succeeded, failed, aborted, finished };
 
-    updateFn: fn (self: *Process) void,
-    startFn: ?fn (self: *Process) void = null,
-    abortedFn: ?fn (self: *Process) void = null,
-    failedFn: ?fn (self: *Process) void = null,
-    succeededFn: ?fn (self: *Process) void = null,
-    deinit: fn (self: *Process, allocator: std.mem.Allocator) void = undefined,
+    updateFn: *const fn (self: *Process) void,
+    startFn: ?*const fn (self: *Process) void = null,
+    abortedFn: ?*const fn (self: *Process) void = null,
+    failedFn: ?*const fn (self: *Process) void = null,
+    succeededFn: ?*const fn (self: *Process) void = null,
+    deinit: *const fn (self: *Process, allocator: std.mem.Allocator) void = undefined,
 
     state: State = .uninitialized,
     stopped: bool = false,
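With the hooks declared as (optional) function pointers, wiring them up and invoking them looks the same as before; a minimal sketch outside the library (names are illustrative):

const std = @import("std");

const Hooks = struct {
    startFn: ?*const fn () void = null,
    updateFn: *const fn () void,
};

fn tick() void {
    std.debug.print("tick\n", .{});
}

pub fn main() void {
    const hooks = Hooks{ .updateFn = tick };
    // Optional *const fn fields unwrap and call exactly like before.
    if (hooks.startFn) |start| start();
    hooks.updateFn();
}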
@@ -130,7 +130,7 @@ pub const Scheduler = struct {
     }
 };
 
-test "" {
+test "scheduler.update" {
     std.debug.print("\n", .{});
 
     const Tester = struct {
@@ -178,7 +178,8 @@ test "" {
     var scheduler = Scheduler.init(std.testing.allocator);
     defer scheduler.deinit();
 
-    _ = scheduler.attach(Tester, 33).next(Tester, 66).next(Tester, 88).next(Tester, 99);
+    var continuation = scheduler.attach(Tester, 33);
+    _ = continuation.next(Tester, 66).next(Tester, 88).next(Tester, 99);
     scheduler.update();
     scheduler.update();
     scheduler.update();
@@ -202,7 +203,8 @@ test "scheduler.clear" {
     var scheduler = Scheduler.init(std.testing.allocator);
     defer scheduler.deinit();
 
-    _ = scheduler.attach(Tester, {}).next(Tester, {});
+    var continuation = scheduler.attach(Tester, {});
+    _ = continuation.next(Tester, {});
     scheduler.clear();
     scheduler.update();
 }
@@ -228,7 +230,8 @@ test "scheduler.attach.next" {
     defer scheduler.deinit();
 
     var counter: usize = 0;
-    _ = scheduler.attach(Tester, &counter).next(Tester, &counter);
+    var continuation = scheduler.attach(Tester, &counter);
+    _ = continuation.next(Tester, &counter);
     scheduler.update();
     scheduler.update();
     try std.testing.expectEqual(counter, 2);
@@ -37,8 +37,8 @@ pub const Assets = struct {
         return self.get(ReturnType(loader, true)).load(id, loader);
     }
 
-    fn ReturnType(comptime loader: anytype, strip_ptr: bool) type {
-        var ret = @typeInfo(@TypeOf(@field(loader, "load"))).BoundFn.return_type.?;
+    fn ReturnType(comptime loader: anytype, comptime strip_ptr: bool) type {
+        var ret = @typeInfo(@typeInfo(@TypeOf(@field(loader, "load"))).Pointer.child).Fn.return_type.?;
         if (strip_ptr) {
             return std.meta.Child(ret);
         }
@@ -62,13 +62,19 @@ test "assets" {
     };
 
     const OtherThingLoadArgs = struct {
-        pub fn load(_: @This()) *OtherThing {
+        // Use actual field "load" as function pointer to avoid zig v0.10.0
+        // compiler error: "error: no field named 'load' in struct '...'"
+        load: *const fn (_: @This()) *OtherThing,
+        pub fn loadFn(_: @This()) *OtherThing {
             return std.testing.allocator.create(OtherThing) catch unreachable;
         }
     };
 
     const ThingLoadArgs = struct {
-        pub fn load(_: @This()) *Thing {
+        // Use actual field "load" as function pointer to avoid zig v0.10.0
+        // compiler error: "error: no field named 'load' in struct '...'"
+        load: *const fn (_: @This()) *Thing,
+        pub fn loadFn(_: @This()) *Thing {
             return std.testing.allocator.create(Thing) catch unreachable;
         }
     };
@@ -76,16 +82,16 @@ test "assets" {
     var assets = Assets.init(std.testing.allocator);
     defer assets.deinit();
 
-    _ = assets.get(Thing).load(6, ThingLoadArgs{});
+    _ = assets.get(Thing).load(6, ThingLoadArgs{ .load = ThingLoadArgs.loadFn });
     try std.testing.expectEqual(assets.get(Thing).size(), 1);
 
-    _ = assets.load(4, ThingLoadArgs{});
+    _ = assets.load(4, ThingLoadArgs{ .load = ThingLoadArgs.loadFn });
     try std.testing.expectEqual(assets.get(Thing).size(), 2);
 
-    _ = assets.get(OtherThing).load(6, OtherThingLoadArgs{});
+    _ = assets.get(OtherThing).load(6, OtherThingLoadArgs{ .load = OtherThingLoadArgs.loadFn });
     try std.testing.expectEqual(assets.get(OtherThing).size(), 1);
 
-    _ = assets.load(8, OtherThingLoadArgs{});
+    _ = assets.load(8, OtherThingLoadArgs{ .load = OtherThingLoadArgs.loadFn });
     try std.testing.expectEqual(assets.get(OtherThing).size(), 2);
 
     assets.get(OtherThing).clear();
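With BoundFn gone, the loader's `load` is now a *const fn field, so its return type is reached by unwrapping the pointer's child before reading the Fn info. A self-contained sketch of that reflection (the types are illustrative):

const std = @import("std");

var global_thing: u32 = 0;

fn makeThing() *u32 {
    return &global_thing;
}

test "return type through a function pointer" {
    const Loader = struct {
        load: *const fn () *u32,
    };
    const loader = Loader{ .load = makeThing };
    // Pointer.child yields the fn type; Fn.return_type its result type.
    const Ret = @typeInfo(@typeInfo(@TypeOf(loader.load)).Pointer.child).Fn.return_type.?;
    try std.testing.expect(Ret == *u32);
}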
@@ -8,7 +8,7 @@ pub fn Cache(comptime T: type) type {
     return struct {
         const Self = @This();
 
-        safe_deinit: fn (*@This()) void,
+        safe_deinit: *const fn (*@This()) void,
         resources: std.AutoHashMap(u32, *T),
         allocator: ?std.mem.Allocator = null,
 
@@ -42,12 +42,12 @@ pub fn Cache(comptime T: type) type {
             self.safe_deinit(self);
         }
 
-        pub fn load(self: *@This(), id: u32, comptime loader: anytype) @typeInfo(@TypeOf(@field(loader, "load"))).BoundFn.return_type.? {
+        pub fn load(self: *@This(), id: u32, comptime loader: anytype) @typeInfo(@typeInfo(@TypeOf(@field(loader, "load"))).Pointer.child).Fn.return_type.? {
             if (self.resources.get(id)) |resource| {
                 return resource;
             }
 
-            var resource = loader.load();
+            var resource = loader.load(loader);
             _ = self.resources.put(id, resource) catch unreachable;
             return resource;
         }
@@ -59,7 +59,7 @@ pub fn Cache(comptime T: type) type {
         pub fn remove(self: *@This(), id: u32) void {
             if (self.resources.fetchRemove(id)) |kv| {
                 if (@hasDecl(T, "deinit")) {
-                    @call(.{ .modifier = .always_inline }, @field(kv.value, "deinit"), .{});
+                    @call(.always_inline, @field(kv.value, "deinit"), .{});
                 }
             }
         }
@@ -69,7 +69,7 @@ pub fn Cache(comptime T: type) type {
             if (@hasDecl(T, "deinit")) {
                 var iter = self.resources.iterator();
                 while (iter.next()) |kv| {
-                    @call(.{ .modifier = .always_inline }, @field(kv.value_ptr.*, "deinit"), .{});
+                    @call(.always_inline, @field(kv.value_ptr.*, "deinit"), .{});
                 }
             }
             self.resources.clearAndFree();
@@ -92,7 +92,10 @@ test "cache" {
     };
 
     const ThingLoadArgs = struct {
-        pub fn load(self: @This()) *Thing {
+        // Use actual field "load" as function pointer to avoid zig v0.10.0
+        // compiler error: "error: no field named 'load' in struct '...'"
+        load: *const fn (self: @This()) *Thing,
+        pub fn loadFn(self: @This()) *Thing {
             _ = self;
             return std.testing.allocator.create(Thing) catch unreachable;
         }
@@ -101,8 +104,8 @@ test "cache" {
     var cache = Cache(Thing).init(std.testing.allocator);
     defer cache.deinit();
 
-    _ = cache.load(utils.hashString("my/id"), ThingLoadArgs{});
-    _ = cache.load(utils.hashString("another/id"), ThingLoadArgs{});
+    _ = cache.load(utils.hashString("my/id"), ThingLoadArgs{ .load = ThingLoadArgs.loadFn });
+    _ = cache.load(utils.hashString("another/id"), ThingLoadArgs{ .load = ThingLoadArgs.loadFn });
     try std.testing.expectEqual(cache.size(), 2);
 
     cache.remove(utils.hashString("my/id"));
@@ -7,8 +7,8 @@ pub fn Delegate(comptime Event: type) type {
 
         ctx_ptr_address: usize = 0,
         callback: union(enum) {
-            free: fn (Event) void,
-            bound: fn (usize, Event) void,
+            free: *const fn (Event) void,
+            bound: *const fn (usize, Event) void,
         },
 
         /// sets a bound function as the Delegate callback
@@ -22,7 +22,7 @@ pub fn Delegate(comptime Event: type) type {
                 .callback = .{
                     .bound = struct {
                         fn cb(self: usize, param: Event) void {
-                            @call(.{ .modifier = .always_inline }, @field(@intToPtr(T, self), fn_name), .{param});
+                            @call(.always_inline, @field(@intToPtr(T, self), fn_name), .{param});
                         }
                     }.cb,
                 },
@@ -30,7 +30,7 @@ pub fn Delegate(comptime Event: type) type {
         }
 
         /// sets a free function as the Delegate callback
-        pub fn initFree(func: fn (Event) void) Self {
+        pub fn initFree(func: *const fn (Event) void) Self {
             return Self{
                 .callback = .{ .free = func },
             };
@@ -38,12 +38,12 @@ pub fn Delegate(comptime Event: type) type {
 
         pub fn trigger(self: Self, param: Event) void {
            switch (self.callback) {
-                .free => |func| @call(.{}, func, .{param}),
-                .bound => |func| @call(.{}, func, .{ self.ctx_ptr_address, param }),
+                .free => |func| @call(.auto, func, .{param}),
+                .bound => |func| @call(.auto, func, .{ self.ctx_ptr_address, param }),
             }
         }
 
-        pub fn containsFree(self: Self, callback: fn (Event) void) bool {
+        pub fn containsFree(self: Self, callback: *const fn (Event) void) bool {
             return switch (self.callback) {
                 .free => |func| func == callback,
                 else => false,
@@ -18,7 +18,7 @@ pub fn Sink(comptime Event: type) type {
             return Self{ .insert_index = owning_signal.calls.items.len };
         }
 
-        pub fn before(self: Self, callback: ?fn (Event) void) Self {
+        pub fn before(self: Self, callback: ?*const fn (Event) void) Self {
             if (callback) |cb| {
                 if (self.indexOf(cb)) |index| {
                     return Self{ .insert_index = index };
@@ -36,7 +36,7 @@ pub fn Sink(comptime Event: type) type {
             return self;
         }
 
-        pub fn connect(self: Self, callback: fn (Event) void) void {
+        pub fn connect(self: Self, callback: *const fn (Event) void) void {
             std.debug.assert(self.indexOf(callback) == null);
             _ = owning_signal.calls.insert(self.insert_index, Delegate(Event).initFree(callback)) catch unreachable;
         }
@@ -46,7 +46,7 @@ pub fn Sink(comptime Event: type) type {
             _ = owning_signal.calls.insert(self.insert_index, Delegate(Event).initBound(ctx, fn_name)) catch unreachable;
         }
 
-        pub fn disconnect(self: Self, callback: fn (Event) void) void {
+        pub fn disconnect(self: Self, callback: *const fn (Event) void) void {
             if (self.indexOf(callback)) |index| {
                 _ = owning_signal.calls.swapRemove(index);
             }
@@ -58,7 +58,7 @@ pub fn Sink(comptime Event: type) type {
             }
         }
 
-        fn indexOf(_: Self, callback: fn (Event) void) ?usize {
+        fn indexOf(_: Self, callback: *const fn (Event) void) ?usize {
             for (owning_signal.calls.items) |call, i| {
                 if (call.containsFree(callback)) {
                     return i;
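Callbacks passed to Sink.connect/disconnect (and to Delegate.initFree) are now *const fn values as well; a plain function still coerces at the call site. A small sketch built only on the Delegate API shown above (the import path is an assumption):

const std = @import("std");
const Delegate = @import("ecs").Delegate; // assumed to be re-exported from the package root

fn onEvent(value: i32) void {
    std.debug.print("event: {}\n", .{value});
}

test "free delegate with *const fn callback" {
    const d = Delegate(i32).initFree(onEvent);
    d.trigger(42);
}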
@ -82,12 +82,12 @@ test "singletons" {
|
||||
defer reg.deinit();
|
||||
|
||||
var pos = Position{ .x = 5, .y = 5 };
|
||||
reg.singletons.add(pos);
|
||||
try std.testing.expect(reg.singletons.has(Position));
|
||||
try std.testing.expectEqual(reg.singletons.get(Position).*, pos);
|
||||
reg.singletons().add(pos);
|
||||
try std.testing.expect(reg.singletons().has(Position));
|
||||
try std.testing.expectEqual(reg.singletons().get(Position).*, pos);
|
||||
|
||||
reg.singletons.remove(Position);
|
||||
try std.testing.expect(!reg.singletons.has(Position));
|
||||
reg.singletons().remove(Position);
|
||||
try std.testing.expect(!reg.singletons().has(Position));
|
||||
}
|
||||
|
||||
test "destroy" {
|
||||
|