Update to latest zig release

branch: master · Lucas · 2 years ago
parent 69afaeab3f · commit 217b1a9e51

@@ -9,7 +9,10 @@ pub fn build(b: *Builder) void {
     });

     // use a different cache folder for macos arm builds
-    b.cache_root = if (builtin.os.tag == .macos and builtin.target.cpu.arch == .aarch64) "zig-arm-cache" else "zig-cache";
+    b.cache_root = .{
+        .handle = std.fs.cwd(),
+        .path = if (builtin.os.tag == .macos and builtin.target.cpu.arch == .aarch64) "zig-arm-cache" else "zig-cache",
+    };

     const examples = [_][2][]const u8{
         [_][]const u8{ "view_vs_group", "examples/view_vs_group.zig" },
@@ -17,7 +20,7 @@ pub fn build(b: *Builder) void {
         [_][]const u8{ "simple", "examples/simple.zig" },
     };

-    for (examples) |example, i| {
+    for (examples, 0..) |example, i| {
         const name = if (i == 0) "ecs" else example[0];
         const source = example[1];
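This is the change that dominates the commit: Zig 0.11 removed the implicit index capture, so every `for (items) |item, i|` becomes `for (items, 0..) |item, i|`, zipping the slice with an unbounded counter range. A standalone sketch of the new form (using the two-argument `@intCast` style this repo still targets):

```zig
const std = @import("std");

test "zig 0.11 for loop with index" {
    const items = [_]u32{ 10, 20, 30 };
    // The slice and the range `0..` are iterated in lockstep, so `i`
    // replaces the old implicit second capture.
    for (items, 0..) |item, i| {
        try std.testing.expectEqual(@intCast(u32, (i + 1) * 10), item);
    }
}
```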
@@ -26,17 +29,18 @@ pub fn build(b: *Builder) void {
             .root_source_file = std.build.FileSource{ .path = source },
             .optimize = optimize,
         });
-        exe.setOutputDir(std.fs.path.join(b.allocator, &[_][]const u8{ b.cache_root, "bin" }) catch unreachable);
+        // exe.setOutputDir(std.fs.path.join(b.allocator, &[_][]const u8{ b.cache_root, "bin" }) catch unreachable);
+        exe.output_dirname_source = .{ .path = std.fs.path.join(b.allocator, &[_][]const u8{ b.cache_root.path.?, "bin" }) catch unreachable, .step = &exe.step };
         exe.addModule("ecs", ecs_module);
         exe.linkLibC();

         const docs = exe;
         docs.emit_docs = .emit;
-        const doc = b.step("docs", "Generate documentation");
+        const doc = b.step(b.fmt("{s}-docs", .{name}), "Generate documentation");
         doc.dependOn(&docs.step);

-        const run_cmd = exe.run();
+        const run_cmd = b.addRunArtifact(exe);
         const exe_step = b.step(name, b.fmt("run {s}.zig", .{name}));
         exe_step.dependOn(&run_cmd.step);
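Three build-API updates land in this hunk: `setOutputDir` is gone (the commit points `output_dirname_source` at a path under the cache root instead), the docs step is renamed per example with `b.fmt("{s}-docs", ...)` so the loop no longer registers the same "docs" step name once per example, and `exe.run()` is replaced by `b.addRunArtifact(exe)`. A minimal sketch of the new run-step wiring, assuming a single hypothetical "app" executable:

```zig
const std = @import("std");

pub fn build(b: *std.build.Builder) void {
    const exe = b.addExecutable(.{
        .name = "app",
        .root_source_file = std.build.FileSource{ .path = "src/main.zig" },
        .optimize = b.standardOptimizeOption(.{}),
    });

    // addRunArtifact replaces the removed exe.run() helper.
    const run_cmd = b.addRunArtifact(exe);
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);
}
```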

@@ -326,7 +326,7 @@ test "iterate" {
     store.add(5, 66.45);
     store.add(7, 66.45);

-    for (store.data()) |entity, i| {
+    for (store.data(), 0..) |entity, i| {
         if (i == 0) {
             try std.testing.expectEqual(entity, 3);
         }
@@ -392,7 +392,7 @@ test "sort empty component" {
     const asc_u32 = comptime std.sort.asc(u32);
     store.sort(u32, {}, asc_u32);

-    for (store.data()) |e, i| {
+    for (store.data(), 0..) |e, i| {
         try std.testing.expectEqual(@intCast(u32, i), e);
     }

@@ -85,7 +85,7 @@ pub const OwningGroup = struct {
         const component_info = @typeInfo(Components).Struct;
         var component_ptrs: [component_info.fields.len][*]u8 = undefined;
-        inline for (component_info.fields) |field, i| {
+        inline for (component_info.fields, 0..) |field, i| {
             const storage = group.registry.assure(@typeInfo(field.type).Pointer.child);
             component_ptrs[i] = @ptrCast([*]u8, storage.instances.items.ptr);
         }
@@ -104,7 +104,7 @@ pub const OwningGroup = struct {
             // fill and return the struct
             var comps: Components = undefined;
-            inline for (@typeInfo(Components).Struct.fields) |field, i| {
+            inline for (@typeInfo(Components).Struct.fields, 0..) |field, i| {
                 const typed_ptr = @ptrCast([*]@typeInfo(field.type).Pointer.child, @alignCast(@alignOf(@typeInfo(field.type).Pointer.child), it.component_ptrs[i]));
                 @field(comps, field.name) = &typed_ptr[it.index];
             }
@@ -173,7 +173,7 @@ pub const OwningGroup = struct {
         const component_info = @typeInfo(Components).Struct;
         var component_ptrs: [component_info.fields.len][*]u8 = undefined;
-        inline for (component_info.fields) |field, i| {
+        inline for (component_info.fields, 0..) |field, i| {
             const storage = self.registry.assure(std.meta.Child(field.type));
             component_ptrs[i] = @ptrCast([*]u8, storage.instances.items.ptr);
         }
@@ -181,7 +181,7 @@ pub const OwningGroup = struct {
         // fill the struct
         const index = self.firstOwnedStorage().set.index(entity);
         var comps: Components = undefined;
-        inline for (component_info.fields) |field, i| {
+        inline for (component_info.fields, 0..) |field, i| {
             const typed_ptr = @ptrCast([*]std.meta.Child(field.type), @alignCast(@alignOf(std.meta.Child(field.type)), component_ptrs[i]));
             @field(comps, field.name) = &typed_ptr[index];
         }
@@ -191,8 +191,7 @@ pub const OwningGroup = struct {
     pub fn each(self: OwningGroup, comptime func: anytype) void {
         const Components = switch (@typeInfo(@TypeOf(func))) {
-            .BoundFn => |func_info| func_info.args[1].arg_type.?,
-            .Fn => |func_info| func_info.args[0].arg_type.?,
+            .Fn => |func_info| func_info.params[0].type.?,
             else => std.debug.assert("invalid func"),
         };
         self.validate(Components);
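Two `std.builtin.Type` changes meet here: the `.BoundFn` variant was deleted outright, and in `Type.Fn` the `args` slice (whose entries had an `arg_type` field) was renamed to `params` with a `type` field. A self-contained sketch of pulling a first-parameter type the new way (`takesPosition` and `Position` are stand-ins, not names from this repo):

```zig
const std = @import("std");

const Position = struct { x: f32 = 0 };

fn takesPosition(pos: *Position) void {
    pos.x += 1;
}

test "first parameter type via @typeInfo" {
    // Type.Fn now exposes `params[n].type` instead of `args[n].arg_type`.
    const P = switch (@typeInfo(@TypeOf(takesPosition))) {
        .Fn => |fn_info| fn_info.params[0].type.?,
        else => unreachable,
    };
    try std.testing.expect(P == *Position);
}
```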

@@ -146,7 +146,7 @@ pub const Registry = struct {
         /// which ensures more specialized (ie less matches) will always be swapping inside the bounds of
         /// the less specialized groups.
         fn findInsertionIndex(self: GroupData, groups: []*GroupData) ?usize {
-            for (groups) |grp, i| {
+            for (groups, 0..) |grp, i| {
                 var overlapping: u8 = 0;
                 for (grp.owned) |grp_owned| {
                     if (std.mem.indexOfScalar(u32, self.owned, grp_owned)) |_| overlapping += 1;
@@ -443,13 +443,13 @@ pub const Registry = struct {
             return BasicView(includes[0]).init(self.assure(includes[0]));

         var includes_arr: [includes.len]u32 = undefined;
-        inline for (includes) |t, i| {
+        inline for (includes, 0..) |t, i| {
             _ = self.assure(t);
             includes_arr[i] = utils.typeId(t);
         }

         var excludes_arr: [excludes.len]u32 = undefined;
-        inline for (excludes) |t, i| {
+        inline for (excludes, 0..) |t, i| {
             _ = self.assure(t);
             excludes_arr[i] = utils.typeId(t);
         }
@@ -486,19 +486,19 @@ pub const Registry = struct {
         // gather up all our Types as typeIds
         var includes_arr: [includes.len]u32 = undefined;
-        inline for (includes) |t, i| {
+        inline for (includes, 0..) |t, i| {
             _ = self.assure(t);
             includes_arr[i] = utils.typeId(t);
         }

         var excludes_arr: [excludes.len]u32 = undefined;
-        inline for (excludes) |t, i| {
+        inline for (excludes, 0..) |t, i| {
             _ = self.assure(t);
             excludes_arr[i] = utils.typeId(t);
         }

         var owned_arr: [owned.len]u32 = undefined;
-        inline for (owned) |t, i| {
+        inline for (owned, 0..) |t, i| {
             _ = self.assure(t);
             owned_arr[i] = utils.typeId(t);
         }
@@ -628,7 +628,7 @@ pub const Registry = struct {
         };

         var names: [types.len][]const u8 = undefined;
-        for (names) |*name, i| {
+        for (&names, 0..) |*name, i| {
             name.* = @typeName(types[i]);
         }
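Note the extra `&` here, which the plain index-capture hunks don't need: with the new loop syntax, a `|*name|` pointer capture requires iterating the array by reference (`&names` is a pointer to the array); iterating `names` by value would only hand out copies. A standalone sketch:

```zig
const std = @import("std");

test "mutate array elements in a 0.11 for loop" {
    var names: [3][]const u8 = undefined;
    // `&names` yields element pointers, so `name.*` writes into the array;
    // `for (names, 0..)` would capture by-value copies instead.
    for (&names, 0..) |*name, i| {
        name.* = if (i == 0) "first" else "rest";
    }
    try std.testing.expectEqualStrings("first", names[0]);
}
```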

@@ -84,12 +84,13 @@ pub fn SparseSet(comptime SparseT: type) type {
                 const start_pos = self.sparse.items.len;
                 self.sparse.resize(pos + 1) catch unreachable;
                 self.sparse.expandToCapacity();
-                std.mem.set(?[]SparseT, self.sparse.items[start_pos..], null);
+                @memset(self.sparse.items[start_pos..], null);
             }

             if (self.sparse.items[pos] == null) {
                 var new_page = self.sparse.allocator.alloc(SparseT, page_size) catch unreachable;
-                std.mem.set(SparseT, new_page, std.math.maxInt(SparseT));
+                @memset(new_page, std.math.maxInt(SparseT));
                 self.sparse.items[pos] = new_page;
             }
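`std.mem.set(T, slice, value)` was folded into the `@memset` builtin, which infers the element type from the destination, so the explicit type parameter disappears:

```zig
const std = @import("std");

test "@memset replaces std.mem.set" {
    var page: [4]u32 = undefined;
    // Element type is inferred from the destination; no leading `u32` arg.
    @memset(&page, std.math.maxInt(u32));
    try std.testing.expectEqual(@as(u32, std.math.maxInt(u32)), page[0]);
}
```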
@@ -173,7 +174,7 @@ pub fn SparseSet(comptime SparseT: type) type {
         pub fn sort(self: *Self, context: anytype, comptime lessThan: *const fn (@TypeOf(context), SparseT, SparseT) bool) void {
             std_sort_insertionSort_clone(SparseT, self.dense.items, context, lessThan);

-            for (self.dense.items) |_, i| {
+            for (self.dense.items, 0..) |_, i| {
                 const item = @intCast(SparseT, i);
                 self.sparse.items[self.page(self.dense.items[self.page(item)])].?[self.offset(self.dense.items[self.page(item)])] = @intCast(SparseT, i);
             }
@@ -184,7 +185,7 @@ pub fn SparseSet(comptime SparseT: type) type {
         pub fn arrange(self: *Self, length: usize, context: anytype, comptime lessThan: *const fn (@TypeOf(context), SparseT, SparseT) bool, swap_context: anytype) void {
             std_sort_insertionSort_clone(SparseT, self.dense.items[0..length], context, lessThan);

-            for (self.dense.items[0..length]) |_, pos| {
+            for (self.dense.items[0..length], 0..) |_, pos| {
                 var curr = @intCast(SparseT, pos);
                 var next = self.index(self.dense.items[curr]);
@@ -213,7 +214,7 @@ pub fn SparseSet(comptime SparseT: type) type {
         }

         pub fn clear(self: *Self) void {
-            for (self.sparse.items) |array, i| {
+            for (self.sparse.items, 0..) |array, i| {
                 if (array) |arr| {
                     self.sparse.allocator.free(arr);
                     self.sparse.items[i] = null;
@@ -358,7 +359,7 @@ test "respect 2" {
     set.sort({}, desc_u32);

-    for (set.dense.items) |item, i| {
+    for (set.dense.items, 0..) |item, i| {
         if (i < set.dense.items.len - 1) {
             std.debug.assert(item > set.dense.items[i + 1]);
         }

@@ -81,12 +81,12 @@ pub fn sortSubSub(comptime T1: type, comptime T2: type, items: []T1, sub_items:
 /// comptime string hashing for the type names
 pub fn typeId(comptime T: type) u32 {
-    comptime return hashStringFnv(u32, @typeName(T));
+    return hashStringFnv(u32, @typeName(T));
 }

 /// comptime string hashing for the type names
 pub fn typeId64(comptime T: type) u64 {
-    comptime return hashStringFnv(u64, @typeName(T));
+    return hashStringFnv(u64, @typeName(T));
 }

 /// u32 Fowler-Noll-Vo string hash
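Recent compilers reject `comptime return` in this position. Dropping the keyword is safe here: `@typeName(T)` is comptime-known, so the hash still constant-folds wherever the result is required at compile time, and callers that need to guarantee that can force evaluation at the call site. A self-contained sketch (the body mirrors what an FNV-1a hash like `hashStringFnv` does, using the standard u32 offset basis and prime):

```zig
const std = @import("std");

// Sketch of a typeId-style comptime string hash. With `comptime return`
// gone, the body is ordinary code that the compiler folds whenever the
// argument is comptime-known.
fn typeId(comptime T: type) u32 {
    var hash: u32 = 2166136261; // FNV-1a offset basis
    for (@typeName(T)) |c| {
        hash = (hash ^ c) *% 16777619; // FNV-1a prime
    }
    return hash;
}

test "typeId still folds at comptime" {
    const Position = struct { x: f32 = 0 };
    // Forcing comptime at the call site replaces the old `comptime return`.
    const id = comptime typeId(Position);
    try std.testing.expect(id != 0);
}
```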
@@ -126,7 +126,7 @@ test "ReverseSliceIterator" {
     var slice = std.testing.allocator.alloc(usize, 10) catch unreachable;
     defer std.testing.allocator.free(slice);

-    for (slice) |*item, i| {
+    for (slice, 0..) |*item, i| {
         item.* = i;
     }

@@ -69,10 +69,7 @@ pub fn MultiView(comptime n_includes: usize, comptime n_excludes: usize) type {
             pub fn init(view: *Self) Iterator {
                 const ptr = view.registry.components.get(view.type_ids[0]).?;
                 const internal_it = @intToPtr(*Storage(u8), ptr).set.reverseIterator();
-                return .{
-                    .view = view,
-                    .internal_it = internal_it
-                };
+                return .{ .view = view, .internal_it = internal_it };
             }

             pub fn next(it: *Iterator) ?Entity {
@@ -130,7 +127,7 @@ pub fn MultiView(comptime n_includes: usize, comptime n_excludes: usize) type {
         fn sort(self: *Self) void {
             // get our component counts in an array so we can sort the type_ids based on how many entities are in each
             var sub_items: [n_includes]usize = undefined;
-            for (self.type_ids) |tid, i| {
+            for (self.type_ids, 0..) |tid, i| {
                 const ptr = self.registry.components.get(tid).?;
                 const store = @intToPtr(*Storage(u8), ptr);
                 sub_items[i] = store.len();
@@ -201,7 +198,7 @@ test "single basic view data" {
     try std.testing.expectEqual(view.get(3).*, 30);

-    for (view.data()) |entity, i| {
+    for (view.data(), 0..) |entity, i| {
         if (i == 0)
             try std.testing.expectEqual(entity, 3);
         if (i == 1)
@@ -210,7 +207,7 @@ test "single basic view data" {
             try std.testing.expectEqual(entity, 7);
     }

-    for (view.raw()) |data, i| {
+    for (view.raw(), 0..) |data, i| {
         if (i == 0)
             try std.testing.expectEqual(data, 30);
         if (i == 1)

@@ -59,7 +59,7 @@ pub fn Sink(comptime Event: type) type {
         }

         fn indexOf(_: Self, callback: *const fn (Event) void) ?usize {
-            for (owning_signal.calls.items) |call, i| {
+            for (owning_signal.calls.items, 0..) |call, i| {
                 if (call.containsFree(callback)) {
                     return i;
                 }
@@ -68,7 +68,7 @@ pub fn Sink(comptime Event: type) type {
         }

         fn indexOfBound(_: Self, ctx: anytype) ?usize {
-            for (owning_signal.calls.items) |call, i| {
+            for (owning_signal.calls.items, 0..) |call, i| {
                 if (call.containsBound(ctx)) {
                     return i;
                 }

@@ -14,7 +14,7 @@ const Rotation = struct { x: f32 = 0 };
 fn printStore(store: anytype, name: []const u8) void {
     std.debug.print("--- {} ---\n", .{name});
-    for (store.set.dense.items) |e, i| {
+    for (store.set.dense.items, 0..) |e, i| {
         std.debug.print("e[{}] s[{}]{}", .{ e, store.set.page(store.set.dense.items[i]), store.set.sparse.items[store.set.page(store.set.dense.items[i])] });
         std.debug.print(" ({d:.2}) ", .{store.instances.items[i]});
     }
