diff --git a/.gitignore b/.gitignore index af3120ab439e4e..b791b8bc3a047f 100644 --- a/.gitignore +++ b/.gitignore @@ -151,6 +151,7 @@ src/bake/generated.ts test/cli/install/registry/packages/publish-pkg-* test/cli/install/registry/packages/@secret/publish-pkg-8 test/js/third_party/prisma/prisma/sqlite/dev.db-journal +tmp # Dependencies /vendor @@ -178,4 +179,4 @@ test/js/third_party/prisma/prisma/sqlite/dev.db-journal .buildkite/ci.yml *.sock -scratch*.{js,ts,tsx,cjs,mjs} \ No newline at end of file +scratch*.{js,ts,tsx,cjs,mjs} diff --git a/build.zig b/build.zig index 01ebdaaeea9d2d..a487dc66239ee7 100644 --- a/build.zig +++ b/build.zig @@ -19,7 +19,7 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem; const pathRel = fs.path.relative; /// Do not rename this constant. It is scanned by some scripts to determine which zig version to install. -const recommended_zig_version = "0.13.0"; +const recommended_zig_version = "0.14.0-dev.2987+183bb8b08"; comptime { if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) { @@ -154,8 +154,6 @@ pub fn build(b: *Build) !void { std.log.info("zig compiler v{s}", .{builtin.zig_version_string}); checked_file_exists = std.AutoHashMap(u64, void).init(b.allocator); - b.zig_lib_dir = b.zig_lib_dir orelse b.path("vendor/zig/lib"); - // TODO: Upgrade path for 0.14.0 // b.graph.zig_lib_directory = brk: { // const sub_path = "vendor/zig/lib"; @@ -209,7 +207,7 @@ pub fn build(b: *Build) !void { const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0"; b.reference_trace = ref_trace: { - const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 16; + const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 24; break :ref_trace if (trace == 0) null else trace; }; @@ -331,11 +329,25 @@ pub fn build(b: *Build) !void { .{ .os = .windows, .arch = .x86_64 }, }); } + { + const step = b.step("check-macos", "Check for semantic analysis 
errors on macOS"); + addMultiCheck(b, step, build_options, &.{ + .{ .os = .mac, .arch = .x86_64 }, + .{ .os = .mac, .arch = .aarch64 }, + }); + } + { + const step = b.step("check-linux", "Check for semantic analysis errors on Linux"); + addMultiCheck(b, step, build_options, &.{ + .{ .os = .linux, .arch = .x86_64 }, + .{ .os = .linux, .arch = .aarch64 }, + }); + } // zig build translate-c-headers { const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out"); - step.dependOn(&b.addInstallFile(getTranslateC(b, b.host, .Debug).getOutput(), "translated-c-headers.zig").step); + step.dependOn(&b.addInstallFile(getTranslateC(b, b.graph.host, .Debug).getOutput(), "translated-c-headers.zig").step); } // zig build enum-extractor @@ -363,7 +375,7 @@ pub fn addMultiCheck( const check_target = b.resolveTargetQuery(.{ .os_tag = OperatingSystem.stdOSTag(check.os), .cpu_arch = check.arch, - .cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch, + .cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_arch_os, .os_version_min = getOSVersionMin(check.os), .glibc_version = if (check.musl) null else getOSGlibCVersion(check.os), }); @@ -429,7 +441,7 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile { .strip = false, // stripped at the end }); obj.bundle_compiler_rt = false; - obj.formatted_panics = true; obj.root_module.omit_frame_pointer = false; // Link libc @@ -614,7 +625,7 @@ const WindowsShim = struct { .optimize = .ReleaseFast, .use_llvm = true, .use_lld = true, - .unwind_tables = false, + .unwind_tables = .none, .omit_frame_pointer = true, .strip = true, .linkage = .static, diff --git a/cmake/scripts/DownloadZig.cmake b/cmake/scripts/DownloadZig.cmake index f7f9d8789e90d6..2fb68ac4ca28b9 100644 --- a/cmake/scripts/DownloadZig.cmake +++ b/cmake/scripts/DownloadZig.cmake @@ -38,7 +38,7 @@ else() set(ZIG_FILENAME ${ZIG_NAME}.tar.xz) endif() -set(ZIG_DOWNLOAD_URL 
https://ziglang.org/download/${ZIG_VERSION}/${ZIG_FILENAME}) +set(ZIG_DOWNLOAD_URL http://mirrors.nektro.net/zig/${ZIG_VERSION}/${ZIG_FILENAME}) execute_process( COMMAND diff --git a/cmake/tools/SetupZig.cmake b/cmake/tools/SetupZig.cmake index e5a5e574ef99aa..d1c6727c831c62 100644 --- a/cmake/tools/SetupZig.cmake +++ b/cmake/tools/SetupZig.cmake @@ -20,8 +20,8 @@ else() unsupported(CMAKE_SYSTEM_NAME) endif() -optionx(ZIG_VERSION STRING "The zig version of the compiler to download" DEFAULT "0.13.0") -optionx(ZIG_COMMIT STRING "The zig commit to use in oven-sh/zig" DEFAULT "131a009ba2eb127a3447d05b9e12f710429aa5ee") +optionx(ZIG_VERSION STRING "The zig version of the compiler to download" DEFAULT "0.14.0-dev.2987+183bb8b08") +optionx(ZIG_COMMIT STRING "The zig commit to use in oven-sh/zig" DEFAULT "b11877fd3e8fbc031c17872155ed481d5ba4e6af") optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET}) if(CMAKE_BUILD_TYPE STREQUAL "Release") diff --git a/src/Global.zig b/src/Global.zig index f935d3b958c3a9..0a07e0ec8f56c8 100644 --- a/src/Global.zig +++ b/src/Global.zig @@ -170,13 +170,13 @@ pub inline fn configureAllocator(_: AllocatorConfiguration) void { } pub fn notimpl() noreturn { - @setCold(true); + @branchHint(.cold); Output.panic("Not implemented yet!!!!!", .{}); } // Make sure we always print any leftover pub fn crash() noreturn { - @setCold(true); + @branchHint(.cold); Global.exit(1); } diff --git a/src/Mutex.zig b/src/Mutex.zig index 7e824f2d822a9b..0aaaee366d3e72 100644 --- a/src/Mutex.zig +++ b/src/Mutex.zig @@ -164,7 +164,7 @@ const FutexImpl = struct { } fn lockSlow(self: *@This()) void { - @setCold(true); + @branchHint(.cold); // Avoid doing an atomic swap below if we already know the state is contended. // An atomic swap unconditionally stores which marks the cache-line as modified unnecessarily. 
diff --git a/src/Watcher.zig b/src/Watcher.zig index 7639bc11bed75f..3f710c64ceb354 100644 --- a/src/Watcher.zig +++ b/src/Watcher.zig @@ -333,11 +333,11 @@ fn appendFileAssumeCapacity( // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/kqueue.2.html var event = std.mem.zeroes(KEvent); - event.flags = std.c.EV_ADD | std.c.EV_CLEAR | std.c.EV_ENABLE; + event.flags = std.c.EV.ADD | std.c.EV.CLEAR | std.c.EV.ENABLE; // we want to know about the vnode - event.filter = std.c.EVFILT_VNODE; + event.filter = std.c.EVFILT.VNODE; - event.fflags = std.c.NOTE_WRITE | std.c.NOTE_RENAME | std.c.NOTE_DELETE; + event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE; // id event.ident = @intCast(fd.int()); @@ -425,15 +425,15 @@ fn appendDirectoryAssumeCapacity( // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/kqueue.2.html var event = std.mem.zeroes(KEvent); - event.flags = std.c.EV_ADD | std.c.EV_CLEAR | std.c.EV_ENABLE; + event.flags = std.c.EV.ADD | std.c.EV.CLEAR | std.c.EV.ENABLE; // we want to know about the vnode - event.filter = std.c.EVFILT_VNODE; + event.filter = std.c.EVFILT.VNODE; // monitor: // - Write // - Rename // - Delete - event.fflags = std.c.NOTE_WRITE | std.c.NOTE_RENAME | std.c.NOTE_DELETE; + event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE; // id event.ident = @intCast(fd.int()); diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig index 50c9f2a810c055..51365d41261d45 100644 --- a/src/analytics/analytics_thread.zig +++ b/src/analytics/analytics_thread.zig @@ -126,8 +126,8 @@ pub const Features = struct { pub var s3: usize = 0; comptime { - @export(napi_module_register, .{ .name = "Bun__napi_module_register_count" }); - @export(process_dlopen, .{ .name = "Bun__process_dlopen_count" }); + @export(&napi_module_register, .{ .name = "Bun__napi_module_register_count" }); + 
@export(&process_dlopen, .{ .name = "Bun__process_dlopen_count" }); } pub fn formatter() Formatter { @@ -138,14 +138,14 @@ pub const Features = struct { pub fn format(_: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { const fields = comptime brk: { const info: std.builtin.Type = @typeInfo(Features); - var buffer: [info.Struct.decls.len][]const u8 = .{""} ** info.Struct.decls.len; + var buffer: [info.@"struct".decls.len][]const u8 = .{""} ** info.@"struct".decls.len; var count: usize = 0; - for (info.Struct.decls) |decl| { + for (info.@"struct".decls) |decl| { var f = &@field(Features, decl.name); _ = &f; const Field = @TypeOf(f); const FieldT: std.builtin.Type = @typeInfo(Field); - if (FieldT.Pointer.child != usize) continue; + if (FieldT.pointer.child != usize) continue; buffer[count] = decl.name; count += 1; } @@ -216,7 +216,7 @@ pub const packed_features_list = brk: { }; pub const PackedFeatures = @Type(.{ - .Struct = .{ + .@"struct" = .{ .layout = .@"packed", .backing_integer = u64, .fields = brk: { @@ -226,7 +226,7 @@ pub const PackedFeatures = @Type(.{ fields[i] = .{ .name = name, .type = bool, - .default_value = &false, + .default_value_ptr = &false, .is_comptime = false, .alignment = 0, }; @@ -236,7 +236,7 @@ pub const PackedFeatures = @Type(.{ fields[i] = .{ .name = std.fmt.comptimePrint("_{d}", .{i}), .type = bool, - .default_value = &false, + .default_value_ptr = &false, .is_comptime = false, .alignment = 0, }; diff --git a/src/ast/base.zig b/src/ast/base.zig index ffd6240ad3e3ba..de1f8a5a3f3d20 100644 --- a/src/ast/base.zig +++ b/src/ast/base.zig @@ -62,7 +62,7 @@ pub const Index = packed struct(u32) { pub fn init(num: anytype) Index { const NumType = @TypeOf(num); - if (comptime @typeInfo(NumType) == .Pointer) { + if (comptime @typeInfo(NumType) == .pointer) { return init(num.*); } diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig index 2dfb108952fae8..6ab59c66f7330c 100644 --- 
a/src/async/posix_event_loop.zig +++ b/src/async/posix_event_loop.zig @@ -366,42 +366,42 @@ pub const FilePoll = struct { // var loader = ptr.as(ShellSubprocessCapturedBufferedWriterMini); // loader.onPoll(size_or_offset, 0); // }, - @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellBufferedWriter))) => { + @field(Owner.Tag, @typeName(ShellBufferedWriter)) => { var handler: *ShellBufferedWriter = ptr.as(ShellBufferedWriter); handler.onPoll(size_or_offset, poll.flags.contains(.hup)); }, - @field(Owner.Tag, bun.meta.typeBaseName(@typeName(ShellStaticPipeWriter))) => { + @field(Owner.Tag, @typeName(ShellStaticPipeWriter)) => { var handler: *ShellStaticPipeWriter = ptr.as(ShellStaticPipeWriter); handler.onPoll(size_or_offset, poll.flags.contains(.hup)); }, - @field(Owner.Tag, bun.meta.typeBaseName(@typeName(StaticPipeWriter))) => { + @field(Owner.Tag, @typeName(StaticPipeWriter)) => { var handler: *StaticPipeWriter = ptr.as(StaticPipeWriter); handler.onPoll(size_or_offset, poll.flags.contains(.hup)); }, - @field(Owner.Tag, bun.meta.typeBaseName(@typeName(FileSink))) => { + @field(Owner.Tag, @typeName(FileSink)) => { var handler: *FileSink = ptr.as(FileSink); handler.onPoll(size_or_offset, poll.flags.contains(.hup)); }, - @field(Owner.Tag, bun.meta.typeBaseName(@typeName(BufferedReader))) => { + @field(Owner.Tag, @typeName(BufferedReader)) => { log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) Reader", .{poll.fd}); var handler: *BufferedReader = ptr.as(BufferedReader); handler.onPoll(size_or_offset, poll.flags.contains(.hup)); }, - @field(Owner.Tag, bun.meta.typeBaseName(@typeName(Process))) => { + @field(Owner.Tag, @typeName(Process)) => { log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) Process", .{poll.fd}); var loader = ptr.as(Process); loader.onWaitPidFromEventLoopTask(); }, - @field(Owner.Tag, "DNSResolver") => { + @field(Owner.Tag, @typeName(DNSResolver)) => { log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) DNSResolver", .{poll.fd}); var loader: *DNSResolver 
= ptr.as(DNSResolver); loader.onDNSPoll(poll); }, - @field(Owner.Tag, "GetAddrInfoRequest") => { + @field(Owner.Tag, @typeName(GetAddrInfoRequest)) => { if (comptime !Environment.isMac) { unreachable; } @@ -411,7 +411,7 @@ pub const FilePoll = struct { loader.onMachportChange(); }, - @field(Owner.Tag, "Request") => { + @field(Owner.Tag, @typeName(Request)) => { if (comptime !Environment.isMac) { unreachable; } @@ -503,19 +503,19 @@ pub const FilePoll = struct { pub fn fromKQueueEvent(kqueue_event: std.posix.system.kevent64_s) Flags.Set { var flags = Flags.Set{}; - if (kqueue_event.filter == std.posix.system.EVFILT_READ) { + if (kqueue_event.filter == std.posix.system.EVFILT.READ) { flags.insert(Flags.readable); - if (kqueue_event.flags & std.posix.system.EV_EOF != 0) { + if (kqueue_event.flags & std.posix.system.EV.EOF != 0) { flags.insert(Flags.hup); } - } else if (kqueue_event.filter == std.posix.system.EVFILT_WRITE) { + } else if (kqueue_event.filter == std.posix.system.EVFILT.WRITE) { flags.insert(Flags.writable); - if (kqueue_event.flags & std.posix.system.EV_EOF != 0) { + if (kqueue_event.flags & std.posix.system.EV.EOF != 0) { flags.insert(Flags.hup); } - } else if (kqueue_event.filter == std.posix.system.EVFILT_PROC) { + } else if (kqueue_event.filter == std.posix.system.EVFILT.PROC) { flags.insert(Flags.process); - } else if (kqueue_event.filter == std.posix.system.EVFILT_MACHPORT) { + } else if (kqueue_event.filter == std.posix.system.EVFILT.MACHPORT) { flags.insert(Flags.machport); } return flags; @@ -763,7 +763,7 @@ pub const FilePoll = struct { pub fn onTick(loop: *Loop, tagged_pointer: ?*anyopaque) callconv(.C) void { var tag = Pollable.from(tagged_pointer); - if (tag.tag() != @field(Pollable.Tag, "FilePoll")) + if (tag.tag() != @field(Pollable.Tag, @typeName(FilePoll))) return; var file_poll: *FilePoll = tag.as(FilePoll); @@ -782,7 +782,7 @@ pub const FilePoll = struct { }); comptime { - @export(onTick, .{ .name = "Bun__internal_dispatch_ready_poll" 
}); + @export(&onTick, .{ .name = "Bun__internal_dispatch_ready_poll" }); } const timeout = std.mem.zeroes(std.posix.timespec); @@ -837,45 +837,45 @@ pub const FilePoll = struct { const one_shot_flag: u16 = if (!this.flags.contains(.one_shot)) 0 else if (one_shot == .dispatch) - std.c.EV_DISPATCH | std.c.EV_ENABLE + std.c.EV.DISPATCH | std.c.EV.ENABLE else - std.c.EV_ONESHOT; + std.c.EV.ONESHOT; changelist[0] = switch (flag) { .readable => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_READ, + .filter = std.posix.system.EVFILT.READ, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_ADD | one_shot_flag, + .flags = std.c.EV.ADD | one_shot_flag, .ext = .{ this.generation_number, 0 }, }, .writable => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_WRITE, + .filter = std.posix.system.EVFILT.WRITE, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_ADD | one_shot_flag, + .flags = std.c.EV.ADD | one_shot_flag, .ext = .{ this.generation_number, 0 }, }, .process => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_PROC, + .filter = std.posix.system.EVFILT.PROC, .data = 0, - .fflags = std.c.NOTE_EXIT, + .fflags = std.c.NOTE.EXIT, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_ADD | one_shot_flag, + .flags = std.c.EV.ADD | one_shot_flag, .ext = .{ this.generation_number, 0 }, }, .machport => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_MACHPORT, + .filter = std.posix.system.EVFILT.MACHPORT, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_ADD | one_shot_flag, + .flags = std.c.EV.ADD | one_shot_flag, .ext = .{ this.generation_number, 0 }, }, else => unreachable, @@ -913,7 +913,7 @@ pub const FilePoll = struct { // processing an element of the changelist and there is enough room // in the eventlist, then the event will be placed in the eventlist 
// with EV_ERROR set in flags and the system error in data. - if (changelist[0].flags == std.c.EV_ERROR and changelist[0].data != 0) { + if (changelist[0].flags == std.c.EV.ERROR and changelist[0].data != 0) { return JSC.Maybe(void).errnoSys(changelist[0].data, .kevent).?; // Otherwise, -1 will be returned, and errno will be set to // indicate the error condition. @@ -1008,38 +1008,38 @@ pub const FilePoll = struct { changelist[0] = switch (flag) { .readable => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_READ, + .filter = std.posix.system.EVFILT.READ, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_DELETE, + .flags = std.c.EV.DELETE, .ext = .{ 0, 0 }, }, .machport => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_MACHPORT, + .filter = std.posix.system.EVFILT.MACHPORT, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_DELETE, + .flags = std.c.EV.DELETE, .ext = .{ 0, 0 }, }, .writable => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_WRITE, + .filter = std.posix.system.EVFILT.WRITE, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_DELETE, + .flags = std.c.EV.DELETE, .ext = .{ 0, 0 }, }, .process => .{ .ident = @intCast(fd.cast()), - .filter = std.posix.system.EVFILT_PROC, + .filter = std.posix.system.EVFILT.PROC, .data = 0, - .fflags = std.c.NOTE_EXIT, + .fflags = std.c.NOTE.EXIT, .udata = @intFromPtr(Pollable.init(this).ptr()), - .flags = std.c.EV_DELETE, + .flags = std.c.EV.DELETE, .ext = .{ 0, 0 }, }, else => unreachable, @@ -1065,7 +1065,7 @@ pub const FilePoll = struct { // processing an element of the changelist and there is enough room // in the eventlist, then the event will be placed in the eventlist // with EV_ERROR set in flags and the system error in data. 
- if (changelist[0].flags == std.c.EV_ERROR) { + if (changelist[0].flags == std.c.EV.ERROR) { return JSC.Maybe(void).errnoSys(changelist[0].data, .kevent).?; // Otherwise, -1 will be returned, and errno will be set to // indicate the error condition. diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 04a8d5f23b714a..6b7f957926d542 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -865,7 +865,7 @@ const DeferredRequest = struct { server_handler: bun.JSC.API.SavedRequest, js_payload: *Response, - const Tag = @typeInfo(Data).Union.tag_type.?; + const Tag = @typeInfo(Data).@"union".tag_type.?; }; }; @@ -3715,20 +3715,20 @@ const HmrTopic = enum(u8) { /// Invalid data _, - pub const max_count = @typeInfo(HmrTopic).Enum.fields.len; - pub const Bits = @Type(.{ .Struct = .{ - .backing_integer = @Type(.{ .Int = .{ + pub const max_count = @typeInfo(HmrTopic).@"enum".fields.len; + pub const Bits = @Type(.{ .@"struct" = .{ + .backing_integer = @Type(.{ .int = .{ .bits = max_count, .signedness = .unsigned, } }), .fields = &brk: { - const enum_fields = @typeInfo(HmrTopic).Enum.fields; + const enum_fields = @typeInfo(HmrTopic).@"enum".fields; var fields: [enum_fields.len]std.builtin.Type.StructField = undefined; for (enum_fields, &fields) |e, *s| { s.* = .{ .name = e.name, .type = bool, - .default_value = &false, + .default_value_ptr = &false, .is_comptime = false, .alignment = 0, }; @@ -3768,7 +3768,7 @@ const HmrSocket = struct { const topics = msg[1..]; if (topics.len > HmrTopic.max_count) return; outer: for (topics) |char| { - inline for (@typeInfo(HmrTopic).Enum.fields) |field| { + inline for (@typeInfo(HmrTopic).@"enum".fields) |field| { if (char == field.value) { @field(new_bits, field.name) = true; continue :outer; @@ -4063,7 +4063,7 @@ const WatcherAtomics = struct { ev.timer = std.time.Timer.start() catch unreachable; }, 1 => { - // @branchHint(.unlikely); + @branchHint(.unlikely); // DevServer stole this event. 
Unlikely but possible when // the user is saving very heavily (10-30 times per second) state.current +%= 1; @@ -4087,12 +4087,12 @@ const WatcherAtomics = struct { ev.owner.bun_watcher.thread_lock.assertLocked(); if (ev.files.count() > 0) { - // @branchHint(.likely); + @branchHint(.likely); // There are files to be processed, increment this count first. const prev_count = state.watcher_events_emitted.fetchAdd(1, .seq_cst); if (prev_count == 0) { - // @branchHint(.likely); + @branchHint(.likely); // Submit a task to the DevServer, notifying it that there is // work to do. The watcher will move to the other event. ev.concurrent_task = .{ @@ -4434,7 +4434,7 @@ pub const EntryPointList = struct { pub fn append(entry_points: *EntryPointList, allocator: std.mem.Allocator, abs_path: []const u8, flags: Flags) !void { const gop = try entry_points.set.getOrPut(allocator, abs_path); if (gop.found_existing) { - const T = @typeInfo(Flags).Struct.backing_integer.?; + const T = @typeInfo(Flags).@"struct".backing_integer.?; gop.value_ptr.* = @bitCast(@as(T, @bitCast(gop.value_ptr.*)) | @as(T, @bitCast(flags))); } else { gop.value_ptr.* = flags; diff --git a/src/bake/FrameworkRouter.zig b/src/bake/FrameworkRouter.zig index c3b6b134cc677d..3182e8ca7c594a 100644 --- a/src/bake/FrameworkRouter.zig +++ b/src/bake/FrameworkRouter.zig @@ -331,7 +331,7 @@ pub const Part = union(enum(u3)) { group: []const u8, const SerializedHeader = packed struct(u32) { - tag: @typeInfo(Part).Union.tag_type.?, + tag: @typeInfo(Part).@"union".tag_type.?, len: u29, }; diff --git a/src/bit_set.zig b/src/bit_set.zig index 2ecd5b3591f12d..f702915dfbe581 100644 --- a/src/bit_set.zig +++ b/src/bit_set.zig @@ -325,10 +325,10 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { const mask_info: std.builtin.Type = @typeInfo(MaskIntType); // Make sure the mask int is indeed an int - if (mask_info != .Int) @compileError("ArrayBitSet can only operate on integer masks, but was passed " ++ 
@typeName(MaskIntType)); + if (mask_info != .int) @compileError("ArrayBitSet can only operate on integer masks, but was passed " ++ @typeName(MaskIntType)); // It must also be unsigned. - if (mask_info.Int.signedness != .unsigned) @compileError("ArrayBitSet requires an unsigned integer mask type, but was passed " ++ @typeName(MaskIntType)); + if (mask_info.int.signedness != .unsigned) @compileError("ArrayBitSet requires an unsigned integer mask type, but was passed " ++ @typeName(MaskIntType)); // And it must not be empty. if (MaskIntType == u0) @@ -1620,7 +1620,7 @@ fn testSupersetOf(empty: anytype, full: anytype, even: anytype, odd: anytype, le fn testBitSet(a: anytype, b: anytype, len: usize) !void { try testing.expectEqual(len, a.capacity()); try testing.expectEqual(len, b.capacity()); - const needs_ptr = @hasField(std.meta.Child(@TypeOf(a)), "masks") and @typeInfo(@TypeOf(@field(a, "masks"))) != .Pointer; + const needs_ptr = @hasField(std.meta.Child(@TypeOf(a)), "masks") and @typeInfo(@TypeOf(@field(a, "masks"))) != .pointer; { for (0..len) |i| { @@ -1844,7 +1844,7 @@ fn fillOdd(set: anytype, len: usize) void { fn testPureBitSet(comptime Set: type) !void { var empty_ = Set.initEmpty(); var full_ = Set.initFull(); - const needs_ptr = @hasField(Set, "masks") and @typeInfo(@TypeOf(empty_.masks)) != .Pointer; + const needs_ptr = @hasField(Set, "masks") and @typeInfo(@TypeOf(empty_.masks)) != .pointer; var even_ = even: { var bit_set = Set.initEmpty(); @@ -1900,7 +1900,7 @@ fn testPureBitSet(comptime Set: type) !void { try testing.expect(full.differenceWith(even).eql(odd)); } -fn testStaticBitSet(comptime Set: type, comptime Container: @Type(.EnumLiteral)) !void { +fn testStaticBitSet(comptime Set: type, comptime Container: @Type(.enum_literal)) !void { var a = Set.initEmpty(); var b = Set.initFull(); try testing.expectEqual(@as(usize, 0), a.count()); diff --git a/src/bitflags.zig b/src/bitflags.zig index f851a694908f66..908179686af7b2 100644 --- a/src/bitflags.zig 
+++ b/src/bitflags.zig @@ -2,9 +2,9 @@ const std = @import("std"); pub fn Bitflags(comptime T: type) type { const tyinfo = @typeInfo(T); - const IntType = tyinfo.Struct.backing_integer.?; + const IntType = tyinfo.@"struct".backing_integer.?; const IntTypeInfo = @typeInfo(IntType); - const IntRepresentingNumOfBits = std.math.IntFittingRange(0, IntTypeInfo.Int.bits); + const IntRepresentingNumOfBits = std.math.IntFittingRange(0, IntTypeInfo.int.bits); return struct { pub const IMPL_BITFLAGS: u0 = 0; diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index 38d3fd48cb4da1..b78c9afe7d22ef 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -3433,7 +3433,7 @@ pub const Formatter = struct { ", ... {d} more"; writer.print(comptime Output.prettyFmt(fmt_, enable_ansi_colors), .{ - if (@typeInfo(Number) == .Float) bun.fmt.double(@floatCast(slice[0])) else slice[0], + if (@typeInfo(Number) == .float) bun.fmt.double(@floatCast(slice[0])) else slice[0], }); var leftover = slice[1..]; const max = 512; @@ -3443,7 +3443,7 @@ pub const Formatter = struct { writer.space(); writer.print(comptime Output.prettyFmt(fmt_, enable_ansi_colors), .{ - if (@typeInfo(Number) == .Float) bun.fmt.double(@floatCast(el)) else el, + if (@typeInfo(Number) == .float) bun.fmt.double(@floatCast(el)) else el, }); } @@ -3686,17 +3686,17 @@ pub fn screenshot( ) callconv(JSC.conv) void {} comptime { - @export(messageWithTypeAndLevel, .{ .name = shim.symbolName("messageWithTypeAndLevel") }); - @export(count, .{ .name = shim.symbolName("count") }); - @export(countReset, .{ .name = shim.symbolName("countReset") }); - @export(time, .{ .name = shim.symbolName("time") }); - @export(timeLog, .{ .name = shim.symbolName("timeLog") }); - @export(timeEnd, .{ .name = shim.symbolName("timeEnd") }); - @export(profile, .{ .name = shim.symbolName("profile") }); - @export(profileEnd, .{ .name = shim.symbolName("profileEnd") }); - @export(takeHeapSnapshot, .{ .name = 
shim.symbolName("takeHeapSnapshot") }); - @export(timeStamp, .{ .name = shim.symbolName("timeStamp") }); - @export(record, .{ .name = shim.symbolName("record") }); - @export(recordEnd, .{ .name = shim.symbolName("recordEnd") }); - @export(screenshot, .{ .name = shim.symbolName("screenshot") }); + @export(&messageWithTypeAndLevel, .{ .name = shim.symbolName("messageWithTypeAndLevel") }); + @export(&count, .{ .name = shim.symbolName("count") }); + @export(&countReset, .{ .name = shim.symbolName("countReset") }); + @export(&time, .{ .name = shim.symbolName("time") }); + @export(&timeLog, .{ .name = shim.symbolName("timeLog") }); + @export(&timeEnd, .{ .name = shim.symbolName("timeEnd") }); + @export(&profile, .{ .name = shim.symbolName("profile") }); + @export(&profileEnd, .{ .name = shim.symbolName("profileEnd") }); + @export(&takeHeapSnapshot, .{ .name = shim.symbolName("takeHeapSnapshot") }); + @export(&timeStamp, .{ .name = shim.symbolName("timeStamp") }); + @export(&record, .{ .name = shim.symbolName("record") }); + @export(&recordEnd, .{ .name = shim.symbolName("recordEnd") }); + @export(&screenshot, .{ .name = shim.symbolName("screenshot") }); } diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index d6e02a1ad09ef9..6830bc9fda21d5 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -100,70 +100,70 @@ pub const BunObject = struct { } // --- Getters --- - @export(BunObject.CryptoHasher, .{ .name = getterName("CryptoHasher") }); - @export(BunObject.FFI, .{ .name = getterName("FFI") }); - @export(BunObject.FileSystemRouter, .{ .name = getterName("FileSystemRouter") }); - @export(BunObject.MD4, .{ .name = getterName("MD4") }); - @export(BunObject.MD5, .{ .name = getterName("MD5") }); - @export(BunObject.SHA1, .{ .name = getterName("SHA1") }); - @export(BunObject.SHA224, .{ .name = getterName("SHA224") }); - @export(BunObject.SHA256, .{ .name = getterName("SHA256") }); - @export(BunObject.SHA384, .{ .name = 
getterName("SHA384") }); - @export(BunObject.SHA512, .{ .name = getterName("SHA512") }); - @export(BunObject.SHA512_256, .{ .name = getterName("SHA512_256") }); - - @export(BunObject.TOML, .{ .name = getterName("TOML") }); - @export(BunObject.Glob, .{ .name = getterName("Glob") }); - @export(BunObject.Transpiler, .{ .name = getterName("Transpiler") }); - @export(BunObject.argv, .{ .name = getterName("argv") }); - @export(BunObject.cwd, .{ .name = getterName("cwd") }); - @export(BunObject.enableANSIColors, .{ .name = getterName("enableANSIColors") }); - @export(BunObject.hash, .{ .name = getterName("hash") }); - @export(BunObject.inspect, .{ .name = getterName("inspect") }); - @export(BunObject.main, .{ .name = getterName("main") }); - @export(BunObject.origin, .{ .name = getterName("origin") }); - @export(BunObject.stderr, .{ .name = getterName("stderr") }); - @export(BunObject.stdin, .{ .name = getterName("stdin") }); - @export(BunObject.stdout, .{ .name = getterName("stdout") }); - @export(BunObject.unsafe, .{ .name = getterName("unsafe") }); - @export(BunObject.semver, .{ .name = getterName("semver") }); - @export(BunObject.embeddedFiles, .{ .name = getterName("embeddedFiles") }); - @export(BunObject.S3Client, .{ .name = getterName("S3Client") }); - @export(BunObject.s3, .{ .name = getterName("s3") }); + @export(&BunObject.CryptoHasher, .{ .name = getterName("CryptoHasher") }); + @export(&BunObject.FFI, .{ .name = getterName("FFI") }); + @export(&BunObject.FileSystemRouter, .{ .name = getterName("FileSystemRouter") }); + @export(&BunObject.MD4, .{ .name = getterName("MD4") }); + @export(&BunObject.MD5, .{ .name = getterName("MD5") }); + @export(&BunObject.SHA1, .{ .name = getterName("SHA1") }); + @export(&BunObject.SHA224, .{ .name = getterName("SHA224") }); + @export(&BunObject.SHA256, .{ .name = getterName("SHA256") }); + @export(&BunObject.SHA384, .{ .name = getterName("SHA384") }); + @export(&BunObject.SHA512, .{ .name = getterName("SHA512") }); + 
@export(&BunObject.SHA512_256, .{ .name = getterName("SHA512_256") }); + + @export(&BunObject.TOML, .{ .name = getterName("TOML") }); + @export(&BunObject.Glob, .{ .name = getterName("Glob") }); + @export(&BunObject.Transpiler, .{ .name = getterName("Transpiler") }); + @export(&BunObject.argv, .{ .name = getterName("argv") }); + @export(&BunObject.cwd, .{ .name = getterName("cwd") }); + @export(&BunObject.enableANSIColors, .{ .name = getterName("enableANSIColors") }); + @export(&BunObject.hash, .{ .name = getterName("hash") }); + @export(&BunObject.inspect, .{ .name = getterName("inspect") }); + @export(&BunObject.main, .{ .name = getterName("main") }); + @export(&BunObject.origin, .{ .name = getterName("origin") }); + @export(&BunObject.stderr, .{ .name = getterName("stderr") }); + @export(&BunObject.stdin, .{ .name = getterName("stdin") }); + @export(&BunObject.stdout, .{ .name = getterName("stdout") }); + @export(&BunObject.unsafe, .{ .name = getterName("unsafe") }); + @export(&BunObject.semver, .{ .name = getterName("semver") }); + @export(&BunObject.embeddedFiles, .{ .name = getterName("embeddedFiles") }); + @export(&BunObject.S3Client, .{ .name = getterName("S3Client") }); + @export(&BunObject.s3, .{ .name = getterName("s3") }); // --- Getters -- // -- Callbacks -- - @export(BunObject.allocUnsafe, .{ .name = callbackName("allocUnsafe") }); - @export(BunObject.build, .{ .name = callbackName("build") }); - @export(BunObject.color, .{ .name = callbackName("color") }); - @export(BunObject.connect, .{ .name = callbackName("connect") }); - @export(BunObject.createParsedShellScript, .{ .name = callbackName("createParsedShellScript") }); - @export(BunObject.createShellInterpreter, .{ .name = callbackName("createShellInterpreter") }); - @export(BunObject.deflateSync, .{ .name = callbackName("deflateSync") }); - @export(BunObject.file, .{ .name = callbackName("file") }); - @export(BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") }); - 
@export(BunObject.gzipSync, .{ .name = callbackName("gzipSync") }); - @export(BunObject.indexOfLine, .{ .name = callbackName("indexOfLine") }); - @export(BunObject.inflateSync, .{ .name = callbackName("inflateSync") }); - @export(BunObject.jest, .{ .name = callbackName("jest") }); - @export(BunObject.listen, .{ .name = callbackName("listen") }); - @export(BunObject.mmap, .{ .name = callbackName("mmap") }); - @export(BunObject.nanoseconds, .{ .name = callbackName("nanoseconds") }); - @export(BunObject.openInEditor, .{ .name = callbackName("openInEditor") }); - @export(BunObject.registerMacro, .{ .name = callbackName("registerMacro") }); - @export(BunObject.resolve, .{ .name = callbackName("resolve") }); - @export(BunObject.resolveSync, .{ .name = callbackName("resolveSync") }); - @export(BunObject.serve, .{ .name = callbackName("serve") }); - @export(BunObject.sha, .{ .name = callbackName("sha") }); - @export(BunObject.shellEscape, .{ .name = callbackName("shellEscape") }); - @export(BunObject.shrink, .{ .name = callbackName("shrink") }); - @export(BunObject.sleepSync, .{ .name = callbackName("sleepSync") }); - @export(BunObject.spawn, .{ .name = callbackName("spawn") }); - @export(BunObject.spawnSync, .{ .name = callbackName("spawnSync") }); - @export(BunObject.udpSocket, .{ .name = callbackName("udpSocket") }); - @export(BunObject.which, .{ .name = callbackName("which") }); - @export(BunObject.write, .{ .name = callbackName("write") }); + @export(&BunObject.allocUnsafe, .{ .name = callbackName("allocUnsafe") }); + @export(&BunObject.build, .{ .name = callbackName("build") }); + @export(&BunObject.color, .{ .name = callbackName("color") }); + @export(&BunObject.connect, .{ .name = callbackName("connect") }); + @export(&BunObject.createParsedShellScript, .{ .name = callbackName("createParsedShellScript") }); + @export(&BunObject.createShellInterpreter, .{ .name = callbackName("createShellInterpreter") }); + @export(&BunObject.deflateSync, .{ .name = 
callbackName("deflateSync") }); + @export(&BunObject.file, .{ .name = callbackName("file") }); + @export(&BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") }); + @export(&BunObject.gzipSync, .{ .name = callbackName("gzipSync") }); + @export(&BunObject.indexOfLine, .{ .name = callbackName("indexOfLine") }); + @export(&BunObject.inflateSync, .{ .name = callbackName("inflateSync") }); + @export(&BunObject.jest, .{ .name = callbackName("jest") }); + @export(&BunObject.listen, .{ .name = callbackName("listen") }); + @export(&BunObject.mmap, .{ .name = callbackName("mmap") }); + @export(&BunObject.nanoseconds, .{ .name = callbackName("nanoseconds") }); + @export(&BunObject.openInEditor, .{ .name = callbackName("openInEditor") }); + @export(&BunObject.registerMacro, .{ .name = callbackName("registerMacro") }); + @export(&BunObject.resolve, .{ .name = callbackName("resolve") }); + @export(&BunObject.resolveSync, .{ .name = callbackName("resolveSync") }); + @export(&BunObject.serve, .{ .name = callbackName("serve") }); + @export(&BunObject.sha, .{ .name = callbackName("sha") }); + @export(&BunObject.shellEscape, .{ .name = callbackName("shellEscape") }); + @export(&BunObject.shrink, .{ .name = callbackName("shrink") }); + @export(&BunObject.sleepSync, .{ .name = callbackName("sleepSync") }); + @export(&BunObject.spawn, .{ .name = callbackName("spawn") }); + @export(&BunObject.spawnSync, .{ .name = callbackName("spawnSync") }); + @export(&BunObject.udpSocket, .{ .name = callbackName("udpSocket") }); + @export(&BunObject.which, .{ .name = callbackName("which") }); + @export(&BunObject.write, .{ .name = callbackName("write") }); // -- Callbacks -- } }; @@ -1920,7 +1920,7 @@ pub const Crypto = struct { hash: []const u8, pub fn toErrorInstance(this: Value, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - const error_code = std.fmt.allocPrint(bun.default_allocator, "PASSWORD_{}", .{PascalToUpperUnderscoreCaseFormatter{ .input = @errorName(this.err) }}) catch 
bun.outOfMemory(); + const error_code = std.fmt.allocPrint(bun.default_allocator, "PASSWORD{}", .{PascalToUpperUnderscoreCaseFormatter{ .input = @errorName(this.err) }}) catch bun.outOfMemory(); defer bun.default_allocator.free(error_code); const instance = globalObject.createErrorInstance("Password hashing failed with error \"{s}\"", .{@errorName(this.err)}); instance.put(globalObject, ZigString.static("code"), JSC.ZigString.init(error_code).toJS(globalObject)); @@ -2857,7 +2857,7 @@ pub const Crypto = struct { return globalThis.throw("Bun.file() is not supported here yet (it needs an async version)", .{}); } - if (comptime @typeInfo(@TypeOf(Hasher.hash)).Fn.params.len == 3) { + if (comptime @typeInfo(@TypeOf(Hasher.hash)).@"fn".params.len == 3) { Hasher.hash(input.slice(), &output_digest_buf, JSC.VirtualMachine.get().rareData().boringEngine()); } else { Hasher.hash(input.slice(), &output_digest_buf); @@ -2877,7 +2877,7 @@ pub const Crypto = struct { output_digest_slice = bytes[0..Hasher.digest]; } - if (comptime @typeInfo(@TypeOf(Hasher.hash)).Fn.params.len == 3) { + if (comptime @typeInfo(@TypeOf(Hasher.hash)).@"fn".params.len == 3) { Hasher.hash(input.slice(), output_digest_slice, JSC.VirtualMachine.get().rareData().boringEngine()); } else { Hasher.hash(input.slice(), output_digest_slice); @@ -3266,7 +3266,7 @@ pub fn mmapFile(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun. 
if (try opts.get(globalThis, "offset")) |value| { offset = @as(usize, @intCast(value.toInt64())); - offset = std.mem.alignBackwardAnyAlign(offset, std.mem.page_size); + offset = std.mem.alignBackwardAnyAlign(usize, offset, std.mem.page_size); } } diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 8aa4f79e382a67..b5d4a6a6775fa7 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; -const JavaScript = @import("../javascript.zig"); const QueryStringMap = @import("../../url.zig").QueryStringMap; const CombinedScanner = @import("../../url.zig").CombinedScanner; const bun = @import("root").bun; @@ -11,7 +10,6 @@ const WebCore = @import("../webcore/response.zig"); const Transpiler = bun.transpiler; const options = @import("../../options.zig"); const resolve_path = @import("../../resolver/resolve_path.zig"); -const VirtualMachine = JavaScript.VirtualMachine; const ScriptSrcStream = std.io.FixedBufferStream([]u8); const ZigString = JSC.ZigString; const Fs = @import("../../fs.zig"); diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index cd42cb5b14784c..0c0aa5a27ac73c 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; -const JavaScript = @import("../javascript.zig"); const QueryStringMap = @import("../../url.zig").QueryStringMap; const CombinedScanner = @import("../../url.zig").CombinedScanner; const bun = @import("root").bun; @@ -10,7 +9,6 @@ const js = JSC.C; const WebCore = @import("../webcore/response.zig"); const Transpiler = bun.transpiler; const options = @import("../../options.zig"); -const VirtualMachine = JavaScript.VirtualMachine; const ScriptSrcStream = std.io.FixedBufferStream([]u8); const ZigString = JSC.ZigString; const Fs = @import("../../fs.zig"); @@ -454,7 +452,7 
@@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std allocator, &transpiler.log, logger.Source.initPathString("tsconfig.json", transpiler.tsconfig_buf), - &VirtualMachine.get().transpiler.resolver.caches.json, + &JSC.VirtualMachine.get().transpiler.resolver.caches.json, ) catch null) |parsed_tsconfig| { transpiler.tsconfig = parsed_tsconfig; } @@ -488,7 +486,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std if (out.isEmpty()) break :macros; transpiler.macros_buf = out.toOwnedSlice(allocator) catch bun.outOfMemory(); const source = logger.Source.initPathString("macros.json", transpiler.macros_buf); - const json = (VirtualMachine.get().transpiler.resolver.caches.json.parseJSON( + const json = (JSC.VirtualMachine.get().transpiler.resolver.caches.json.parseJSON( &transpiler.log, source, allocator, @@ -731,7 +729,7 @@ pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b allocator, log, transpiler_options.transform, - JavaScript.VirtualMachine.get().transpiler.env, + JSC.VirtualMachine.get().transpiler.env, ) catch |err| { if ((log.warnings + log.errors) > 0) { return globalThis.throwValue(log.toJS(globalThis, allocator, "Failed to create transpiler")); diff --git a/src/bun.js/api/Timer.zig b/src/bun.js/api/Timer.zig index e7a3569b2de952..0c9bae6db207d1 100644 --- a/src/bun.js/api/Timer.zig +++ b/src/bun.js/api/Timer.zig @@ -302,7 +302,7 @@ pub const All = struct { } comptime { - @export(setImmediate, .{ .name = "Bun__Timer__setImmediate" }); + @export(&setImmediate, .{ .name = "Bun__Timer__setImmediate" }); } pub fn setTimeout( @@ -412,11 +412,11 @@ pub const All = struct { }); comptime { - @export(setTimeout, .{ .name = Export[0].symbol_name }); - @export(setInterval, .{ .name = Export[1].symbol_name }); - @export(clearTimeout, .{ .name = Export[2].symbol_name }); - @export(clearInterval, .{ .name = Export[3].symbol_name }); - @export(getNextID, .{ .name = 
Export[4].symbol_name }); + @export(&setTimeout, .{ .name = Export[0].symbol_name }); + @export(&setInterval, .{ .name = Export[1].symbol_name }); + @export(&clearTimeout, .{ .name = Export[2].symbol_name }); + @export(&clearInterval, .{ .name = Export[3].symbol_name }); + @export(&getNextID, .{ .name = Export[4].symbol_name }); } }; @@ -444,7 +444,7 @@ pub const TimerObject = struct { }, pub usingnamespace JSC.Codegen.JSTimeout; - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); extern "C" fn Bun__JSTimeout__call(encodedTimeoutValue: JSValue, globalObject: *JSC.JSGlobalObject) void; diff --git a/src/bun.js/api/bun/dns_resolver.zig b/src/bun.js/api/bun/dns_resolver.zig index 60dfc785274ea5..f960f38828ab9a 100644 --- a/src/bun.js/api/bun/dns_resolver.zig +++ b/src/bun.js/api/bun/dns_resolver.zig @@ -41,10 +41,7 @@ const LibInfo = struct { if (loaded) return handle; loaded = true; - const RTLD_LAZY = 1; - const RTLD_LOCAL = 4; - - handle = bun.C.dlopen("libinfo.dylib", RTLD_LAZY | RTLD_LOCAL); + handle = bun.C.dlopen("libinfo.dylib", .{ .LAZY = true, .LOCAL = true }); if (handle == null) Output.debug("libinfo.dylib not found", .{}); return handle; @@ -1395,7 +1392,7 @@ pub const InternalDNS = struct { // https://github.com/nodejs/node/issues/33816 // https://github.com/aio-libs/aiohttp/issues/5357 // https://github.com/libuv/libuv/issues/2225 - .flags = if (Environment.isPosix) bun.C.translated.AI_ADDRCONFIG else 0, + .flags = if (Environment.isPosix) .{ .ADDRCONFIG = true } else .{}, .next = null, .protocol = 0, .socktype = std.c.SOCK.STREAM, @@ -1527,7 +1524,7 @@ pub const InternalDNS = struct { if (Environment.isWindows) { const wsa = std.os.windows.ws2_32; const wsa_hints = wsa.addrinfo{ - .flags = 0, + .flags = .{}, .family = wsa.AF.UNSPEC, .socktype = wsa.SOCK.STREAM, .protocol = 0, @@ -1756,16 +1753,16 @@ pub const InternalDNS = struct { pub const InternalDNSRequest = InternalDNS.Request; 
comptime { - @export(InternalDNS.us_getaddrinfo_set, .{ + @export(&InternalDNS.us_getaddrinfo_set, .{ .name = "Bun__addrinfo_set", }); - @export(InternalDNS.us_getaddrinfo, .{ + @export(&InternalDNS.us_getaddrinfo, .{ .name = "Bun__addrinfo_get", }); - @export(InternalDNS.freeaddrinfo, .{ + @export(&InternalDNS.freeaddrinfo, .{ .name = "Bun__addrinfo_freeRequest", }); - @export(InternalDNS.getRequestResult, .{ + @export(&InternalDNS.getRequestResult, .{ .name = "Bun__addrinfo_getRequestResult", }); } @@ -1802,7 +1799,7 @@ pub const DNSResolver = struct { pending_nameinfo_cache_cares: NameInfoPendingCache = NameInfoPendingCache.init(), pub usingnamespace JSC.Codegen.JSDNSResolver; - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); const PollsMap = std.AutoArrayHashMap(c_ares.ares_socket_t, *PollType); @@ -1912,7 +1909,7 @@ pub const DNSResolver = struct { } fn anyRequestsPending(this: *DNSResolver) bool { - inline for (@typeInfo(DNSResolver).Struct.fields) |field| { + inline for (@typeInfo(DNSResolver).@"struct".fields) |field| { if (comptime std.mem.startsWith(u8, field.name, "pending_")) { const set = &@field(this, field.name).available; if (set.count() < set.capacity()) { @@ -3396,40 +3393,40 @@ pub const DNSResolver = struct { comptime { const js_resolve = JSC.toJSHostFunction(globalResolve); - @export(js_resolve, .{ .name = "Bun__DNS__resolve" }); + @export(&js_resolve, .{ .name = "Bun__DNS__resolve" }); const js_lookup = JSC.toJSHostFunction(globalLookup); - @export(js_lookup, .{ .name = "Bun__DNS__lookup" }); + @export(&js_lookup, .{ .name = "Bun__DNS__lookup" }); const js_resolveTxt = JSC.toJSHostFunction(globalResolveTxt); - @export(js_resolveTxt, .{ .name = "Bun__DNS__resolveTxt" }); + @export(&js_resolveTxt, .{ .name = "Bun__DNS__resolveTxt" }); const js_resolveSoa = JSC.toJSHostFunction(globalResolveSoa); - @export(js_resolveSoa, .{ .name = "Bun__DNS__resolveSoa" }); + 
@export(&js_resolveSoa, .{ .name = "Bun__DNS__resolveSoa" }); const js_resolveMx = JSC.toJSHostFunction(globalResolveMx); - @export(js_resolveMx, .{ .name = "Bun__DNS__resolveMx" }); + @export(&js_resolveMx, .{ .name = "Bun__DNS__resolveMx" }); const js_resolveNaptr = JSC.toJSHostFunction(globalResolveNaptr); - @export(js_resolveNaptr, .{ .name = "Bun__DNS__resolveNaptr" }); + @export(&js_resolveNaptr, .{ .name = "Bun__DNS__resolveNaptr" }); const js_resolveSrv = JSC.toJSHostFunction(globalResolveSrv); - @export(js_resolveSrv, .{ .name = "Bun__DNS__resolveSrv" }); + @export(&js_resolveSrv, .{ .name = "Bun__DNS__resolveSrv" }); const js_resolveCaa = JSC.toJSHostFunction(globalResolveCaa); - @export(js_resolveCaa, .{ .name = "Bun__DNS__resolveCaa" }); + @export(&js_resolveCaa, .{ .name = "Bun__DNS__resolveCaa" }); const js_resolveNs = JSC.toJSHostFunction(globalResolveNs); - @export(js_resolveNs, .{ .name = "Bun__DNS__resolveNs" }); + @export(&js_resolveNs, .{ .name = "Bun__DNS__resolveNs" }); const js_resolvePtr = JSC.toJSHostFunction(globalResolvePtr); - @export(js_resolvePtr, .{ .name = "Bun__DNS__resolvePtr" }); + @export(&js_resolvePtr, .{ .name = "Bun__DNS__resolvePtr" }); const js_resolveCname = JSC.toJSHostFunction(globalResolveCname); - @export(js_resolveCname, .{ .name = "Bun__DNS__resolveCname" }); + @export(&js_resolveCname, .{ .name = "Bun__DNS__resolveCname" }); const js_resolveAny = JSC.toJSHostFunction(globalResolveAny); - @export(js_resolveAny, .{ .name = "Bun__DNS__resolveAny" }); + @export(&js_resolveAny, .{ .name = "Bun__DNS__resolveAny" }); const js_getGlobalServers = JSC.toJSHostFunction(getGlobalServers); - @export(js_getGlobalServers, .{ .name = "Bun__DNS__getServers" }); + @export(&js_getGlobalServers, .{ .name = "Bun__DNS__getServers" }); const js_setGlobalServers = JSC.toJSHostFunction(setGlobalServers); - @export(js_setGlobalServers, .{ .name = "Bun__DNS__setServers" }); + @export(&js_setGlobalServers, .{ .name = "Bun__DNS__setServers" }); 
const js_reverse = JSC.toJSHostFunction(globalReverse); - @export(js_reverse, .{ .name = "Bun__DNS__reverse" }); + @export(&js_reverse, .{ .name = "Bun__DNS__reverse" }); const js_lookupService = JSC.toJSHostFunction(globalLookupService); - @export(js_lookupService, .{ .name = "Bun__DNS__lookupService" }); + @export(&js_lookupService, .{ .name = "Bun__DNS__lookupService" }); const js_prefetchFromJS = JSC.toJSHostFunction(InternalDNS.prefetchFromJS); - @export(js_prefetchFromJS, .{ .name = "Bun__DNS__prefetch" }); + @export(&js_prefetchFromJS, .{ .name = "Bun__DNS__prefetch" }); const js_getDNSCacheStats = JSC.toJSHostFunction(InternalDNS.getDNSCacheStats); - @export(js_getDNSCacheStats, .{ .name = "Bun__DNS__getCacheStats" }); + @export(&js_getDNSCacheStats, .{ .name = "Bun__DNS__getCacheStats" }); } }; diff --git a/src/bun.js/api/bun/h2_frame_parser.zig b/src/bun.js/api/bun/h2_frame_parser.zig index 9522df43bf7f2e..87b719740cf123 100644 --- a/src/bun.js/api/bun/h2_frame_parser.zig +++ b/src/bun.js/api/bun/h2_frame_parser.zig @@ -530,7 +530,7 @@ const Handlers = struct { globalObject: *JSC.JSGlobalObject, strong_ctx: JSC.Strong = .{}, - pub fn callEventHandler(this: *Handlers, comptime event: @Type(.EnumLiteral), thisValue: JSValue, data: []const JSValue) bool { + pub fn callEventHandler(this: *Handlers, comptime event: @Type(.enum_literal), thisValue: JSValue, data: []const JSValue) bool { const callback = @field(this, @tagName(event)); if (callback == .zero) { return false; @@ -546,7 +546,7 @@ const Handlers = struct { return true; } - pub fn callEventHandlerWithResult(this: *Handlers, comptime event: @Type(.EnumLiteral), thisValue: JSValue, data: []const JSValue) JSValue { + pub fn callEventHandlerWithResult(this: *Handlers, comptime event: @Type(.enum_literal), thisValue: JSValue, data: []const JSValue) JSValue { const callback = @field(this, @tagName(event)); if (callback == .zero) { return JSC.JSValue.zero; @@ -643,8 +643,7 @@ const Handlers = struct { pub 
const H2FrameParser = struct { pub const log = Output.scoped(.H2FrameParser, false); pub usingnamespace JSC.Codegen.JSH2FrameParser; - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); - pub const DEBUG_REFCOUNT_NAME = "H2"; + pub usingnamespace bun.NewRefCounted(@This(), deinit, "H2"); const ENABLE_AUTO_CORK = true; // ENABLE CORK OPTIMIZATION const ENABLE_ALLOCATOR_POOL = true; // ENABLE HIVE ALLOCATOR OPTIMIZATION @@ -1356,7 +1355,7 @@ pub const H2FrameParser = struct { _ = this.write(&buffer); } - pub fn dispatch(this: *H2FrameParser, comptime event: @Type(.EnumLiteral), value: JSC.JSValue) void { + pub fn dispatch(this: *H2FrameParser, comptime event: @Type(.enum_literal), value: JSC.JSValue) void { JSC.markBinding(@src()); const ctx_value = this.strong_ctx.get() orelse return; @@ -1364,7 +1363,7 @@ pub const H2FrameParser = struct { _ = this.handlers.callEventHandler(event, ctx_value, &[_]JSC.JSValue{ ctx_value, value }); } - pub fn call(this: *H2FrameParser, comptime event: @Type(.EnumLiteral), value: JSC.JSValue) JSValue { + pub fn call(this: *H2FrameParser, comptime event: @Type(.enum_literal), value: JSC.JSValue) JSValue { JSC.markBinding(@src()); const ctx_value = this.strong_ctx.get() orelse return .zero; @@ -1376,7 +1375,7 @@ pub const H2FrameParser = struct { _ = this.handlers.callWriteCallback(callback, &[_]JSC.JSValue{}); } - pub fn dispatchWithExtra(this: *H2FrameParser, comptime event: @Type(.EnumLiteral), value: JSC.JSValue, extra: JSC.JSValue) void { + pub fn dispatchWithExtra(this: *H2FrameParser, comptime event: @Type(.enum_literal), value: JSC.JSValue, extra: JSC.JSValue) void { JSC.markBinding(@src()); const ctx_value = this.strong_ctx.get() orelse return; @@ -1385,7 +1384,7 @@ pub const H2FrameParser = struct { _ = this.handlers.callEventHandler(event, ctx_value, &[_]JSC.JSValue{ ctx_value, value, extra }); } - pub fn dispatchWith2Extra(this: *H2FrameParser, comptime event: @Type(.EnumLiteral), value: JSC.JSValue, extra: 
JSC.JSValue, extra2: JSC.JSValue) void { + pub fn dispatchWith2Extra(this: *H2FrameParser, comptime event: @Type(.enum_literal), value: JSC.JSValue, extra: JSC.JSValue, extra2: JSC.JSValue) void { JSC.markBinding(@src()); const ctx_value = this.strong_ctx.get() orelse return; @@ -1394,7 +1393,7 @@ pub const H2FrameParser = struct { extra2.ensureStillAlive(); _ = this.handlers.callEventHandler(event, ctx_value, &[_]JSC.JSValue{ ctx_value, value, extra, extra2 }); } - pub fn dispatchWith3Extra(this: *H2FrameParser, comptime event: @Type(.EnumLiteral), value: JSC.JSValue, extra: JSC.JSValue, extra2: JSC.JSValue, extra3: JSC.JSValue) void { + pub fn dispatchWith3Extra(this: *H2FrameParser, comptime event: @Type(.enum_literal), value: JSC.JSValue, extra: JSC.JSValue, extra2: JSC.JSValue, extra3: JSC.JSValue) void { JSC.markBinding(@src()); const ctx_value = this.strong_ctx.get() orelse return; diff --git a/src/bun.js/api/bun/process.zig b/src/bun.js/api/bun/process.zig index d7e7c5be142151..9f69ce021c9d2d 100644 --- a/src/bun.js/api/bun/process.zig +++ b/src/bun.js/api/bun/process.zig @@ -11,12 +11,12 @@ const Maybe = JSC.Maybe; const win_rusage = struct { utime: struct { - tv_sec: i64 = 0, - tv_usec: i64 = 0, + sec: i64 = 0, + usec: i64 = 0, }, stime: struct { - tv_sec: i64 = 0, - tv_usec: i64 = 0, + sec: i64 = 0, + usec: i64 = 0, }, maxrss: u64 = 0, ixrss: u0 = 0, @@ -54,16 +54,16 @@ pub fn uv_getrusage(process: *uv.uv_process_t) win_rusage { var kerneltime: WinTime = undefined; var usertime: WinTime = undefined; // We at least get process times - if (std.os.windows.kernel32.GetProcessTimes(process_pid, &starttime, &exittime, &kerneltime, &usertime) == 1) { + if (bun.windows.GetProcessTimes(process_pid, &starttime, &exittime, &kerneltime, &usertime) == 1) { var temp: u64 = (@as(u64, kerneltime.dwHighDateTime) << 32) | kerneltime.dwLowDateTime; if (temp > 0) { - usage_info.stime.tv_sec = @intCast(temp / 10000000); - usage_info.stime.tv_usec = @intCast(temp % 1000000); 
+ usage_info.stime.sec = @intCast(temp / 10000000); + usage_info.stime.usec = @intCast(temp % 1000000); } temp = (@as(u64, usertime.dwHighDateTime) << 32) | usertime.dwLowDateTime; if (temp > 0) { - usage_info.utime.tv_sec = @intCast(temp / 10000000); - usage_info.utime.tv_usec = @intCast(temp % 1000000); + usage_info.utime.sec = @intCast(temp / 10000000); + usage_info.utime.usec = @intCast(temp % 1000000); } } var counters: IO_COUNTERS = .{}; @@ -110,23 +110,23 @@ pub const ProcessExitHandler = struct { } switch (this.ptr.tag()) { - .Subprocess => { + @field(TaggedPointer.Tag, @typeName(Subprocess)) => { const subprocess = this.ptr.as(Subprocess); subprocess.onProcessExit(process, status, rusage); }, - .LifecycleScriptSubprocess => { + @field(TaggedPointer.Tag, @typeName(LifecycleScriptSubprocess)) => { const subprocess = this.ptr.as(LifecycleScriptSubprocess); subprocess.onProcessExit(process, status, rusage); }, - .ProcessHandle => { + @field(TaggedPointer.Tag, @typeName(ProcessHandle)) => { const subprocess = this.ptr.as(ProcessHandle); subprocess.onProcessExit(process, status, rusage); }, - @field(TaggedPointer.Tag, bun.meta.typeBaseName(@typeName(ShellSubprocess))) => { + @field(TaggedPointer.Tag, @typeName(ShellSubprocess)) => { const subprocess = this.ptr.as(ShellSubprocess); subprocess.onProcessExit(process, status, rusage); }, - @field(TaggedPointer.Tag, bun.meta.typeBaseName(@typeName(SyncProcess))) => { + @field(TaggedPointer.Tag, @typeName(SyncProcess)) => { const subprocess = this.ptr.as(SyncProcess); if (comptime Environment.isPosix) { @panic("This code should not reached"); @@ -157,7 +157,7 @@ pub const Process = struct { return @sizeOf(@This()); } - pub usingnamespace bun.NewRefCounted(Process, deinit); + pub usingnamespace bun.NewRefCounted(Process, deinit, null); pub fn setExitHandler(this: *Process, handler: anytype) void { this.exit_handler.init(handler); @@ -924,7 +924,7 @@ const WaiterThreadPosix = struct { .mask = current_mask, .flags = 
std.posix.SA.NOCLDSTOP, }; - std.posix.sigaction(std.posix.SIG.CHLD, &act, null) catch {}; + std.posix.sigaction(std.posix.SIG.CHLD, &act, null); } } @@ -2018,7 +2018,9 @@ pub const sync = struct { pub fn spawn( options: *const Options, ) !Maybe(Result) { - const envp = options.envp orelse std.c.environ; + // [*:null]?[*:0]const u8 + // [*:null]?[*:0]u8 + const envp = options.envp orelse @as([*:null]?[*:0]const u8, @ptrCast(std.c.environ)); const argv = options.argv; var string_builder = bun.StringBuilder{}; defer string_builder.deinit(bun.default_allocator); diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 65f03c1b0bef93..93801a0e809ab1 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -1342,9 +1342,7 @@ fn NewSocket(comptime ssl: bool) type { // This is wasteful because it means we are keeping a JSC::Weak for every single open socket has_pending_activity: std.atomic.Value(bool) = std.atomic.Value(bool).init(true), native_callback: NativeCallbacks = .none, - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); - - pub const DEBUG_REFCOUNT_NAME = "Socket"; + pub usingnamespace bun.NewRefCounted(@This(), deinit, "Socket"); // We use this direct callbacks on HTTP2 when available pub const NativeCallbacks = union(enum) { @@ -1395,8 +1393,6 @@ fn NewSocket(comptime ssl: bool) type { JSC.Codegen.JSTLSSocket; pub fn hasPendingActivity(this: *This) callconv(.C) bool { - @fence(.acquire); - return this.has_pending_activity.load(.acquire); } diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index 28bf780f8f587a..5a30592c49fc07 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -48,8 +48,8 @@ pub const ResourceUsage = struct { var cpu = JSC.JSValue.createEmptyObjectWithNullPrototype(globalObject); const rusage = this.rusage; - const usrTime = JSValue.fromTimevalNoTruncate(globalObject, rusage.utime.tv_usec, rusage.utime.tv_sec); - 
const sysTime = JSValue.fromTimevalNoTruncate(globalObject, rusage.stime.tv_usec, rusage.stime.tv_sec); + const usrTime = JSValue.fromTimevalNoTruncate(globalObject, rusage.utime.usec, rusage.utime.sec); + const sysTime = JSValue.fromTimevalNoTruncate(globalObject, rusage.stime.usec, rusage.stime.sec); cpu.put(globalObject, JSC.ZigString.static("user"), usrTime); cpu.put(globalObject, JSC.ZigString.static("system"), sysTime); @@ -199,7 +199,7 @@ pub const Subprocess = struct { ref_count: u32 = 1, abort_signal: ?*JSC.AbortSignal = null, - usingnamespace bun.NewRefCounted(@This(), Subprocess.deinit); + usingnamespace bun.NewRefCounted(@This(), deinit, null); pub const Flags = packed struct { is_sync: bool = false, @@ -279,7 +279,6 @@ pub const Subprocess = struct { } pub fn updateHasPendingActivity(this: *Subprocess) void { - @fence(.seq_cst); if (comptime Environment.isDebug) { log("updateHasPendingActivity() {any} -> {any}", .{ this.has_pending_activity.raw, @@ -342,7 +341,6 @@ pub const Subprocess = struct { } pub fn hasPendingActivity(this: *Subprocess) callconv(.C) bool { - @fence(.acquire); return this.has_pending_activity.load(.acquire); } @@ -684,7 +682,7 @@ pub const Subprocess = struct { return this.process.kill(@intCast(sig)); } - fn hasCalledGetter(this: *Subprocess, comptime getter: @Type(.EnumLiteral)) bool { + fn hasCalledGetter(this: *Subprocess, comptime getter: @Type(.enum_literal)) bool { return this.observable_getters.contains(getter); } @@ -853,7 +851,7 @@ pub const Subprocess = struct { ref_count: u32 = 1, buffer: []const u8 = "", - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + pub usingnamespace bun.NewRefCounted(@This(), _deinit, null); const This = @This(); const print = bun.Output.scoped(.StaticPipeWriter, false); @@ -940,7 +938,7 @@ pub const Subprocess = struct { this.process.onCloseIO(.stdin); } - pub fn deinit(this: *This) void { + fn _deinit(this: *This) void { this.writer.end(); this.source.detach(); this.destroy(); 
@@ -981,7 +979,7 @@ pub const Subprocess = struct { pub const IOReader = bun.io.BufferedReader; pub const Poll = IOReader; - pub usingnamespace bun.NewRefCounted(PipeReader, PipeReader.deinit); + pub usingnamespace bun.NewRefCounted(PipeReader, _deinit, null); pub fn memoryCost(this: *const PipeReader) usize { return this.reader.memoryCost(); @@ -1148,7 +1146,7 @@ pub const Subprocess = struct { return this.event_loop.virtual_machine.uwsLoop(); } - fn deinit(this: *PipeReader) void { + fn _deinit(this: *PipeReader) void { if (comptime Environment.isPosix) { bun.assert(this.reader.isDone()); } @@ -1570,7 +1568,7 @@ pub const Subprocess = struct { } } - fn closeIO(this: *Subprocess, comptime io: @Type(.EnumLiteral)) void { + fn closeIO(this: *Subprocess, comptime io: @Type(.enum_literal)) void { if (this.closed.contains(io)) return; this.closed.insert(io); diff --git a/src/bun.js/api/bun/udp_socket.zig b/src/bun.js/api/bun/udp_socket.zig index 0f71f800e3a318..e29508f3a05275 100644 --- a/src/bun.js/api/bun/udp_socket.zig +++ b/src/bun.js/api/bun/udp_socket.zig @@ -282,7 +282,7 @@ pub const UDPSocket = struct { globalThis: *JSGlobalObject, thisValue: JSValue = .zero, - ref: JSC.Ref = JSC.Ref.init(), + jsc_ref: JSC.Ref = JSC.Ref.init(), poll_ref: Async.KeepAlive = Async.KeepAlive.init(), // if marked as closed the socket pointer may be stale closed: bool = false, diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index 131842881b8668..a50392f7ce4b60 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -275,7 +275,9 @@ pub const FFI = struct { "macosx", "-show-sdk-path", }, - .envp = std.c.environ, + // ?[*:null]?[*:0]const u8 + // [*:null]?[*:0]u8 + .envp = @ptrCast(std.c.environ), }) catch return; if (process == .result) { defer process.result.deinit(); diff --git a/src/bun.js/api/filesystem_router.zig b/src/bun.js/api/filesystem_router.zig index a85e8697301155..0569d5f858d65e 100644 --- a/src/bun.js/api/filesystem_router.zig +++ 
b/src/bun.js/api/filesystem_router.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; -const JavaScript = @import("../javascript.zig"); const QueryStringMap = @import("../../url.zig").QueryStringMap; const CombinedScanner = @import("../../url.zig").CombinedScanner; const bun = @import("root").bun; @@ -9,7 +8,6 @@ const JSC = bun.JSC; const js = JSC.C; const WebCore = JSC.WebCore; const Transpiler = bun.transpiler; -const VirtualMachine = JavaScript.VirtualMachine; const ScriptSrcStream = std.io.FixedBufferStream([]u8); const ZigString = JSC.ZigString; const Fs = @import("../../fs.zig"); @@ -605,7 +603,7 @@ pub const MatchedRoute = struct { var writer = stream.writer(); JSC.API.Bun.getPublicPathWithAssetPrefix( this.route.file_path, - if (this.base_dir) |base_dir| base_dir.slice() else VirtualMachine.get().transpiler.fs.top_level_dir, + if (this.base_dir) |base_dir| base_dir.slice() else JSC.VirtualMachine.get().transpiler.fs.top_level_dir, if (this.origin) |origin| URL.parse(origin.slice()) else URL{}, if (this.asset_prefix) |prefix| prefix.slice() else "", @TypeOf(&writer), diff --git a/src/bun.js/api/glob.zig b/src/bun.js/api/glob.zig index d81225f20b19fc..b75a29da95f914 100644 --- a/src/bun.js/api/glob.zig +++ b/src/bun.js/api/glob.zig @@ -324,17 +324,14 @@ pub fn finalize( } pub fn hasPendingActivity(this: *Glob) callconv(.C) bool { - @fence(.seq_cst); return this.has_pending_activity.load(.seq_cst) > 0; } fn incrPendingActivityFlag(has_pending_activity: *std.atomic.Value(usize)) void { - @fence(.seq_cst); _ = has_pending_activity.fetchAdd(1, .seq_cst); } fn decrPendingActivityFlag(has_pending_activity: *std.atomic.Value(usize)) void { - @fence(.seq_cst); _ = has_pending_activity.fetchSub(1, .seq_cst); } diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index da0db62909f3ae..4eaa5441e14413 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -18,7 
+18,7 @@ pub const LOLHTMLContext = struct { document_handlers: std.ArrayListUnmanaged(*DocumentHandler) = .{}, ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); fn deinit(this: *LOLHTMLContext) void { for (this.selectors.items) |selector| { @@ -395,7 +395,7 @@ pub const HTMLRewriter = struct { bodyValueBufferer: ?JSC.WebCore.BodyValueBufferer = null, tmp_sync_error: ?*JSC.JSValue = null, ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(BufferOutputSink, deinit); + pub usingnamespace bun.NewRefCounted(BufferOutputSink, deinit, null); // const log = bun.Output.scoped(.BufferOutputSink, false); pub fn init(context: *LOLHTMLContext, global: *JSGlobalObject, original: *Response, builder: *LOLHTML.HTMLRewriter.Builder) JSC.JSValue { @@ -1066,7 +1066,7 @@ pub const TextChunk = struct { ref_count: u32 = 1, pub usingnamespace JSC.Codegen.JSTextChunk; - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub fn init(text_chunk: *LOLHTML.TextChunk) *TextChunk { return TextChunk.new(.{ .text_chunk = text_chunk, .ref_count = 2 }); } @@ -1175,7 +1175,7 @@ pub const DocType = struct { return DocType.new(.{ .doctype = doctype, .ref_count = 2 }); } - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub usingnamespace JSC.Codegen.JSDocType; /// The doctype name. 
@@ -1242,7 +1242,7 @@ pub const DocEnd = struct { doc_end: ?*LOLHTML.DocEnd, ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub usingnamespace JSC.Codegen.JSDocEnd; pub fn init(doc_end: *LOLHTML.DocEnd) *DocEnd { @@ -1291,7 +1291,7 @@ pub const Comment = struct { comment: ?*LOLHTML.Comment = null, ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub usingnamespace JSC.Codegen.JSComment; pub fn init(comment: *LOLHTML.Comment) *Comment { @@ -1436,7 +1436,7 @@ pub const EndTag = struct { }; pub usingnamespace JSC.Codegen.JSEndTag; - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); fn contentHandler(this: *EndTag, comptime Callback: (fn (*LOLHTML.EndTag, []const u8, bool) LOLHTML.Error!void), thisObject: JSValue, globalObject: *JSGlobalObject, content: ZigString, contentOptions: ?ContentOptions) JSValue { if (this.end_tag == null) @@ -1554,7 +1554,7 @@ pub const AttributeIterator = struct { } pub usingnamespace JSC.Codegen.JSAttributeIterator; - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub fn next(this: *AttributeIterator, globalObject: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSValue { const done_label = JSC.ZigString.static("done"); @@ -1591,7 +1591,7 @@ pub const Element = struct { ref_count: u32 = 1, pub usingnamespace JSC.Codegen.JSElement; - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub fn init(element: *LOLHTML.Element) *Element { return Element.new(.{ .element = element, .ref_count = 2 }); diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 16a08406db2174..304f30fbece051 100644 --- a/src/bun.js/api/server.zig +++ 
b/src/bun.js/api/server.zig @@ -1607,7 +1607,7 @@ pub const AnyRequestContext = struct { tagged_pointer: Pointer, - pub const Null = .{ .tagged_pointer = Pointer.Null }; + pub const Null: @This() = .{ .tagged_pointer = Pointer.Null }; pub fn init(request_ctx: anytype) AnyRequestContext { return .{ .tagged_pointer = Pointer.init(request_ctx) }; @@ -2751,7 +2751,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp } } - pub fn onReadFile(this: *RequestContext, result: Blob.ReadFile.ResultType) void { + pub fn onReadFile(this: *RequestContext, result: Blob.ReadFileResultType) void { defer this.deref(); if (this.isAbortedOrEnded()) { @@ -4126,7 +4126,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp var body = this.request_body.?; var old = body.value; old.Locked.onReceiveValue = null; - var new_body = .{ .Null = {} }; + var new_body: WebCore.Body.Value = .{ .Null = {} }; old.resolve(&new_body, server.globalThis, null); body.value = new_body; } @@ -4182,13 +4182,13 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp comptime { const jsonResolve = JSC.toJSHostFunction(onResolve); - @export(jsonResolve, .{ .name = Export[0].symbol_name }); + @export(&jsonResolve, .{ .name = Export[0].symbol_name }); const jsonReject = JSC.toJSHostFunction(onReject); - @export(jsonReject, .{ .name = Export[1].symbol_name }); + @export(&jsonReject, .{ .name = Export[1].symbol_name }); const jsonResolveStream = JSC.toJSHostFunction(onResolveStream); - @export(jsonResolveStream, .{ .name = Export[2].symbol_name }); + @export(&jsonResolveStream, .{ .name = Export[2].symbol_name }); const jsonRejectStream = JSC.toJSHostFunction(onRejectStream); - @export(jsonRejectStream, .{ .name = Export[3].symbol_name }); + @export(&jsonRejectStream, .{ .name = Export[3].symbol_name }); } }; } @@ -5791,7 +5791,7 @@ const ServePlugins = struct { value: Value, ref_count: u32 = 1, - pub usingnamespace 
bun.NewRefCounted(ServePlugins, deinit); + pub usingnamespace bun.NewRefCounted(ServePlugins, deinit, null); pub const Value = union(enum) { pending: struct { @@ -5949,8 +5949,8 @@ const ServePlugins = struct { } comptime { - @export(onResolve, .{ .name = "BunServe__onResolvePlugins" }); - @export(onReject, .{ .name = "BunServe__onRejectPlugins" }); + @export(&onResolve, .{ .name = "BunServe__onResolvePlugins" }); + @export(&onReject, .{ .name = "BunServe__onRejectPlugins" }); } }; diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index 02db1b48968ed3..39bc5ac5198442 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -77,7 +77,7 @@ pub const HTMLBundleRoute = struct { }); } - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + pub usingnamespace bun.NewRefCounted(@This(), _deinit, null); pub const Value = union(enum) { pending_plugins, @@ -114,7 +114,7 @@ pub const HTMLBundleRoute = struct { } }; - pub fn deinit(this: *HTMLBundleRoute) void { + fn _deinit(this: *HTMLBundleRoute) void { for (this.pending_responses.items) |pending_response| { pending_response.deref(); } @@ -512,9 +512,9 @@ pub const HTMLBundleRoute = struct { server: ?AnyServer = null, route: *HTMLBundleRoute, - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + pub usingnamespace bun.NewRefCounted(@This(), __deinit, null); - pub fn deinit(this: *PendingResponse) void { + fn __deinit(this: *PendingResponse) void { if (this.is_response_pending) { this.resp.clearAborted(); this.resp.clearOnWritable(); @@ -544,7 +544,7 @@ pub const HTMLBundleRoute = struct { }; pub usingnamespace JSC.Codegen.JSHTMLBundle; -pub usingnamespace bun.NewRefCounted(HTMLBundle, deinit); +pub usingnamespace bun.NewRefCounted(HTMLBundle, deinit, null); const bun = @import("root").bun; const std = @import("std"); const JSC = bun.JSC; diff --git a/src/bun.js/api/server/StaticRoute.zig 
b/src/bun.js/api/server/StaticRoute.zig index 31336978771a8d..7270198b051b48 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -10,7 +10,7 @@ headers: Headers = .{ }, ref_count: u32 = 1, -pub usingnamespace bun.NewRefCounted(@This(), deinit); +pub usingnamespace bun.NewRefCounted(@This(), deinit, null); fn deinit(this: *StaticRoute) void { this.blob.detach(); diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig index db0594382e46d4..8f58a2b083e71c 100644 --- a/src/bun.js/base.zig +++ b/src/bun.js/base.zig @@ -9,18 +9,11 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; -const JavaScript = @import("./javascript.zig"); const JSC = bun.JSC; -const WebCore = @import("./webcore.zig"); const Test = @import("./test/jest.zig"); -const Fetch = WebCore.Fetch; -const Response = WebCore.Response; -const Request = WebCore.Request; const Router = @import("./api/filesystem_router.zig"); const IdentityContext = @import("../identity_context.zig").IdentityContext; const uws = bun.uws; -const Body = WebCore.Body; const TaggedPointerTypes = @import("../tagged_pointer.zig"); const TaggedPointerUnion = TaggedPointerTypes.TaggedPointerUnion; @@ -41,11 +34,11 @@ pub const Lifetime = enum { pub fn toJS(globalObject: *JSC.JSGlobalObject, comptime ValueType: type, value: ValueType, comptime lifetime: Lifetime) JSC.JSValue { const Type = comptime brk: { var CurrentType = ValueType; - if (@typeInfo(ValueType) == .Optional) { - CurrentType = @typeInfo(ValueType).Optional.child; + if (@typeInfo(ValueType) == .optional) { + CurrentType = @typeInfo(ValueType).optional.child; } - break :brk if (@typeInfo(CurrentType) == .Pointer and @typeInfo(CurrentType).Pointer.size == .One) - @typeInfo(CurrentType).Pointer.child + break :brk if (@typeInfo(CurrentType) == .pointer and @typeInfo(CurrentType).pointer.size == .one) + 
@typeInfo(CurrentType).pointer.child else CurrentType; }; @@ -103,16 +96,16 @@ pub fn toJS(globalObject: *JSC.JSGlobalObject, comptime ValueType: type, value: return array; } - if (comptime @hasDecl(Type, "toJSNewlyCreated") and @typeInfo(@TypeOf(@field(Type, "toJSNewlyCreated"))).Fn.params.len == 2) { + if (comptime @hasDecl(Type, "toJSNewlyCreated") and @typeInfo(@TypeOf(@field(Type, "toJSNewlyCreated"))).@"fn".params.len == 2) { return value.toJSNewlyCreated(globalObject); } - if (comptime @hasDecl(Type, "toJS") and @typeInfo(@TypeOf(@field(Type, "toJS"))).Fn.params.len == 2) { + if (comptime @hasDecl(Type, "toJS") and @typeInfo(@TypeOf(@field(Type, "toJS"))).@"fn".params.len == 2) { return value.toJS(globalObject); } // must come after toJS check in case this enum implements its own serializer. - if (@typeInfo(Type) == .Enum) { + if (@typeInfo(Type) == .@"enum") { // FIXME: creates non-normalized integers (e.g. u2), which // aren't handled by `jsNumberWithType` rn return JSC.JSValue.jsNumberWithType(u32, @as(u32, @intFromEnum(value))); @@ -168,9 +161,6 @@ pub const Properties = struct { } }; -const JSValue = JSC.JSValue; -const ZigString = JSC.ZigString; - pub const PathString = bun.PathString; pub fn createError( @@ -205,7 +195,7 @@ fn toTypeErrorWithCode( args: anytype, ctx: js.JSContextRef, ) JSC.JSValue { - @setCold(true); + @branchHint(.cold); var zig_str: JSC.ZigString = undefined; if (comptime std.meta.fields(@TypeOf(args)).len == 0) { zig_str = JSC.ZigString.init(fmt); @@ -216,7 +206,7 @@ fn toTypeErrorWithCode( zig_str.detectEncoding(); zig_str.mark(); } - const code_str = ZigString.init(code); + const code_str = JSC.ZigString.init(code); return JSC.JSValue.createTypeError(&zig_str, &code_str, ctx); } @@ -235,7 +225,7 @@ pub fn throwInvalidArguments( ctx: js.JSContextRef, exception: ExceptionValueRef, ) void { - @setCold(true); + @branchHint(.cold); exception.* = JSC.Error.ERR_INVALID_ARG_TYPE.fmt(ctx, fmt, args).asObjectRef(); } @@ -244,7 +234,7 @@ 
pub fn toInvalidArguments( args: anytype, ctx: js.JSContextRef, ) JSC.JSValue { - @setCold(true); + @branchHint(.cold); return JSC.Error.ERR_INVALID_ARG_TYPE.fmt(ctx, fmt, args); } @@ -253,7 +243,7 @@ pub fn getAllocator(_: js.JSContextRef) std.mem.Allocator { } /// Print a JSValue to stdout; this is only meant for debugging purposes -pub fn dump(value: JSValue, globalObject: *JSC.JSGlobalObject) !void { +pub fn dump(value: JSC.JSValue, globalObject: *JSC.JSGlobalObject) !void { var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalObject }; try Output.errorWriter().print("{}\n", .{value.toFmt(globalObject, &formatter)}); Output.flush(); @@ -389,7 +379,7 @@ pub const ArrayBuffer = extern struct { return Stream{ .pos = 0, .buf = this.slice() }; } - pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSValue.JSType) JSValue { + pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSC.JSValue.JSType) JSC.JSValue { JSC.markBinding(@src()); return switch (comptime kind) { .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false), @@ -398,7 +388,7 @@ pub const ArrayBuffer = extern struct { }; } - pub fn createEmpty(globalThis: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType) JSValue { + pub fn createEmpty(globalThis: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType) JSC.JSValue { JSC.markBinding(@src()); return switch (comptime kind) { @@ -408,18 +398,18 @@ pub const ArrayBuffer = extern struct { }; } - pub fn createBuffer(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSValue { + pub fn createBuffer(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { JSC.markBinding(@src()); return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, true); } - pub fn createUint8Array(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSValue { + pub fn createUint8Array(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { 
JSC.markBinding(@src()); return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false); } - extern "C" fn Bun__allocUint8ArrayForCopy(*JSC.JSGlobalObject, usize, **anyopaque) JSValue; - pub fn allocBuffer(globalThis: *JSC.JSGlobalObject, len: usize) struct { JSValue, []u8 } { + extern "C" fn Bun__allocUint8ArrayForCopy(*JSC.JSGlobalObject, usize, **anyopaque) JSC.JSValue; + pub fn allocBuffer(globalThis: *JSC.JSGlobalObject, len: usize) struct { JSC.JSValue, []u8 } { var ptr: [*]u8 = undefined; const buffer = Bun__allocUint8ArrayForCopy(globalThis, len, @ptrCast(&ptr)); if (buffer.isEmpty()) { @@ -428,8 +418,8 @@ pub const ArrayBuffer = extern struct { return .{ buffer, ptr[0..len] }; } - extern "C" fn Bun__createUint8ArrayForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize, buffer: bool) JSValue; - extern "C" fn Bun__createArrayBufferForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSValue; + extern "C" fn Bun__createUint8ArrayForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize, buffer: bool) JSC.JSValue; + extern "C" fn Bun__createArrayBufferForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSC.JSValue; pub fn fromTypedArray(ctx: JSC.C.JSContextRef, value: JSC.JSValue) ArrayBuffer { var out = std.mem.zeroes(ArrayBuffer); @@ -664,7 +654,7 @@ pub const MarkedArrayBuffer = struct { } pub fn toNodeBuffer(this: *const MarkedArrayBuffer, ctx: js.JSContextRef) JSC.JSValue { - return JSValue.createBufferWithCtx(ctx, this.buffer.byteSlice(), this.buffer.ptr, MarkedArrayBuffer_deallocator); + return JSC.JSValue.createBufferWithCtx(ctx, this.buffer.byteSlice(), this.buffer.ptr, MarkedArrayBuffer_deallocator); } pub fn toJSObjectRef(this: *const MarkedArrayBuffer, ctx: js.JSContextRef, exception: js.ExceptionRef) js.JSObjectRef { @@ -723,7 +713,7 @@ pub const RefString = struct { pub const Hash = u32; pub const Map = std.HashMap(Hash, *JSC.RefString, IdentityContext(Hash), 80); - pub fn toJS(this: 
*RefString, global: *JSC.JSGlobalObject) JSValue { + pub fn toJS(this: *RefString, global: *JSC.JSGlobalObject) JSC.JSValue { return bun.String.init(this.impl).toJS(global); } @@ -795,9 +785,9 @@ const Expect = Test.Expect; const DescribeScope = Test.DescribeScope; const TestScope = Test.TestScope; const NodeFS = JSC.Node.NodeFS; -const TextEncoder = WebCore.TextEncoder; -const TextDecoder = WebCore.TextDecoder; -const TextEncoderStreamEncoder = WebCore.TextEncoderStreamEncoder; +const TextEncoder = JSC.WebCore.TextEncoder; +const TextDecoder = JSC.WebCore.TextDecoder; +const TextEncoderStreamEncoder = JSC.WebCore.TextEncoderStreamEncoder; const HTMLRewriter = JSC.Cloudflare.HTMLRewriter; const Element = JSC.Cloudflare.Element; const Comment = JSC.Cloudflare.Comment; @@ -914,7 +904,7 @@ pub const DOMEffect = struct { }; fn DOMCallArgumentType(comptime Type: type) []const u8 { - const ChildType = if (@typeInfo(Type) == .Pointer) std.meta.Child(Type) else Type; + const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; return switch (ChildType) { i8, u8, i16, u16, i32 => "JSC::SpecInt32Only", u32, i64, u64 => "JSC::SpecInt52Any", @@ -927,7 +917,7 @@ fn DOMCallArgumentType(comptime Type: type) []const u8 { } fn DOMCallArgumentTypeWrapper(comptime Type: type) []const u8 { - const ChildType = if (@typeInfo(Type) == .Pointer) std.meta.Child(Type) else Type; + const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; return switch (ChildType) { i32 => "int32_t", f64 => "double", @@ -941,7 +931,7 @@ fn DOMCallArgumentTypeWrapper(comptime Type: type) []const u8 { } fn DOMCallResultType(comptime Type: type) []const u8 { - const ChildType = if (@typeInfo(Type) == .Pointer) std.meta.Child(Type) else Type; + const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; return switch (ChildType) { i32 => "JSC::SpecInt32Only", bool => "JSC::SpecBoolean", @@ -975,7 +965,7 @@ pub fn DOMCall( thisValue: 
JSC.JSValue, arguments_ptr: [*]const JSC.JSValue, arguments_len: usize, - ) callconv(JSC.conv) JSValue { + ) callconv(JSC.conv) JSC.JSValue { return JSC.toJSHostValue(globalObject, @field(Container, functionName)(globalObject, thisValue, arguments_ptr[0..arguments_len])); } @@ -983,7 +973,7 @@ pub fn DOMCall( pub const Fastpath = @TypeOf(fastpath); pub const Arguments = std.meta.ArgsTuple(Fastpath); - pub fn put(globalObject: *JSC.JSGlobalObject, value: JSValue) void { + pub fn put(globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { shim.cppFn("put", .{ globalObject, value }); } @@ -992,8 +982,8 @@ pub fn DOMCall( pub const Extern = [_][]const u8{"put"}; comptime { - @export(slowpath, .{ .name = shim.symbolName("slowpath") }); - @export(fastpath, .{ .name = shim.symbolName("fastpath") }); + @export(&slowpath, .{ .name = shim.symbolName("slowpath") }); + @export(&fastpath, .{ .name = shim.symbolName("fastpath") }); } }; } @@ -1009,7 +999,7 @@ pub fn wrapInstanceMethod( ) InstanceMethodType(Container) { return struct { const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).Fn; + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).@"fn"; const Args = std.meta.ArgsTuple(FunctionType); const eater = if (auto_protect) JSC.Node.ArgumentsSlice.protectEatNext else JSC.Node.ArgumentsSlice.nextEat; @@ -1091,7 +1081,7 @@ pub fn wrapInstanceMethod( args[i] = null; } }, - ZigString => { + JSC.ZigString => { var string_value = eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing argument", .{}); @@ -1113,32 +1103,32 @@ pub fn wrapInstanceMethod( args[i] = null; } }, - *Response => { + *JSC.WebCore.Response => { args[i] = (eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing Response object", .{}); - }).as(Response) orelse { + }).as(JSC.WebCore.Response) orelse { iter.deinit(); return 
globalThis.throwInvalidArguments("Expected Response object", .{}); }; }, - *Request => { + *JSC.WebCore.Request => { args[i] = (eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing Request object", .{}); - }).as(Request) orelse { + }).as(JSC.WebCore.Request) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Expected Request object", .{}); }; }, - JSValue => { + JSC.JSValue => { const val = eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing argument", .{}); }; args[i] = val; }, - ?JSValue => { + ?JSC.JSValue => { args[i] = eater(&iter); }, JSC.C.ExceptionRef => { @@ -1170,7 +1160,7 @@ pub fn wrapStaticMethod( ) JSC.JSHostZigFunction { return struct { const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).Fn; + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).@"fn"; const Args = std.meta.ArgsTuple(FunctionType); const eater = if (auto_protect) JSC.Node.ArgumentsSlice.protectEatNext else JSC.Node.ArgumentsSlice.nextEat; @@ -1244,7 +1234,7 @@ pub fn wrapStaticMethod( args[i] = null; } }, - ZigString => { + JSC.ZigString => { var string_value = eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing argument", .{}); @@ -1266,32 +1256,32 @@ pub fn wrapStaticMethod( args[i] = null; } }, - *Response => { + *JSC.WebCore.Response => { args[i] = (eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing Response object", .{}); - }).as(Response) orelse { + }).as(JSC.WebCore.Response) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Expected Response object", .{}); }; }, - *Request => { + *JSC.WebCore.Request => { args[i] = (eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing Request object", .{}); - }).as(Request) orelse { + }).as(JSC.WebCore.Request) orelse { iter.deinit(); return 
globalThis.throwInvalidArguments("Expected Request object", .{}); }; }, - JSValue => { + JSC.JSValue => { const val = eater(&iter) orelse { iter.deinit(); return globalThis.throwInvalidArguments("Missing argument", .{}); }; args[i] = val; }, - ?JSValue => { + ?JSC.JSValue => { args[i] = eater(&iter); }, else => @compileError(std.fmt.comptimePrint("Unexpected Type " ++ @typeName(ArgType) ++ " at argument {d} in {s}#{s}", .{ i, @typeName(Container), name })), @@ -1402,7 +1392,7 @@ pub const BinaryType = enum(u4) { return Map.get(input); } - pub fn fromJSValue(globalThis: *JSC.JSGlobalObject, input: JSValue) bun.JSError!?BinaryType { + pub fn fromJSValue(globalThis: *JSC.JSGlobalObject, input: JSC.JSValue) bun.JSError!?BinaryType { if (input.isString()) { return Map.getWithEql(try input.toBunString2(globalThis), bun.String.eqlComptime); } @@ -1411,7 +1401,7 @@ } /// This clones bytes - pub fn toJS(this: BinaryType, bytes: []const u8, globalThis: *JSC.JSGlobalObject) JSValue { + pub fn toJS(this: BinaryType, bytes: []const u8, globalThis: *JSC.JSGlobalObject) JSC.JSValue { switch (this) { .Buffer => return JSC.ArrayBuffer.createBuffer(globalThis, bytes), .ArrayBuffer => return JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer), diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index ab9df2f32f595c..8727946521c3b5 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -13,7 +13,6 @@ const ZigStackTrace = Exports.ZigStackTrace; const ArrayBuffer = @import("../base.zig").ArrayBuffer; const JSC = bun.JSC; const Shimmer = JSC.Shimmer; -const ConsoleObject = JSC.ConsoleObject; const FFI = @import("./FFI.zig"); const NullableAllocator = bun.NullableAllocator; const MutableString = bun.MutableString; @@ -66,7 +65,7 @@ pub const JSObject = extern struct { /// prototype (`null_prototype = false`) unless you have a good reason not /// to. 
fn createFromStructWithPrototype(comptime T: type, pojo: T, global: *JSGlobalObject, comptime null_prototype: bool) *JSObject { - const info: std.builtin.Type.Struct = @typeInfo(T).Struct; + const info: std.builtin.Type.Struct = @typeInfo(T).@"struct"; const obj = obj: { const val = if (comptime null_prototype) @@ -2534,7 +2533,7 @@ pub const JSPromise = extern struct { const Wrapper = struct { args: Args, - pub fn call(this: *@This(), g: *JSC.JSGlobalObject) JSC.JSValue { + pub fn call(this: *@This(), g: *JSC.JSGlobalObject) callconv(.c) JSC.JSValue { return toJSHostValue(g, @call(.auto, Fn, this.args)); } }; @@ -2815,7 +2814,7 @@ pub const AnyPromise = union(enum) { const Wrapper = struct { args: Args, - pub fn call(wrap_: *@This(), global: *JSC.JSGlobalObject) JSC.JSValue { + pub fn call(wrap_: *@This(), global: *JSC.JSGlobalObject) callconv(.c) JSC.JSValue { return toJSHostValue(global, @call(.auto, Fn, wrap_.args)); } }; @@ -4084,7 +4083,6 @@ pub const JSValue = enum(i64) { if (this.isDouble()) { return this.asDouble(); } - return this.coerceToDouble(globalThis); }, i64 => { @@ -4094,13 +4092,21 @@ pub const JSValue = enum(i64) { if (this.isInt32()) { return this.asInt32(); } - if (this.getNumber()) |num| { return coerceJSValueDoubleTruncatingT(i32, num); } - return this.coerceToInt32(globalThis); }, + std.c.AI, + => { + if (this.isInt32()) { + return @bitCast(this.asInt32()); + } + if (this.getNumber()) |num| { + return @bitCast(coerceJSValueDoubleTruncatingT(i32, num)); + } + return @bitCast(this.coerceToInt32(globalThis)); + }, else => @compileError("Unsupported coercion type"), }; } @@ -4108,8 +4114,8 @@ pub const JSValue = enum(i64) { /// This does not call [Symbol.toPrimitive] or [Symbol.toStringTag]. /// This is only safe when you don't want to do conversions across non-primitive types. 
pub fn to(this: JSValue, comptime T: type) T { - if (@typeInfo(T) == .Enum) { - const Int = @typeInfo(T).Enum.tag_type; + if (@typeInfo(T) == .@"enum") { + const Int = @typeInfo(T).@"enum".tag_type; return @enumFromInt(this.to(Int)); } return switch (comptime T) { @@ -4248,8 +4254,8 @@ pub const JSValue = enum(i64) { pub fn put(value: JSValue, global: *JSGlobalObject, key: anytype, result: JSC.JSValue) void { const Key = @TypeOf(key); - if (comptime @typeInfo(Key) == .Pointer) { - const Elem = @typeInfo(Key).Pointer.child; + if (comptime @typeInfo(Key) == .pointer) { + const Elem = @typeInfo(Key).pointer.child; if (Elem == ZigString) { putZigString(value, global, key, result); } else if (Elem == bun.String) { @@ -4476,8 +4482,8 @@ pub const JSValue = enum(i64) { extern fn JSBuffer__bufferFromPointerAndLengthAndDeinit(*JSGlobalObject, [*]u8, usize, ?*anyopaque, JSC.C.JSTypedArrayBytesDeallocator) JSValue; pub fn jsNumberWithType(comptime Number: type, number: Number) JSValue { - if (@typeInfo(Number) == .Enum) { - return jsNumberWithType(@typeInfo(Number).Enum.tag_type, @intFromEnum(number)); + if (@typeInfo(Number) == .@"enum") { + return jsNumberWithType(@typeInfo(Number).@"enum".tag_type, @intFromEnum(number)); } return switch (comptime Number) { JSValue => number, @@ -4570,8 +4576,8 @@ pub const JSValue = enum(i64) { pub fn print( this: JSValue, globalObject: *JSGlobalObject, - message_type: ConsoleObject.MessageType, - message_level: ConsoleObject.MessageLevel, + message_type: JSC.ConsoleObject.MessageType, + message_level: JSC.ConsoleObject.MessageLevel, ) void { JSC.ConsoleObject.messageWithTypeAndLevel( undefined, diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig index 800cde4c39541a..c94b5e7b9e0263 100644 --- a/src/bun.js/bindings/exports.zig +++ b/src/bun.js/bindings/exports.zig @@ -1,7 +1,6 @@ const JSC = bun.JSC; const Fs = @import("../../fs.zig"); const CAPI = JSC.C; -const JS = @import("../javascript.zig"); const JSBase = 
@import("../base.zig"); const ZigURL = @import("../../url.zig").URL; const Api = @import("../../api/schema.zig").Api; @@ -48,7 +47,7 @@ pub const ZigGlobalObject = extern struct { pub const name = "Zig::GlobalObject"; pub const include = "\"ZigGlobalObject.h\""; pub const namespace = shim.namespace; - pub const Interface: type = NewGlobalObject(JS.VirtualMachine); + pub const Interface: type = NewGlobalObject(JSC.VirtualMachine); pub fn create( vm: *JSC.VirtualMachine, @@ -113,11 +112,11 @@ pub const ZigGlobalObject = extern struct { pub const Extern = [_][]const u8{ "create", "getModuleRegistryMap", "resetModuleRegistryMap" }; comptime { - @export(import, .{ .name = Export[0].symbol_name }); - @export(resolve, .{ .name = Export[1].symbol_name }); - @export(promiseRejectionTracker, .{ .name = Export[2].symbol_name }); - @export(reportUncaughtException, .{ .name = Export[3].symbol_name }); - @export(onCrash, .{ .name = Export[4].symbol_name }); + @export(&import, .{ .name = Export[0].symbol_name }); + @export(&resolve, .{ .name = Export[1].symbol_name }); + @export(&promiseRejectionTracker, .{ .name = Export[2].symbol_name }); + @export(&reportUncaughtException, .{ .name = Export[3].symbol_name }); + @export(&onCrash, .{ .name = Export[4].symbol_name }); } }; @@ -225,7 +224,7 @@ pub const ResolvedSource = extern struct { allocator: ?*anyopaque = null, - jsvalue_for_export: JSC.JSValue = .zero, + jsvalue_for_export: JSValue = .zero, tag: Tag = Tag.javascript, @@ -413,31 +412,31 @@ pub const Process = extern struct { }); comptime { - @export(getTitle, .{ + @export(&getTitle, .{ .name = Export[0].symbol_name, }); - @export(setTitle, .{ + @export(&setTitle, .{ .name = Export[1].symbol_name, }); - @export(getArgv, .{ + @export(&getArgv, .{ .name = Export[2].symbol_name, }); - @export(getCwd, .{ + @export(&getCwd, .{ .name = Export[3].symbol_name, }); - @export(setCwd, .{ + @export(&setCwd, .{ .name = Export[4].symbol_name, }); - @export(exit, .{ + @export(&exit, .{ .name 
= Export[5].symbol_name, }); - @export(getArgv0, .{ + @export(&getArgv0, .{ .name = Export[6].symbol_name, }); - @export(getExecPath, .{ + @export(&getExecPath, .{ .name = Export[7].symbol_name, }); - @export(getExecArgv, .{ + @export(&getExecArgv, .{ .name = Export[8].symbol_name, }); } @@ -837,7 +836,6 @@ pub const ZigException = extern struct { } pub const shim = Shimmer("Zig", "Exception", @This()); - pub const name = "ZigException"; pub const namespace = shim.namespace; pub const Holder = extern struct { @@ -966,8 +964,8 @@ pub inline fn toGlobalContextRef(ptr: *JSGlobalObject) CAPI.JSGlobalContextRef { } comptime { - @export(ErrorCode.ParserError, .{ .name = "Zig_ErrorCodeParserError" }); - @export(ErrorCode.JSErrorObject, .{ .name = "Zig_ErrorCodeJSErrorObject" }); + @export(&ErrorCode.ParserError, .{ .name = "Zig_ErrorCodeParserError" }); + @export(&ErrorCode.JSErrorObject, .{ .name = "Zig_ErrorCodeJSErrorObject" }); } const Bun = JSC.API.Bun; diff --git a/src/bun.js/bindings/header-gen.zig b/src/bun.js/bindings/header-gen.zig index 3b4dd7ca1ba808..14a8eac7b77fc0 100644 --- a/src/bun.js/bindings/header-gen.zig +++ b/src/bun.js/bindings/header-gen.zig @@ -156,8 +156,8 @@ pub const C_Generator = struct { comptime nonnull.append(i) catch unreachable; }, else => |info| { - if (comptime info == .Pointer and @typeInfo(info.Pointer.child) == .Fn) { - self.gen_closure(comptime info.Pointer.child, comptime std.fmt.comptimePrint(" ArgFn{d}", .{i})); + if (comptime info == .pointer and @typeInfo(info.pointer.child) == .@"fn") { + self.gen_closure(comptime info.pointer.child, comptime std.fmt.comptimePrint(" ArgFn{d}", .{i})); comptime nonnull.append(i) catch unreachable; } else { self.writeType(comptime arg.type.?); @@ -189,7 +189,7 @@ pub const C_Generator = struct { defer self.write(";\n"); // const ReturnTypeInfo: std.builtin.Type = comptime @typeInfo(func.return_type); // switch (comptime ReturnTypeInfo) { - // .Pointer => |Pointer| { + // .pointer => |Pointer| { // 
self.write(" __attribute__((returns_nonnull))"); // }, // .Optional => |Optional| {}, @@ -226,7 +226,7 @@ pub const C_Generator = struct { self.write(")"); // const ReturnTypeInfo: std.builtin.Type = comptime @typeInfo(func.return_type); // switch (comptime ReturnTypeInfo) { - // .Pointer => |Pointer| { + // .pointer => |Pointer| { // self.write(" __attribute__((returns_nonnull))"); // }, // .Optional => |Optional| {}, @@ -344,16 +344,16 @@ pub const C_Generator = struct { Type = OtherType; } } - if (@typeInfo(Type) == .Pointer and !std.meta.isManyItemPtr(Type)) { - Type = @typeInfo(Type).Pointer.child; + if (@typeInfo(Type) == .pointer and !std.meta.isManyItemPtr(Type)) { + Type = @typeInfo(Type).pointer.child; } break :brk Type; }; if (comptime (isCppObject(TT)) and @hasDecl(TT, "name")) { - if (@typeInfo(T) == .Pointer or (@hasDecl(TT, "Type") and (@TypeOf(TT.Type) == type and @typeInfo(TT.Type) == .Pointer))) { - if (@hasDecl(TT, "is_pointer") and !TT.is_pointer) {} else if (@typeInfo(T).Pointer.is_const) { + if (@typeInfo(T) == .pointer or (@hasDecl(TT, "Type") and (@TypeOf(TT.Type) == type and @typeInfo(TT.Type) == .pointer))) { + if (@hasDecl(TT, "is_pointer") and !TT.is_pointer) {} else if (@typeInfo(T).pointer.is_const) { write(self, "const "); } } @@ -401,7 +401,7 @@ pub const C_Generator = struct { write(self, comptime formatted_name); } - if (@typeInfo(T) == .Pointer or (@hasDecl(TT, "Type") and (@TypeOf(TT.Type) == type and @typeInfo(TT.Type) == .Pointer))) { + if (@typeInfo(T) == .pointer or (@hasDecl(TT, "Type") and (@TypeOf(TT.Type) == type and @typeInfo(TT.Type) == .pointer))) { if (@hasDecl(TT, "is_pointer") and !TT.is_pointer) {} else { write(self, "*"); } @@ -414,7 +414,7 @@ pub const C_Generator = struct { } else { const meta = @typeInfo(T); switch (meta) { - .Pointer => |Pointer| { + .pointer => |Pointer| { const child = Pointer.child; // if (childmeta == .Struct and childmeta.Struct.layout != .Extern) { // self.write("void"); @@ -619,9 +619,9 
@@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp \\//-- GENERATED FILE. Do not edit -- \\// \\// To regenerate this file, run: - \\// + \\// \\// make headers - \\// + \\// \\//-- GENERATED FILE. Do not edit -- \\ ) catch unreachable; @@ -663,9 +663,9 @@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp \\//-- GENERATED FILE. Do not edit -- \\// \\// To regenerate this file, run: - \\// + \\// \\// make headers - \\// + \\// \\//-- GENERATED FILE. Do not edit -- \\ ) catch unreachable; @@ -726,8 +726,8 @@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp all_types[1] = second_import; var counter: usize = 2; inline for (first_import.DOMCalls) |Type_| { - const Type = if (@typeInfo(@TypeOf(Type_)) == .Pointer) - @typeInfo(@TypeOf(Type_)).Pointer.child + const Type = if (@typeInfo(@TypeOf(Type_)) == .pointer) + @typeInfo(@TypeOf(Type_)).pointer.child else @TypeOf(Type_); @@ -742,8 +742,8 @@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp }; inline for (first_import.DOMCalls) |Type_| { - const Type = if (@typeInfo(@TypeOf(Type_)) == .Pointer) - @typeInfo(@TypeOf(Type_)).Pointer.child + const Type = if (@typeInfo(@TypeOf(Type_)) == .pointer) + @typeInfo(@TypeOf(Type_)).pointer.child else @TypeOf(Type_); @@ -962,16 +962,16 @@ pub fn HeaderGen(comptime first_import: type, comptime second_import: type, comp generated.writer().print( \\ #include "root.h" \\ #include "headers.h" - \\ + \\ \\ #include \\ #include "DOMJITIDLConvert.h" \\ #include "DOMJITIDLType.h" \\ #include "DOMJITIDLTypeFilter.h" \\ #include "DOMJITHelpers.h" \\ #include - \\ + \\ \\ #include "JSDOMConvertBufferSource.h" - \\ + \\ \\ using namespace JSC; \\ using namespace WebCore; \\ diff --git a/src/bun.js/bindings/shimmer.zig b/src/bun.js/bindings/shimmer.zig index ac5c44c963ed1b..4b306e2d015452 100644 --- a/src/bun.js/bindings/shimmer.zig +++ b/src/bun.js/bindings/shimmer.zig 
@@ -5,8 +5,7 @@ const Sizes = @import("./sizes.zig"); const headers = @import("./headers.zig"); fn isNullableType(comptime Type: type) bool { - return @typeInfo(Type) == .Optional or - (@typeInfo(Type) == .Pointer and @typeInfo(Type).Pointer.is_allowzero); + return @typeInfo(Type) == .optional or (@typeInfo(Type) == .pointer and @typeInfo(Type).pointer.is_allowzero); } const log = @import("../../output.zig").scoped(.CPP, true); @@ -17,7 +16,7 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp pub fn assertJSFunction(comptime funcs: anytype) void { inline for (funcs) |func| { - if (@typeInfo(@TypeOf(func)) != .Fn) { + if (@typeInfo(@TypeOf(func)) != .@"fn") { @compileError("Expected " ++ @typeName(Parent) ++ "." ++ @typeName(func) ++ " to be a function but received " ++ @tagName(@typeInfo(@TypeOf(func)))); } } @@ -46,15 +45,15 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp // var ReturnTypeInfo: std.builtin.Type = @typeInfo(FromType); - // if (ReturnTypeInfo == .Pointer and NewReturnType != *anyopaque) { - // NewReturnType = ReturnTypeInfo.Pointer.child; + // if (ReturnTypeInfo == .pointer and NewReturnType != *anyopaque) { + // NewReturnType = ReturnTypeInfo.pointer.child; // ReturnTypeInfo = @typeInfo(NewReturnType); // } // switch (ReturnTypeInfo) { // .Union, // .Struct, - // .Enum, + // .@"enum", // => { // if (@hasDecl(ReturnTypeInfo., "Type")) { // return NewReturnType; @@ -94,8 +93,8 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp }; fn pointerChild(comptime Type: type) type { - if (@typeInfo(Type) == .Pointer) { - return @typeInfo(Type).Pointer.child_type; + if (@typeInfo(Type) == .pointer) { + return @typeInfo(Type).pointer.child_type; } return Type; @@ -115,17 +114,17 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp var functions: [std.meta.fieldNames(FunctionsType).len]StaticExport = undefined; for 
(std.meta.fieldNames(FunctionsType), 0..) |fn_name, i| { const Function = @TypeOf(@field(Functions, fn_name)); - if (@typeInfo(Function) != .Fn) { + if (@typeInfo(Function) != .@"fn") { @compileError("Expected " ++ @typeName(Parent) ++ "." ++ @typeName(Function) ++ " to be a function but received " ++ @tagName(@typeInfo(Function))); } - const Fn: std.builtin.Type.Fn = @typeInfo(Function).Fn; + const Fn: std.builtin.Type.Fn = @typeInfo(Function).@"fn"; if (Function == bun.JSC.JSHostFunctionTypeWithCCallConvForAssertions and bun.JSC.conv != .C) { @compileError("Expected " ++ bun.meta.typeName(Function) ++ " to have a JSC.conv Calling Convention."); } else if (Function == bun.JSC.JSHostFunctionType) { // } else if (Function == bun.JSC.JSHostZigFunction) { // - } else if (Fn.calling_convention != .C) { + } else if (std.meta.activeTag(Fn.calling_convention) != std.meta.activeTag(std.builtin.CallingConvention.c)) { @compileError("Expected " ++ @typeName(Parent) ++ "." ++ @typeName(Function) ++ " to have a C Calling Convention."); } @@ -150,10 +149,10 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp for (Functions) |thenable| { for ([_][]const u8{ "resolve", "reject" }) |fn_name| { const Function = @TypeOf(@field(thenable, fn_name)); - if (@typeInfo(Function) != .Fn) { + if (@typeInfo(Function) != .@"fn") { @compileError("Expected " ++ @typeName(Parent) ++ "." ++ @typeName(Function) ++ " to be a function but received " ++ @tagName(@typeInfo(Function))); } - const Fn: std.builtin.Type.Fn = @typeInfo(Function).Fn; + const Fn: std.builtin.Type.@"fn" = @typeInfo(Function).@"fn"; if (Fn.calling_convention != .C) { @compileError("Expected " ++ @typeName(Parent) ++ "." 
++ @typeName(Function) ++ " to have a C Calling Convention."); } @@ -176,7 +175,7 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp pub inline fn matchNullable(comptime ExpectedReturnType: type, comptime ExternReturnType: type, value: ExternReturnType) ExpectedReturnType { if (comptime isNullableType(ExpectedReturnType) != isNullableType(ExternReturnType)) { return value.?; - } else if (comptime (@typeInfo(ExpectedReturnType) == .Enum) and (@typeInfo(ExternReturnType) != .Enum)) { + } else if (comptime (@typeInfo(ExpectedReturnType) == .@"enum") and (@typeInfo(ExternReturnType) != .@"enum")) { return @as(ExpectedReturnType, @enumFromInt(value)); } else { return value; @@ -188,22 +187,22 @@ pub fn Shimmer(comptime _namespace: []const u8, comptime _name: []const u8, comp if (!@hasDecl(Parent, typeName)) { @compileError(@typeName(Parent) ++ " is missing cppFn: " ++ typeName); } - break :ret @typeInfo(@TypeOf(@field(Parent, typeName))).Fn.return_type.?; + break :ret @typeInfo(@TypeOf(@field(Parent, typeName))).@"fn".return_type.?; }) { log(comptime name ++ "__" ++ typeName, .{}); @setEvalBranchQuota(99999); { const Fn = comptime @field(headers, symbolName(typeName)); - if (@typeInfo(@TypeOf(Fn)).Fn.params.len > 0) + if (@typeInfo(@TypeOf(Fn)).@"fn".params.len > 0) return matchNullable( - comptime @typeInfo(@TypeOf(@field(Parent, typeName))).Fn.return_type.?, - comptime @typeInfo(@TypeOf(Fn)).Fn.return_type.?, + comptime @typeInfo(@TypeOf(@field(Parent, typeName))).@"fn".return_type.?, + comptime @typeInfo(@TypeOf(Fn)).@"fn".return_type.?, @call(.auto, Fn, args), ); return matchNullable( - comptime @typeInfo(@TypeOf(@field(Parent, typeName))).Fn.return_type.?, - comptime @typeInfo(@TypeOf(Fn)).Fn.return_type.?, + comptime @typeInfo(@TypeOf(@field(Parent, typeName))).@"fn".return_type.?, + comptime @typeInfo(@TypeOf(Fn)).@"fn".return_type.?, Fn(), ); } diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index 
d09359bfa62c88..59e3347d26f9f9 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -1,29 +1,25 @@ const std = @import("std"); const JSC = bun.JSC; -const JSGlobalObject = JSC.JSGlobalObject; -const VirtualMachine = JSC.VirtualMachine; +const VirtualMachine = bun.JSC.VirtualMachine; const Allocator = std.mem.Allocator; const Lock = bun.Mutex; const bun = @import("root").bun; const Environment = bun.Environment; const Fetch = JSC.WebCore.Fetch; -const WebCore = JSC.WebCore; const Bun = JSC.API.Bun; const TaggedPointerUnion = @import("../tagged_pointer.zig").TaggedPointerUnion; const typeBaseName = @import("../meta.zig").typeBaseName; const AsyncGlobWalkTask = JSC.API.Glob.WalkTask.AsyncGlobWalkTask; -const CopyFilePromiseTask = WebCore.Blob.Store.CopyFile.CopyFilePromiseTask; +const CopyFilePromiseTask = bun.JSC.WebCore.Blob.Store.CopyFilePromiseTask; const AsyncTransformTask = JSC.API.JSTranspiler.TransformTask.AsyncTransformTask; -const ReadFileTask = WebCore.Blob.ReadFile.ReadFileTask; -const WriteFileTask = WebCore.Blob.WriteFile.WriteFileTask; +const ReadFileTask = bun.JSC.WebCore.Blob.ReadFileTask; +const WriteFileTask = bun.JSC.WebCore.Blob.WriteFileTask; const napi_async_work = JSC.napi.napi_async_work; const FetchTasklet = Fetch.FetchTasklet; const S3 = bun.S3; const S3HttpSimpleTask = S3.S3HttpSimpleTask; const S3HttpDownloadStreamingTask = S3.S3HttpDownloadStreamingTask; -const JSValue = JSC.JSValue; -const js = JSC.C; const Waker = bun.Async.Waker; pub const WorkPool = @import("../work_pool.zig").WorkPool; @@ -40,7 +36,7 @@ pub fn ConcurrentPromiseTask(comptime Context: type) type { event_loop: *JSC.EventLoop, allocator: std.mem.Allocator, promise: JSC.JSPromise.Strong = .{}, - globalThis: *JSGlobalObject, + globalThis: *JSC.JSGlobalObject, concurrent_task: JSC.ConcurrentTask = .{}, // This is a poll because we want it to enter the uSockets loop @@ -48,7 +44,7 @@ pub fn ConcurrentPromiseTask(comptime Context: type) type { pub 
usingnamespace bun.New(@This()); - pub fn createOnJSThread(allocator: std.mem.Allocator, globalThis: *JSGlobalObject, value: *Context) !*This { + pub fn createOnJSThread(allocator: std.mem.Allocator, globalThis: *JSC.JSGlobalObject, value: *Context) !*This { var this = This.new(.{ .event_loop = VirtualMachine.get().event_loop, .ctx = value, @@ -101,7 +97,7 @@ pub fn WorkTask(comptime Context: type) type { task: TaskType = .{ .callback = &runFromThreadPool }, event_loop: *JSC.EventLoop, allocator: std.mem.Allocator, - globalThis: *JSGlobalObject, + globalThis: *JSC.JSGlobalObject, concurrent_task: ConcurrentTask = .{}, async_task_tracker: JSC.AsyncTaskTracker, @@ -110,7 +106,7 @@ pub fn WorkTask(comptime Context: type) type { pub usingnamespace bun.New(@This()); - pub fn createOnJSThread(allocator: std.mem.Allocator, globalThis: *JSGlobalObject, value: *Context) !*This { + pub fn createOnJSThread(allocator: std.mem.Allocator, globalThis: *JSC.JSGlobalObject, value: *Context) !*This { var vm = globalThis.bunVM(); var this = This.new(.{ .event_loop = vm.eventLoop(), @@ -298,8 +294,8 @@ pub const AnyTaskWithExtraContext = struct { }; pub const CppTask = opaque { - extern fn Bun__performTask(globalObject: *JSGlobalObject, task: *CppTask) void; - pub fn run(this: *CppTask, global: *JSGlobalObject) void { + extern fn Bun__performTask(globalObject: *JSC.JSGlobalObject, task: *CppTask) void; + pub fn run(this: *CppTask, global: *JSC.JSGlobalObject) void { JSC.markBinding(@src()); Bun__performTask(global, this); } @@ -311,7 +307,7 @@ pub const ConcurrentCppTask = struct { const EventLoopTaskNoContext = opaque { extern fn Bun__EventLoopTaskNoContext__performTask(task: *EventLoopTaskNoContext) void; - extern fn Bun__EventLoopTaskNoContext__createdInBunVm(task: *const EventLoopTaskNoContext) ?*JSC.VirtualMachine; + extern fn Bun__EventLoopTaskNoContext__createdInBunVm(task: *const EventLoopTaskNoContext) ?*VirtualMachine; /// Deallocates `this` pub fn run(this: 
*EventLoopTaskNoContext) void { @@ -319,7 +315,7 @@ pub const ConcurrentCppTask = struct { } /// Get the VM that created this task - pub fn getVM(this: *const EventLoopTaskNoContext) ?*JSC.VirtualMachine { + pub fn getVM(this: *const EventLoopTaskNoContext) ?*VirtualMachine { return Bun__EventLoopTaskNoContext__createdInBunVm(this); } }; @@ -658,7 +654,7 @@ pub const GarbageCollectionController = struct { // // When the heap size is increasing, we always switch to fast mode // When the heap size has been the same or less for 30 seconds, we switch to slow mode - pub fn updateGCRepeatTimer(this: *GarbageCollectionController, comptime setting: @Type(.EnumLiteral)) void { + pub fn updateGCRepeatTimer(this: *GarbageCollectionController, comptime setting: @Type(.enum_literal)) void { if (setting == .fast and !this.gc_repeating_timer_fast) { this.gc_repeating_timer_fast = true; this.gc_repeating_timer.set(this, onGCRepeatingTimer, this.gc_timer_interval, this.gc_timer_interval); @@ -744,7 +740,7 @@ pub const GarbageCollectionController = struct { export fn Bun__tickWhilePaused(paused: *bool) void { JSC.markBinding(@src()); - JSC.VirtualMachine.get().eventLoop().tickWhilePaused(paused); + VirtualMachine.get().eventLoop().tickWhilePaused(paused); } comptime { @@ -831,8 +827,8 @@ pub const EventLoop = struct { next_immediate_tasks: Queue = undefined, concurrent_tasks: ConcurrentTask.Queue = ConcurrentTask.Queue{}, - global: *JSGlobalObject = undefined, - virtual_machine: *JSC.VirtualMachine = undefined, + global: *JSC.JSGlobalObject = undefined, + virtual_machine: *VirtualMachine = undefined, waker: ?Waker = null, forever_timer: ?*uws.Timer = null, deferred_tasks: DeferredTaskQueue = .{}, @@ -847,7 +843,7 @@ pub const EventLoop = struct { pub export fn Bun__ensureSignalHandler() void { if (Environment.isPosix) { - if (JSC.VirtualMachine.getMainThreadVM()) |vm| { + if (VirtualMachine.getMainThreadVM()) |vm| { const this = vm.eventLoop(); if (this.signal_handler == null) { 
this.signal_handler = PosixSignalHandle.new(.{}); @@ -904,7 +900,7 @@ pub const EventLoop = struct { this.entered_event_loop_count -= 1; } - pub inline fn getVmImpl(this: *EventLoop) *JSC.VirtualMachine { + pub inline fn getVmImpl(this: *EventLoop) *VirtualMachine { return this.virtual_machine; } @@ -1007,77 +1003,75 @@ pub const EventLoop = struct { while (@field(this, queue_name).readItem()) |task| { defer counter += 1; switch (task.tag()) { - @field(Task.Tag, typeBaseName(@typeName(ShellAsync))) => { + @field(Task.Tag, @typeName(ShellAsync)) => { var shell_ls_task: *ShellAsync = task.get(ShellAsync).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellAsyncSubprocessDone))) => { + @field(Task.Tag, @typeName(ShellAsyncSubprocessDone)) => { var shell_ls_task: *ShellAsyncSubprocessDone = task.get(ShellAsyncSubprocessDone).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellIOWriterAsyncDeinit))) => { + @field(Task.Tag, @typeName(ShellIOWriterAsyncDeinit)) => { var shell_ls_task: *ShellIOWriterAsyncDeinit = task.get(ShellIOWriterAsyncDeinit).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellIOReaderAsyncDeinit))) => { + @field(Task.Tag, @typeName(ShellIOReaderAsyncDeinit)) => { var shell_ls_task: *ShellIOReaderAsyncDeinit = task.get(ShellIOReaderAsyncDeinit).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellCondExprStatTask))) => { + @field(Task.Tag, @typeName(ShellCondExprStatTask)) => { var shell_ls_task: *ShellCondExprStatTask = task.get(ShellCondExprStatTask).?; shell_ls_task.task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellCpTask))) => { + @field(Task.Tag, @typeName(ShellCpTask)) => { var shell_ls_task: *ShellCpTask = task.get(ShellCpTask).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellTouchTask))) => { + @field(Task.Tag, @typeName(ShellTouchTask)) 
=> { var shell_ls_task: *ShellTouchTask = task.get(ShellTouchTask).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellMkdirTask))) => { + @field(Task.Tag, @typeName(ShellMkdirTask)) => { var shell_ls_task: *ShellMkdirTask = task.get(ShellMkdirTask).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellLsTask))) => { + @field(Task.Tag, @typeName(ShellLsTask)) => { var shell_ls_task: *ShellLsTask = task.get(ShellLsTask).?; shell_ls_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellMvBatchedTask))) => { + @field(Task.Tag, @typeName(ShellMvBatchedTask)) => { var shell_mv_batched_task: *ShellMvBatchedTask = task.get(ShellMvBatchedTask).?; shell_mv_batched_task.task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellMvCheckTargetTask))) => { + @field(Task.Tag, @typeName(ShellMvCheckTargetTask)) => { var shell_mv_check_target_task: *ShellMvCheckTargetTask = task.get(ShellMvCheckTargetTask).?; shell_mv_check_target_task.task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellRmTask))) => { + @field(Task.Tag, @typeName(ShellRmTask)) => { var shell_rm_task: *ShellRmTask = task.get(ShellRmTask).?; shell_rm_task.runFromMainThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ShellRmDirTask))) => { + @field(Task.Tag, @typeName(ShellRmDirTask)) => { var shell_rm_task: *ShellRmDirTask = task.get(ShellRmDirTask).?; shell_rm_task.runFromMainThread(); }, - - @field(Task.Tag, typeBaseName(@typeName(ShellGlobTask))) => { + @field(Task.Tag, @typeName(ShellGlobTask)) => { var shell_glob_task: *ShellGlobTask = task.get(ShellGlobTask).?; shell_glob_task.runFromMainThread(); shell_glob_task.deinit(); }, - .FetchTasklet => { + @field(Task.Tag, @typeName(FetchTasklet)) => { var fetch_task: *Fetch.FetchTasklet = task.get(Fetch.FetchTasklet).?; fetch_task.onProgressUpdate(); }, - .S3HttpSimpleTask => { + @field(Task.Tag, @typeName(S3HttpSimpleTask)) => { 
var s3_task: *S3HttpSimpleTask = task.get(S3HttpSimpleTask).?; s3_task.onResponse(); }, - .S3HttpDownloadStreamingTask => { + @field(Task.Tag, @typeName(S3HttpDownloadStreamingTask)) => { var s3_task: *S3HttpDownloadStreamingTask = task.get(S3HttpDownloadStreamingTask).?; s3_task.onResponse(); }, - @field(Task.Tag, @typeName(AsyncGlobWalkTask)) => { var globWalkTask: *AsyncGlobWalkTask = task.get(AsyncGlobWalkTask).?; globWalkTask.*.runFromJS(); @@ -1093,11 +1087,11 @@ pub const EventLoop = struct { transform_task.*.runFromJS(); transform_task.deinit(); }, - @field(Task.Tag, typeBaseName(@typeName(JSC.napi.napi_async_work))) => { + @field(Task.Tag, @typeName(JSC.napi.napi_async_work)) => { const transform_task: *JSC.napi.napi_async_work = task.get(JSC.napi.napi_async_work).?; transform_task.*.runFromJS(); }, - .ThreadSafeFunction => { + @field(Task.Tag, @typeName(ThreadSafeFunction)) => { var transform_task: *ThreadSafeFunction = task.as(ThreadSafeFunction); transform_task.onDispatch(); }, @@ -1106,7 +1100,7 @@ pub const EventLoop = struct { transform_task.*.runFromJS(); transform_task.deinit(); }, - @field(Task.Tag, bun.meta.typeBaseName(@typeName(JSCDeferredWorkTask))) => { + @field(Task.Tag, @typeName(JSCDeferredWorkTask)) => { var jsc_task: *JSCDeferredWorkTask = task.get(JSCDeferredWorkTask).?; JSC.markBinding(@src()); jsc_task.run(); @@ -1123,239 +1117,238 @@ pub const EventLoop = struct { // special case: we return return 0; }, - @field(Task.Tag, typeBaseName(@typeName(bun.bake.DevServer.HotReloadEvent))) => { + @field(Task.Tag, @typeName(bun.bake.DevServer.HotReloadEvent)) => { const hmr_task: *bun.bake.DevServer.HotReloadEvent = task.get(bun.bake.DevServer.HotReloadEvent).?; hmr_task.run(); }, - @field(Task.Tag, typeBaseName(@typeName(FSWatchTask))) => { + @field(Task.Tag, @typeName(FSWatchTask)) => { var transform_task: *FSWatchTask = task.get(FSWatchTask).?; transform_task.*.run(); transform_task.deinit(); }, - @field(Task.Tag, 
typeBaseName(@typeName(AnyTask))) => { + @field(Task.Tag, @typeName(AnyTask)) => { var any: *AnyTask = task.get(AnyTask).?; any.run(); }, - @field(Task.Tag, typeBaseName(@typeName(ManagedTask))) => { + @field(Task.Tag, @typeName(ManagedTask)) => { var any: *ManagedTask = task.get(ManagedTask).?; any.run(); }, - @field(Task.Tag, typeBaseName(@typeName(CppTask))) => { + @field(Task.Tag, @typeName(CppTask)) => { var any: *CppTask = task.get(CppTask).?; any.run(global); }, - @field(Task.Tag, typeBaseName(@typeName(PollPendingModulesTask))) => { + @field(Task.Tag, @typeName(PollPendingModulesTask)) => { virtual_machine.modules.onPoll(); }, - @field(Task.Tag, typeBaseName(@typeName(GetAddrInfoRequestTask))) => { + @field(Task.Tag, @typeName(GetAddrInfoRequestTask)) => { if (Environment.os == .windows) @panic("This should not be reachable on Windows"); var any: *GetAddrInfoRequestTask = task.get(GetAddrInfoRequestTask).?; any.runFromJS(); any.deinit(); }, - @field(Task.Tag, typeBaseName(@typeName(Stat))) => { + @field(Task.Tag, @typeName(Stat)) => { var any: *Stat = task.get(Stat).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Lstat))) => { + @field(Task.Tag, @typeName(Lstat)) => { var any: *Lstat = task.get(Lstat).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Fstat))) => { + @field(Task.Tag, @typeName(Fstat)) => { var any: *Fstat = task.get(Fstat).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Open))) => { + @field(Task.Tag, @typeName(Open)) => { var any: *Open = task.get(Open).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ReadFile))) => { + @field(Task.Tag, @typeName(ReadFile)) => { var any: *ReadFile = task.get(ReadFile).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(WriteFile))) => { + @field(Task.Tag, @typeName(WriteFile)) => { var any: *WriteFile = task.get(WriteFile).?; any.runFromJSThread(); }, - @field(Task.Tag, 
typeBaseName(@typeName(CopyFile))) => { + @field(Task.Tag, @typeName(CopyFile)) => { var any: *CopyFile = task.get(CopyFile).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Read))) => { + @field(Task.Tag, @typeName(Read)) => { var any: *Read = task.get(Read).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Write))) => { + @field(Task.Tag, @typeName(Write)) => { var any: *Write = task.get(Write).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Truncate))) => { + @field(Task.Tag, @typeName(Truncate)) => { var any: *Truncate = task.get(Truncate).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Writev))) => { + @field(Task.Tag, @typeName(Writev)) => { var any: *Writev = task.get(Writev).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Readv))) => { + @field(Task.Tag, @typeName(Readv)) => { var any: *Readv = task.get(Readv).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Rename))) => { + @field(Task.Tag, @typeName(Rename)) => { var any: *Rename = task.get(Rename).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(FTruncate))) => { + @field(Task.Tag, @typeName(FTruncate)) => { var any: *FTruncate = task.get(FTruncate).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Readdir))) => { + @field(Task.Tag, @typeName(Readdir)) => { var any: *Readdir = task.get(Readdir).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ReaddirRecursive))) => { + @field(Task.Tag, @typeName(ReaddirRecursive)) => { var any: *ReaddirRecursive = task.get(ReaddirRecursive).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Close))) => { + @field(Task.Tag, @typeName(Close)) => { var any: *Close = task.get(Close).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Rm))) => { + @field(Task.Tag, @typeName(Rm)) => { var any: *Rm = task.get(Rm).?; any.runFromJSThread(); }, - 
@field(Task.Tag, typeBaseName(@typeName(Rmdir))) => { + @field(Task.Tag, @typeName(Rmdir)) => { var any: *Rmdir = task.get(Rmdir).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Chown))) => { + @field(Task.Tag, @typeName(Chown)) => { var any: *Chown = task.get(Chown).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(FChown))) => { + @field(Task.Tag, @typeName(FChown)) => { var any: *FChown = task.get(FChown).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Utimes))) => { + @field(Task.Tag, @typeName(Utimes)) => { var any: *Utimes = task.get(Utimes).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Lutimes))) => { + @field(Task.Tag, @typeName(Lutimes)) => { var any: *Lutimes = task.get(Lutimes).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Chmod))) => { + @field(Task.Tag, @typeName(Chmod)) => { var any: *Chmod = task.get(Chmod).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Fchmod))) => { + @field(Task.Tag, @typeName(Fchmod)) => { var any: *Fchmod = task.get(Fchmod).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Link))) => { + @field(Task.Tag, @typeName(Link)) => { var any: *Link = task.get(Link).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Symlink))) => { + @field(Task.Tag, @typeName(Symlink)) => { var any: *Symlink = task.get(Symlink).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Readlink))) => { + @field(Task.Tag, @typeName(Readlink)) => { var any: *Readlink = task.get(Readlink).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Realpath))) => { + @field(Task.Tag, @typeName(Realpath)) => { var any: *Realpath = task.get(Realpath).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(RealpathNonNative))) => { + @field(Task.Tag, @typeName(RealpathNonNative)) => { var any: *RealpathNonNative = task.get(RealpathNonNative).?; 
any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Mkdir))) => { + @field(Task.Tag, @typeName(Mkdir)) => { var any: *Mkdir = task.get(Mkdir).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Fsync))) => { + @field(Task.Tag, @typeName(Fsync)) => { var any: *Fsync = task.get(Fsync).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Fdatasync))) => { + @field(Task.Tag, @typeName(Fdatasync)) => { var any: *Fdatasync = task.get(Fdatasync).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Access))) => { + @field(Task.Tag, @typeName(Access)) => { var any: *Access = task.get(Access).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(AppendFile))) => { + @field(Task.Tag, @typeName(AppendFile)) => { var any: *AppendFile = task.get(AppendFile).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Mkdtemp))) => { + @field(Task.Tag, @typeName(Mkdtemp)) => { var any: *Mkdtemp = task.get(Mkdtemp).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Exists))) => { + @field(Task.Tag, @typeName(Exists)) => { var any: *Exists = task.get(Exists).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Futimes))) => { + @field(Task.Tag, @typeName(Futimes)) => { var any: *Futimes = task.get(Futimes).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Lchmod))) => { + @field(Task.Tag, @typeName(Lchmod)) => { var any: *Lchmod = task.get(Lchmod).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Lchown))) => { + @field(Task.Tag, @typeName(Lchown)) => { var any: *Lchown = task.get(Lchown).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(Unlink))) => { + @field(Task.Tag, @typeName(Unlink)) => { var any: *Unlink = task.get(Unlink).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(NativeZlib))) => { + @field(Task.Tag, @typeName(NativeZlib)) => { var any: *NativeZlib = 
task.get(NativeZlib).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(NativeBrotli))) => { + @field(Task.Tag, @typeName(NativeBrotli)) => { var any: *NativeBrotli = task.get(NativeBrotli).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(ProcessWaiterThreadTask))) => { + @field(Task.Tag, @typeName(ProcessWaiterThreadTask)) => { bun.markPosixOnly(); var any: *ProcessWaiterThreadTask = task.get(ProcessWaiterThreadTask).?; any.runFromJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(RuntimeTranspilerStore))) => { + @field(Task.Tag, @typeName(RuntimeTranspilerStore)) => { var any: *RuntimeTranspilerStore = task.get(RuntimeTranspilerStore).?; any.drain(); }, - @field(Task.Tag, typeBaseName(@typeName(TimerObject))) => { + @field(Task.Tag, @typeName(TimerObject)) => { var any: *TimerObject = task.get(TimerObject).?; any.runImmediateTask(virtual_machine); }, - @field(Task.Tag, typeBaseName(@typeName(ServerAllConnectionsClosedTask))) => { + @field(Task.Tag, @typeName(ServerAllConnectionsClosedTask)) => { var any: *ServerAllConnectionsClosedTask = task.get(ServerAllConnectionsClosedTask).?; any.runFromJSThread(virtual_machine); }, - @field(Task.Tag, typeBaseName(@typeName(bun.bundle_v2.DeferredBatchTask))) => { + @field(Task.Tag, @typeName(bun.bundle_v2.DeferredBatchTask)) => { var any: *bun.bundle_v2.DeferredBatchTask = task.get(bun.bundle_v2.DeferredBatchTask).?; any.runOnJSThread(); }, - @field(Task.Tag, typeBaseName(@typeName(PosixSignalTask))) => { + @field(Task.Tag, @typeName(PosixSignalTask)) => { PosixSignalTask.runFromJSThread(@intCast(task.asUintptr()), global); }, - @field(Task.Tag, typeBaseName(@typeName(StatFS))) => { + @field(Task.Tag, @typeName(StatFS)) => { var any: *StatFS = task.get(StatFS).?; any.runFromJSThread(); }, - - @field(Task.Tag, typeBaseName(@typeName(FlushPendingFileSinkTask))) => { + @field(Task.Tag, @typeName(FlushPendingFileSinkTask)) => { var any: *FlushPendingFileSinkTask = 
task.get(FlushPendingFileSinkTask).?; any.runFromJSThread(); }, @@ -1691,7 +1684,7 @@ pub const EventLoop = struct { const task = Task.from(timer.as(*anyopaque)); defer timer.deinit(true); - JSC.VirtualMachine.get().enqueueTask(task); + VirtualMachine.get().enqueueTask(task); } pub fn ensureWaker(this: *EventLoop) void { @@ -1771,9 +1764,9 @@ pub const EventLoop = struct { }; pub const JsVM = struct { - vm: *JSC.VirtualMachine, + vm: *VirtualMachine, - pub inline fn init(inner: *JSC.VirtualMachine) JsVM { + pub inline fn init(inner: *VirtualMachine) JsVM { return .{ .vm = inner, }; @@ -1847,25 +1840,25 @@ pub const EventLoopKind = enum { pub fn refType(comptime this: EventLoopKind) type { return switch (this) { - .js => *JSC.VirtualMachine, + .js => *VirtualMachine, .mini => *JSC.MiniEventLoop, }; } pub fn getVm(comptime this: EventLoopKind) EventLoopKind.refType(this) { return switch (this) { - .js => JSC.VirtualMachine.get(), + .js => VirtualMachine.get(), .mini => JSC.MiniEventLoop.global, }; } }; pub fn AbstractVM(inner: anytype) switch (@TypeOf(inner)) { - *JSC.VirtualMachine => JsVM, + *VirtualMachine => JsVM, *JSC.MiniEventLoop => MiniVM, else => @compileError("Invalid event loop ctx: " ++ @typeName(@TypeOf(inner))), } { - if (comptime @TypeOf(inner) == *JSC.VirtualMachine) return JsVM.init(inner); + if (comptime @TypeOf(inner) == *VirtualMachine) return JsVM.init(inner); if (comptime @TypeOf(inner) == *JSC.MiniEventLoop) return MiniVM.init(inner); @compileError("Invalid event loop ctx: " ++ @typeName(@TypeOf(inner))); } @@ -1887,8 +1880,8 @@ pub const MiniEventLoop = struct { after_event_loop_callback_ctx: ?*anyopaque = null, after_event_loop_callback: ?JSC.OpaqueCallback = null, pipe_read_buffer: ?*PipeReadBuffer = null, - stdout_store: ?*JSC.WebCore.Blob.Store = null, - stderr_store: ?*JSC.WebCore.Blob.Store = null, + stdout_store: ?*bun.JSC.WebCore.Blob.Store = null, + stderr_store: ?*bun.JSC.WebCore.Blob.Store = null, const PipeReadBuffer = [256 * 
1024]u8; pub threadlocal var globalInitialized: bool = false; @@ -2267,7 +2260,7 @@ pub const EventLoopHandle = union(enum) { }; } - pub fn bunVM(this: EventLoopHandle) ?*JSC.VirtualMachine { + pub fn bunVM(this: EventLoopHandle) ?*VirtualMachine { if (this == .js) { return this.js.virtual_machine; } @@ -2282,7 +2275,7 @@ pub const EventLoopHandle = union(enum) { }; } - pub fn cast(this: EventLoopHandle, comptime as: @Type(.EnumLiteral)) if (as == .js) *JSC.EventLoop else *MiniEventLoop { + pub fn cast(this: EventLoopHandle, comptime as: @Type(.enum_literal)) if (as == .js) *JSC.EventLoop else *MiniEventLoop { if (as == .js) { if (this != .js) @panic("Expected *JSC.EventLoop but got *MiniEventLoop"); return this.js; @@ -2313,7 +2306,7 @@ pub const EventLoopHandle = union(enum) { pub fn init(context: anytype) EventLoopHandle { const Context = @TypeOf(context); return switch (Context) { - *JSC.VirtualMachine => .{ .js = context.eventLoop() }, + *VirtualMachine => .{ .js = context.eventLoop() }, *JSC.EventLoop => .{ .js = context }, *JSC.MiniEventLoop => .{ .mini = context }, *AnyEventLoop => switch (context.*) { @@ -2374,7 +2367,7 @@ pub const EventLoopHandle = union(enum) { this.loop().unref(); } - pub inline fn createNullDelimitedEnvMap(this: @This(), alloc: Allocator) ![:null]?[*:0]u8 { + pub inline fn createNullDelimitedEnvMap(this: @This(), alloc: Allocator) ![:null]?[*:0]const u8 { return switch (this) { .js => this.js.virtual_machine.transpiler.env.map.createNullDelimitedEnvMap(alloc), .mini => this.mini.env.?.map.createNullDelimitedEnvMap(alloc), @@ -2407,7 +2400,7 @@ pub const EventLoopTask = union { js: ConcurrentTask, mini: JSC.AnyTaskWithExtraContext, - pub fn init(comptime kind: @TypeOf(.EnumLiteral)) EventLoopTask { + pub fn init(comptime kind: @TypeOf(.enum_literal)) EventLoopTask { switch (kind) { .js => return .{ .js = ConcurrentTask{} }, .mini => return .{ .mini = JSC.AnyTaskWithExtraContext{} }, @@ -2467,7 +2460,7 @@ pub const PosixSignalHandle = 
struct { // Publish the new tail (Release so that the consumer sees the updated tail). this.tail.store(old_tail +% 1, .release); - JSC.VirtualMachine.getMainThreadVM().?.eventLoop().wakeup(); + VirtualMachine.getMainThreadVM().?.eventLoop().wakeup(); return true; } @@ -2475,7 +2468,7 @@ pub const PosixSignalHandle = struct { /// This is the signal handler entry point. Calls enqueue on the ring buffer. /// Note: Must be minimal logic here. Only do atomics & signal‐safe calls. export fn Bun__onPosixSignal(number: i32) void { - const vm = JSC.VirtualMachine.getMainThreadVM().?; + const vm = VirtualMachine.getMainThreadVM().?; _ = vm.eventLoop().signal_handler.?.enqueue(@intCast(number)); } diff --git a/src/bun.js/ipc.zig b/src/bun.js/ipc.zig index 83e93bd1335848..456bf943362691 100644 --- a/src/bun.js/ipc.zig +++ b/src/bun.js/ipc.zig @@ -695,8 +695,8 @@ fn NewSocketIPCHandler(comptime Context: type) type { // In the VirtualMachine case, `globalThis` is an optional, in case // the vm is freed before the socket closes. 
const globalThis = switch (@typeInfo(@TypeOf(this.globalThis))) { - .Pointer => this.globalThis, - .Optional => brk: { + .pointer => this.globalThis, + .optional => brk: { if (this.globalThis) |global| { break :brk global; } @@ -842,8 +842,8 @@ fn NewNamedPipeIPCHandler(comptime Context: type) type { bun.assert(bun.isSliceInBuffer(buffer, ipc.incoming.allocatedSlice())); const globalThis = switch (@typeInfo(@TypeOf(this.globalThis))) { - .Pointer => this.globalThis, - .Optional => brk: { + .pointer => this.globalThis, + .optional => brk: { if (this.globalThis) |global| { break :brk global; } diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index bf1f5bb8b4a5ed..3d9010143db9f2 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -12,7 +12,6 @@ const default_allocator = bun.default_allocator; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const ErrorableString = bun.JSC.ErrorableString; const Arena = @import("../allocators/mimalloc_arena.zig").Arena; -const C = bun.C; const Exception = bun.JSC.Exception; const Allocator = std.mem.Allocator; @@ -41,14 +40,7 @@ const ImportRecord = ast.ImportRecord; const DotEnv = @import("../env_loader.zig"); const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; const MacroRemap = @import("../resolver/package_json.zig").MacroMap; -const WebCore = bun.JSC.WebCore; -const Request = WebCore.Request; -const Response = WebCore.Response; -const Headers = WebCore.Headers; const String = bun.String; -const Fetch = WebCore.Fetch; -const FetchEvent = WebCore.FetchEvent; -const js = bun.JSC.C; const JSC = bun.JSC; const JSError = @import("./base.zig").JSError; const d = @import("./base.zig").d; @@ -66,18 +58,17 @@ const ZigException = bun.JSC.ZigException; const ZigStackTrace = bun.JSC.ZigStackTrace; const ErrorableResolvedSource = bun.JSC.ErrorableResolvedSource; const ResolvedSource = bun.JSC.ResolvedSource; -const JSPromise = bun.JSC.JSPromise; const JSInternalPromise = 
bun.JSC.JSInternalPromise; const JSModuleLoader = bun.JSC.JSModuleLoader; const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation; const ErrorableZigString = bun.JSC.ErrorableZigString; const ZigGlobalObject = bun.JSC.ZigGlobalObject; -const VM = bun.JSC.VM; +const VM = JSC.VM; const JSFunction = bun.JSC.JSFunction; const Config = @import("./config.zig"); const URL = @import("../url.zig").URL; const Bun = JSC.API.Bun; -const EventLoop = JSC.EventLoop; +const EventLoop = bun.JSC.EventLoop; const PendingResolution = @import("../resolver/resolver.zig").PendingResolution; const ThreadSafeFunction = JSC.napi.ThreadSafeFunction; const PackageManager = @import("../install/install.zig").PackageManager; @@ -93,8 +84,6 @@ const Task = JSC.Task; pub const Buffer = MarkedArrayBuffer; const Lock = bun.Mutex; -const BuildMessage = JSC.BuildMessage; -const ResolveMessage = JSC.ResolveMessage; const Async = bun.Async; const Ordinal = bun.Ordinal; @@ -321,13 +310,13 @@ pub const SavedSourceMap = struct { }; switch (Value.from(mapping.value_ptr.*).tag()) { - Value.Tag.ParsedSourceMap => { + @field(Value.Tag, @typeName(ParsedSourceMap)) => { defer this.unlock(); const map = Value.from(mapping.value_ptr.*).as(ParsedSourceMap); map.ref(); return .{ .map = map }; }, - Value.Tag.SavedMappings => { + @field(Value.Tag, @typeName(SavedMappings)) => { defer this.unlock(); var saved = SavedMappings{ .data = @as([*]u8, @ptrCast(Value.from(mapping.value_ptr.*).as(ParsedSourceMap))) }; defer saved.deinit(); @@ -340,7 +329,7 @@ pub const SavedSourceMap = struct { return .{ .map = result }; }, - Value.Tag.SourceProviderMap => { + @field(Value.Tag, @typeName(SourceProviderMap)) => { const ptr: *SourceProviderMap = Value.from(mapping.value_ptr.*).as(SourceProviderMap); this.unlock(); @@ -423,17 +412,17 @@ export fn Bun__readOriginTimerStart(vm: *JSC.VirtualMachine) f64 { return @as(f64, @floatCast((@as(f64, @floatFromInt(vm.origin_timestamp)) + 
JSC.VirtualMachine.origin_relative_epoch) / 1_000_000.0)); } -pub export fn Bun__GlobalObject__hasIPC(global: *JSC.JSGlobalObject) bool { +pub export fn Bun__GlobalObject__hasIPC(global: *JSGlobalObject) bool { return global.bunVM().ipc != null; } -extern fn Bun__Process__queueNextTick1(*JSC.ZigGlobalObject, JSC.JSValue, JSC.JSValue) void; +extern fn Bun__Process__queueNextTick1(*ZigGlobalObject, JSValue, JSValue) void; comptime { const Bun__Process__send = JSC.toJSHostFunction(Bun__Process__send_); - @export(Bun__Process__send, .{ .name = "Bun__Process__send" }); + @export(&Bun__Process__send, .{ .name = "Bun__Process__send" }); } -pub fn Bun__Process__send_(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { +pub fn Bun__Process__send_(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { JSC.markBinding(@src()); var message, var handle, var options_, var callback = callFrame.argumentsAsArray(4); @@ -449,7 +438,7 @@ pub fn Bun__Process__send_(globalObject: *JSGlobalObject, callFrame: *JSC.CallFr } const S = struct { - fn impl(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + fn impl(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments_ = callframe.arguments_old(1).slice(); const ex = arguments_[0]; VirtualMachine.Process__emitErrorEvent(globalThis, ex); @@ -458,13 +447,13 @@ pub fn Bun__Process__send_(globalObject: *JSGlobalObject, callFrame: *JSC.CallFr }; const vm = globalObject.bunVM(); - const zigGlobal: *JSC.ZigGlobalObject = @ptrCast(globalObject); + const zigGlobal: *ZigGlobalObject = @ptrCast(globalObject); const ipc_instance = vm.getIPCInstance() orelse { const ex = globalObject.ERR_IPC_CHANNEL_CLOSED("Channel closed.", .{}).toJS(); if (callback.isFunction()) { Bun__Process__queueNextTick1(zigGlobal, callback, ex); } else { - const fnvalue = JSC.JSFunction.create(globalObject, "", S.impl, 1, .{}); + const fnvalue = 
JSFunction.create(globalObject, "", S.impl, 1, .{}); Bun__Process__queueNextTick1(zigGlobal, fnvalue, ex); } return .false; @@ -489,7 +478,7 @@ pub fn Bun__Process__send_(globalObject: *JSGlobalObject, callFrame: *JSC.CallFr if (callback.isFunction()) { Bun__Process__queueNextTick1(zigGlobal, callback, ex); } else { - const fnvalue = JSC.JSFunction.create(globalObject, "", S.impl, 1, .{}); + const fnvalue = JSFunction.create(globalObject, "", S.impl, 1, .{}); Bun__Process__queueNextTick1(zigGlobal, fnvalue, ex); } } @@ -578,7 +567,7 @@ const WindowsOnly = struct { comptime { if (Environment.isWindows) { - @export(WindowsOnly.Bun__ZigGlobalObject__uvLoop, .{ .name = "Bun__ZigGlobalObject__uvLoop" }); + @export(&WindowsOnly.Bun__ZigGlobalObject__uvLoop, .{ .name = "Bun__ZigGlobalObject__uvLoop" }); } } @@ -593,8 +582,8 @@ pub const ExitHandler = struct { vm.exit_handler.exit_code = code; } - extern fn Process__dispatchOnBeforeExit(*JSC.JSGlobalObject, code: u8) void; - extern fn Process__dispatchOnExit(*JSC.JSGlobalObject, code: u8) void; + extern fn Process__dispatchOnBeforeExit(*JSGlobalObject, code: u8) void; + extern fn Process__dispatchOnExit(*JSGlobalObject, code: u8) void; extern fn Bun__closeAllSQLiteDatabasesForTermination() void; pub fn dispatchOnExit(this: *ExitHandler) void { @@ -779,7 +768,7 @@ pub const VirtualMachine = struct { main: string = "", main_resolved_path: bun.String = bun.String.empty, main_hash: u32 = 0, - process: js.JSObjectRef = null, + process: bun.JSC.C.JSObjectRef = null, entry_point: ServerEntryPoint = undefined, origin: URL = URL{}, node_fs: ?*Node.NodeFS = null, @@ -787,13 +776,13 @@ pub const VirtualMachine = struct { event_loop_handle: ?*PlatformEventLoop = null, pending_unref_counter: i32 = 0, preload: []const string = &[_][]const u8{}, - unhandled_pending_rejection_to_capture: ?*JSC.JSValue = null, + unhandled_pending_rejection_to_capture: ?*JSValue = null, standalone_module_graph: ?*bun.StandaloneModuleGraph = null, smol: bool 
= false, dns_result_order: DNSResolver.Order = .verbatim, hot_reload: bun.CLI.Command.HotReload = .none, - jsc: *JSC.VM = undefined, + jsc: *VM = undefined, /// hide bun:wrap from stack traces /// bun:wrap is very noisy @@ -873,7 +862,7 @@ pub const VirtualMachine = struct { rare_data: ?*JSC.RareData = null, is_us_loop_entered: bool = false, - pending_internal_promise: *JSC.JSInternalPromise = undefined, + pending_internal_promise: *JSInternalPromise = undefined, entry_point_result: struct { value: JSC.Strong = .{}, cjs_set_value: bool = false, @@ -907,7 +896,7 @@ pub const VirtualMachine = struct { is_inside_deferred_task_queue: bool = false, - pub const OnUnhandledRejection = fn (*VirtualMachine, globalObject: *JSC.JSGlobalObject, JSC.JSValue) void; + pub const OnUnhandledRejection = fn (*VirtualMachine, globalObject: *JSGlobalObject, JSValue) void; pub const OnException = fn (*ZigException) void; @@ -1102,11 +1091,11 @@ pub const VirtualMachine = struct { } }; - pub fn onQuietUnhandledRejectionHandler(this: *VirtualMachine, _: *JSC.JSGlobalObject, _: JSC.JSValue) void { + pub fn onQuietUnhandledRejectionHandler(this: *VirtualMachine, _: *JSGlobalObject, _: JSValue) void { this.unhandled_error_counter += 1; } - pub fn onQuietUnhandledRejectionHandlerCaptureValue(this: *VirtualMachine, _: *JSC.JSGlobalObject, value: JSC.JSValue) void { + pub fn onQuietUnhandledRejectionHandlerCaptureValue(this: *VirtualMachine, _: *JSGlobalObject, value: JSValue) void { this.unhandled_error_counter += 1; value.ensureStillAlive(); if (this.unhandled_pending_rejection_to_capture) |ptr| { @@ -1192,15 +1181,15 @@ pub const VirtualMachine = struct { } } - extern fn Bun__handleUncaughtException(*JSC.JSGlobalObject, err: JSC.JSValue, is_rejection: c_int) c_int; - extern fn Bun__handleUnhandledRejection(*JSC.JSGlobalObject, reason: JSC.JSValue, promise: JSC.JSValue) c_int; - extern fn Bun__Process__exit(*JSC.JSGlobalObject, code: c_int) noreturn; + extern fn 
Bun__handleUncaughtException(*JSGlobalObject, err: JSValue, is_rejection: c_int) c_int; + extern fn Bun__handleUnhandledRejection(*JSGlobalObject, reason: JSValue, promise: JSValue) c_int; + extern fn Bun__Process__exit(*JSGlobalObject, code: c_int) noreturn; export fn Bun__VirtualMachine__exitDuringUncaughtException(this: *JSC.VirtualMachine) void { this.exit_on_uncaught_exception = true; } - pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSC.JSGlobalObject, reason: JSC.JSValue, promise: JSC.JSValue) bool { + pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, reason: JSValue, promise: JSValue) bool { if (this.isShuttingDown()) { Output.debugWarn("unhandledRejection during shutdown.", .{}); return true; @@ -1220,7 +1209,7 @@ pub const VirtualMachine = struct { return handled; } - pub fn uncaughtException(this: *JSC.VirtualMachine, globalObject: *JSC.JSGlobalObject, err: JSC.JSValue, is_rejection: bool) bool { + pub fn uncaughtException(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, err: JSValue, is_rejection: bool) bool { if (this.isShuttingDown()) { Output.debugWarn("uncaughtException during shutdown.", .{}); return true; @@ -1261,7 +1250,7 @@ pub const VirtualMachine = struct { } } - pub fn defaultOnUnhandledRejection(this: *JSC.VirtualMachine, _: *JSC.JSGlobalObject, value: JSC.JSValue) void { + pub fn defaultOnUnhandledRejection(this: *JSC.VirtualMachine, _: *JSGlobalObject, value: JSValue) void { this.runErrorHandler(value, this.onUnhandledRejectionExceptionList); } @@ -1270,7 +1259,7 @@ pub const VirtualMachine = struct { } pub fn garbageCollect(this: *const VirtualMachine, sync: bool) usize { - @setCold(true); + @branchHint(.cold); Global.mimalloc_cleanup(false); if (sync) return this.global.vm().runGC(true); @@ -1405,10 +1394,10 @@ pub const VirtualMachine = struct { } comptime { - @export(scriptExecutionStatus, .{ .name = "Bun__VM__scriptExecutionStatus" }); - 
@export(setEntryPointEvalResultESM, .{ .name = "Bun__VM__setEntryPointEvalResultESM" }); - @export(setEntryPointEvalResultCJS, .{ .name = "Bun__VM__setEntryPointEvalResultCJS" }); - @export(specifierIsEvalEntryPoint, .{ .name = "Bun__VM__specifierIsEvalEntryPoint" }); + @export(&scriptExecutionStatus, .{ .name = "Bun__VM__scriptExecutionStatus" }); + @export(&setEntryPointEvalResultESM, .{ .name = "Bun__VM__setEntryPointEvalResultESM" }); + @export(&setEntryPointEvalResultCJS, .{ .name = "Bun__VM__setEntryPointEvalResultCJS" }); + @export(&specifierIsEvalEntryPoint, .{ .name = "Bun__VM__specifierIsEvalEntryPoint" }); } pub fn onExit(this: *VirtualMachine) void { @@ -1516,7 +1505,7 @@ pub const VirtualMachine = struct { pub const Handle = opaque { extern "c" fn Bun__LifecycleAgentReportReload(agent: *Handle) void; - extern "c" fn Bun__LifecycleAgentReportError(agent: *Handle, exception: *JSC.ZigException) void; + extern "c" fn Bun__LifecycleAgentReportError(agent: *Handle, exception: *ZigException) void; extern "c" fn Bun__LifecycleAgentPreventExit(agent: *Handle) void; extern "c" fn Bun__LifecycleAgentStopPreventingExit(agent: *Handle) void; @@ -1533,7 +1522,7 @@ pub const VirtualMachine = struct { Bun__LifecycleAgentReportReload(this); } - pub fn reportError(this: *Handle, exception: *JSC.ZigException) void { + pub fn reportError(this: *Handle, exception: *ZigException) void { debug("reportError", .{}); Bun__LifecycleAgentReportError(this, exception); } @@ -1560,7 +1549,7 @@ pub const VirtualMachine = struct { } } - pub fn reportError(this: *LifecycleAgent, exception: *JSC.ZigException) void { + pub fn reportError(this: *LifecycleAgent, exception: *ZigException) void { if (this.handle) |handle| { handle.reportError(exception); } @@ -1595,9 +1584,9 @@ pub const VirtualMachine = struct { pub const log = Output.scoped(.debugger, false); - extern "C" fn Bun__createJSDebugger(*JSC.JSGlobalObject) u32; + extern "C" fn Bun__createJSDebugger(*JSGlobalObject) u32; extern 
"C" fn Bun__ensureDebugger(u32, bool) void; - extern "C" fn Bun__startJSDebuggerThread(*JSC.JSGlobalObject, u32, *bun.String, c_int, bool) void; + extern "C" fn Bun__startJSDebuggerThread(*JSGlobalObject, u32, *bun.String, c_int, bool) void; var futex_atomic: std.atomic.Value(u32) = undefined; pub fn waitForDebuggerIfNecessary(this: *VirtualMachine) void { @@ -1826,7 +1815,7 @@ pub const VirtualMachine = struct { this.eventLoop().waitForTasks(); } - pub const MacroMap = std.AutoArrayHashMap(i32, js.JSObjectRef); + pub const MacroMap = std.AutoArrayHashMap(i32, bun.JSC.C.JSObjectRef); pub fn enableMacroMode(this: *VirtualMachine) void { JSC.markBinding(@src()); @@ -1944,7 +1933,7 @@ pub const VirtualMachine = struct { vm.transpiler.resolver.onWakePackageManager = .{ .context = &vm.modules, .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = JSC.ModuleLoader.AsyncModule.Queue.onDependencyError, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, }; vm.transpiler.resolver.standalone_module_graph = opts.graph.?; @@ -2066,7 +2055,7 @@ pub const VirtualMachine = struct { vm.transpiler.resolver.onWakePackageManager = .{ .context = &vm.modules, .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = JSC.ModuleLoader.AsyncModule.Queue.onDependencyError, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, }; vm.transpiler.configureLinker(); @@ -2226,7 +2215,7 @@ pub const VirtualMachine = struct { vm.transpiler.resolver.onWakePackageManager = .{ .context = &vm.modules, .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = JSC.ModuleLoader.AsyncModule.Queue.onDependencyError, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, }; vm.transpiler.resolver.standalone_module_graph = opts.graph; @@ -2321,7 +2310,7 @@ pub const VirtualMachine = struct { vm.transpiler.resolver.onWakePackageManager = .{ .context = &vm.modules, .handler = 
ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = JSC.ModuleLoader.AsyncModule.Queue.onDependencyError, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, }; vm.transpiler.configureLinker(); @@ -2411,7 +2400,7 @@ pub const VirtualMachine = struct { pub fn fetchWithoutOnLoadPlugins( jsc_vm: *VirtualMachine, - globalObject: *JSC.JSGlobalObject, + globalObject: *JSGlobalObject, _specifier: String, referrer: String, log: *logger.Log, @@ -2728,7 +2717,7 @@ pub const VirtualMachine = struct { defer specifier_utf8.deinit(); const source_utf8 = source.toUTF8(bun.default_allocator); defer source_utf8.deinit(); - const printed = ResolveMessage.fmt( + const printed = JSC.ResolveMessage.fmt( bun.default_allocator, specifier_utf8.slice(), source_utf8.slice(), @@ -2741,7 +2730,7 @@ pub const VirtualMachine = struct { printed, ), }; - res.* = ErrorableString.err(error.NameTooLong, ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); + res.* = ErrorableString.err(error.NameTooLong, JSC.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); return; } @@ -2809,7 +2798,7 @@ pub const VirtualMachine = struct { } } - const printed = ResolveMessage.fmt( + const printed = JSC.ResolveMessage.fmt( jsc_vm.allocator, specifier_utf8.slice(), source_utf8.slice(), @@ -2829,7 +2818,7 @@ pub const VirtualMachine = struct { }; { - res.* = ErrorableString.err(err, ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); + res.* = ErrorableString.err(err, JSC.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); } return; @@ -2867,7 +2856,7 @@ pub const VirtualMachine = struct { }; }; { - ret.* = ErrorableResolvedSource.err(err, BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid()); + ret.* = ErrorableResolvedSource.err(err, JSC.BuildMessage.create(globalThis, 
globalThis.allocator(), msg).asVoid()); } return; }, @@ -2875,8 +2864,8 @@ pub const VirtualMachine = struct { 1 => { const msg = log.msgs.items[0]; ret.* = ErrorableResolvedSource.err(err, switch (msg.metadata) { - .build => BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid(), - .resolve => ResolveMessage.create( + .build => JSC.BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid(), + .resolve => JSC.ResolveMessage.create( globalThis, globalThis.allocator(), msg, @@ -2894,8 +2883,8 @@ pub const VirtualMachine = struct { for (logs, errors) |msg, *current| { current.* = switch (msg.metadata) { - .build => BuildMessage.create(globalThis, globalThis.allocator(), msg), - .resolve => ResolveMessage.create( + .build => JSC.BuildMessage.create(globalThis, globalThis.allocator(), msg), + .resolve => JSC.ResolveMessage.create( globalThis, globalThis.allocator(), msg, @@ -2967,7 +2956,7 @@ pub const VirtualMachine = struct { } pub noinline fn runErrorHandler(this: *VirtualMachine, result: JSValue, exception_list: ?*ExceptionList) void { - @setCold(true); + @branchHint(.cold); if (!result.isEmptyOrUndefinedOrNull()) this.last_reported_error_for_dedupe = result; @@ -3007,7 +2996,7 @@ pub const VirtualMachine = struct { } } - export fn Bun__logUnhandledException(exception: JSC.JSValue) void { + export fn Bun__logUnhandledException(exception: JSValue) void { get().runErrorHandler(exception, null); } @@ -3125,20 +3114,20 @@ pub const VirtualMachine = struct { if (!this.transpiler.options.disable_transpilation) { if (try this.loadPreloads()) |promise| { - JSC.JSValue.fromCell(promise).ensureStillAlive(); - JSC.JSValue.fromCell(promise).protect(); + JSValue.fromCell(promise).ensureStillAlive(); + JSValue.fromCell(promise).protect(); this.pending_internal_promise = promise; return promise; } const promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.init(main_file_name)) orelse return error.JSError; this.pending_internal_promise = 
promise; - JSC.JSValue.fromCell(promise).ensureStillAlive(); + JSValue.fromCell(promise).ensureStillAlive(); return promise; } else { const promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.fromBytes(this.main)) orelse return error.JSError; this.pending_internal_promise = promise; - JSC.JSValue.fromCell(promise).ensureStillAlive(); + JSValue.fromCell(promise).ensureStillAlive(); return promise; } @@ -3155,9 +3144,9 @@ pub const VirtualMachine = struct { if (!this.transpiler.options.disable_transpilation) { if (try this.loadPreloads()) |promise| { - JSC.JSValue.fromCell(promise).ensureStillAlive(); + JSValue.fromCell(promise).ensureStillAlive(); this.pending_internal_promise = promise; - JSC.JSValue.fromCell(promise).protect(); + JSValue.fromCell(promise).protect(); return promise; } @@ -3165,7 +3154,7 @@ pub const VirtualMachine = struct { const promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.fromBytes(this.main)) orelse return error.JSError; this.pending_internal_promise = promise; - JSC.JSValue.fromCell(promise).ensureStillAlive(); + JSValue.fromCell(promise).ensureStillAlive(); return promise; } @@ -3393,7 +3382,7 @@ pub const VirtualMachine = struct { fn printErrorFromMaybePrivateData( this: *VirtualMachine, - value: JSC.JSValue, + value: JSValue, exception_list: ?*ExceptionList, formatter: *ConsoleObject.Formatter, comptime Writer: type, @@ -3462,7 +3451,7 @@ pub const VirtualMachine = struct { return false; } - pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *JSC.Exception) JSValue { + pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *Exception) JSValue { var jsc_vm = globalObject.bunVM(); _ = jsc_vm.uncaughtException(globalObject, exception.value(), false); return .undefined; @@ -3553,7 +3542,7 @@ pub const VirtualMachine = struct { } } - pub export fn Bun__remapStackFramePositions(globalObject: *JSC.JSGlobalObject, frames: [*]JSC.ZigStackFrame, frames_count: usize) void { 
+ pub export fn Bun__remapStackFramePositions(globalObject: *JSGlobalObject, frames: [*]JSC.ZigStackFrame, frames_count: usize) void { globalObject.bunVM().remapStackFramePositions(frames, frames_count); } @@ -3764,11 +3753,11 @@ pub const VirtualMachine = struct { if (strings.getLinesInText( code.slice(), @intCast(last_line), - JSC.ZigException.Holder.source_lines_count, + ZigException.Holder.source_lines_count, )) |lines_buf| { var lines = lines_buf.slice(); - var source_lines = exception.stack.source_lines_ptr[0..JSC.ZigException.Holder.source_lines_count]; - var source_line_numbers = exception.stack.source_lines_numbers[0..JSC.ZigException.Holder.source_lines_count]; + var source_lines = exception.stack.source_lines_ptr[0..ZigException.Holder.source_lines_count]; + var source_line_numbers = exception.stack.source_lines_numbers[0..ZigException.Holder.source_lines_count]; @memset(source_lines, String.empty); @memset(source_line_numbers, 0); @@ -4007,7 +3996,7 @@ pub const VirtualMachine = struct { } // This is usually unsafe to do, but we are protecting them each time first - var errors_to_append = std.ArrayList(JSC.JSValue).init(this.allocator); + var errors_to_append = std.ArrayList(JSValue).init(this.allocator); defer { for (errors_to_append.items) |err| { err.unprotect(); @@ -4211,8 +4200,8 @@ pub const VirtualMachine = struct { // In Github Actions, emit an annotation that renders the error and location. 
// https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message - pub noinline fn printGithubAnnotation(exception: *JSC.ZigException) void { - @setCold(true); + pub noinline fn printGithubAnnotation(exception: *ZigException) void { + @branchHint(.cold); const name = exception.name; const message = exception.message; const frames = exception.stack.frames(); @@ -4437,7 +4426,7 @@ pub const VirtualMachine = struct { this.destroy(); } - extern fn Bun__setChannelRef(*JSC.JSGlobalObject, bool) void; + extern fn Bun__setChannelRef(*JSGlobalObject, bool) void; export fn Bun__closeChildIPC(global: *JSGlobalObject) void { if (global.bunVM().ipc) |*current_ipc| { @@ -4533,8 +4522,8 @@ pub const VirtualMachine = struct { } }; -pub const HotReloader = NewHotReloader(VirtualMachine, JSC.EventLoop, false); -pub const WatchReloader = NewHotReloader(VirtualMachine, JSC.EventLoop, true); +pub const HotReloader = NewHotReloader(VirtualMachine, EventLoop, false); +pub const WatchReloader = NewHotReloader(VirtualMachine, EventLoop, true); extern fn BunDebugger__willHotReload() void; pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime reload_immediately: bool) type { @@ -4751,7 +4740,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime } else { return this.ctx.bun_watcher.hot; } - } else if (@typeInfo(@TypeOf(this.ctx.bun_watcher)) == .Optional) { + } else if (@typeInfo(@TypeOf(this.ctx.bun_watcher)) == .optional) { return this.ctx.bun_watcher.?; } else { return this.ctx.bun_watcher; @@ -4964,11 +4953,10 @@ pub var synthetic_allocation_limit: usize = std.math.maxInt(u32); pub var string_allocation_limit: usize = std.math.maxInt(u32); comptime { - @export(synthetic_allocation_limit, .{ .name = "Bun__syntheticAllocationLimit" }); - @export(string_allocation_limit, .{ .name = "Bun__stringSyntheticAllocationLimit" }); + @export(&string_allocation_limit, .{ .name = 
"Bun__stringSyntheticAllocationLimit" }); } -pub fn Bun__setSyntheticAllocationLimitForTesting(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { +pub fn Bun__setSyntheticAllocationLimitForTesting(globalObject: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const args = callframe.arguments_old(1).slice(); if (args.len < 1) { return globalObject.throwNotEnoughArguments("setSyntheticAllocationLimitForTesting", 1, args.len); diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index f3f2f324cd4630..191b346f28bb12 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -39,12 +39,6 @@ const ImportRecord = ast.ImportRecord; const DotEnv = @import("../env_loader.zig"); const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; const MacroRemap = @import("../resolver/package_json.zig").MacroMap; -const WebCore = bun.JSC.WebCore; -const Request = WebCore.Request; -const Response = WebCore.Response; -const Headers = WebCore.Headers; -const Fetch = WebCore.Fetch; -const FetchEvent = WebCore.FetchEvent; const js = bun.JSC.C; const JSC = bun.JSC; const JSError = @import("./base.zig").JSError; @@ -58,13 +52,10 @@ const JSGlobalObject = bun.JSC.JSGlobalObject; const ExceptionValueRef = bun.JSC.ExceptionValueRef; const JSPrivateDataPtr = bun.JSC.JSPrivateDataPtr; const ConsoleObject = bun.JSC.ConsoleObject; -const Node = bun.JSC.Node; const ZigException = bun.JSC.ZigException; const ZigStackTrace = bun.JSC.ZigStackTrace; -const ErrorableResolvedSource = bun.JSC.ErrorableResolvedSource; const ResolvedSource = bun.JSC.ResolvedSource; const JSPromise = bun.JSC.JSPromise; -const JSInternalPromise = bun.JSC.JSInternalPromise; const JSModuleLoader = bun.JSC.JSModuleLoader; const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation; const ErrorableZigString = bun.JSC.ErrorableZigString; @@ -79,7 +70,7 @@ const PendingResolution = 
@import("../resolver/resolver.zig").PendingResolution; const ThreadSafeFunction = JSC.napi.ThreadSafeFunction; const PackageManager = @import("../install/install.zig").PackageManager; const Install = @import("../install/install.zig"); -const VirtualMachine = JSC.VirtualMachine; +const VirtualMachine = bun.JSC.VirtualMachine; const Dependency = @import("../install/dependency.zig"); const Async = bun.Async; const String = bun.String; @@ -231,7 +222,7 @@ pub const RuntimeTranspilerStore = struct { } else { return; } - var vm: *JSC.VirtualMachine = @fieldParentPtr("transpiler_store", this); + var vm: *VirtualMachine = @fieldParentPtr("transpiler_store", this); const event_loop = vm.eventLoop(); const global = vm.global; const jsc_vm = vm.jsc; @@ -246,8 +237,8 @@ pub const RuntimeTranspilerStore = struct { pub fn transpile( this: *RuntimeTranspilerStore, - vm: *JSC.VirtualMachine, - globalObject: *JSC.JSGlobalObject, + vm: *VirtualMachine, + globalObject: *JSGlobalObject, input_specifier: bun.String, path: Fs.Path, referrer: bun.String, @@ -263,7 +254,7 @@ pub const RuntimeTranspilerStore = struct { .vm = vm, .log = logger.Log.init(bun.default_allocator), .loader = vm.transpiler.options.loader(owned_path.name.ext), - .promise = JSC.Strong.create(JSC.JSValue.fromCell(promise), globalObject), + .promise = JSC.Strong.create(JSValue.fromCell(promise), globalObject), .poll_ref = .{}, .fetcher = TranspilerJob.Fetcher{ .file = {}, @@ -281,8 +272,8 @@ pub const RuntimeTranspilerStore = struct { non_threadsafe_referrer: String, loader: options.Loader, promise: JSC.Strong = .{}, - vm: *JSC.VirtualMachine, - globalThis: *JSC.JSGlobalObject, + vm: *VirtualMachine, + globalThis: *JSGlobalObject, fetcher: Fetcher, poll_ref: Async.KeepAlive = .{}, generation_number: u32 = 0, @@ -713,7 +704,7 @@ pub const ModuleLoader = struct { } } - pub fn resolveEmbeddedFile(vm: *JSC.VirtualMachine, input_path: []const u8, extname: []const u8) ?[]const u8 { + pub fn resolveEmbeddedFile(vm: 
*VirtualMachine, input_path: []const u8, extname: []const u8) ?[]const u8 { if (input_path.len == 0) return null; var graph = vm.standalone_module_graph orelse return null; const file = graph.find(input_path) orelse return null; @@ -739,7 +730,7 @@ pub const ModuleLoader = struct { .{ .data = .{ - .encoded_slice = JSC.ZigString.Slice.fromUTF8NeverFree(file.contents), + .encoded_slice = ZigString.Slice.fromUTF8NeverFree(file.contents), }, .dirfd = bun.toFD(tmpdir.fd), .file = .{ @@ -769,7 +760,7 @@ pub const ModuleLoader = struct { package_json: ?*PackageJSON = null, loader: Api.Loader, hash: u32 = std.math.maxInt(u32), - globalThis: *JSC.JSGlobalObject = undefined, + globalThis: *JSGlobalObject = undefined, arena: *bun.ArenaAllocator, // This is the specific state for making it async @@ -805,7 +796,7 @@ pub const ModuleLoader = struct { pub const Map = std.ArrayListUnmanaged(AsyncModule); - pub fn enqueue(this: *Queue, globalObject: *JSC.JSGlobalObject, opts: anytype) void { + pub fn enqueue(this: *Queue, globalObject: *JSGlobalObject, opts: anytype) void { debug("enqueue: {s}", .{opts.specifier}); var module = AsyncModule.init(opts, globalObject) catch unreachable; module.poll_ref.ref(this.vm()); @@ -1063,7 +1054,7 @@ pub const ModuleLoader = struct { } }; - pub fn init(opts: anytype, globalObject: *JSC.JSGlobalObject) !AsyncModule { + pub fn init(opts: anytype, globalObject: *JSGlobalObject) !AsyncModule { var promise = JSC.Strong{}; // var stmt_blocks = js_ast.Stmt.Data.toOwnedSlice(); // var expr_blocks = js_ast.Expr.Data.toOwnedSlice(); @@ -1098,7 +1089,7 @@ pub const ModuleLoader = struct { }; } - pub fn done(this: *AsyncModule, jsc_vm: *JSC.VirtualMachine) void { + pub fn done(this: *AsyncModule, jsc_vm: *VirtualMachine) void { var clone = jsc_vm.allocator.create(AsyncModule) catch unreachable; clone.* = this.*; jsc_vm.modules.scheduled += 1; @@ -1115,11 +1106,11 @@ pub const ModuleLoader = struct { } var log = logger.Log.init(jsc_vm.allocator); defer 
log.deinit(); - var errorable: ErrorableResolvedSource = undefined; + var errorable: JSC.ErrorableResolvedSource = undefined; this.poll_ref.unref(jsc_vm); outer: { - errorable = ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| { - JSC.VirtualMachine.processFetchLog( + errorable = JSC.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| { + VirtualMachine.processFetchLog( this.globalThis, bun.String.init(this.specifier), bun.String.init(this.referrer), @@ -1145,8 +1136,8 @@ pub const ModuleLoader = struct { } pub fn fulfill( - globalThis: *JSC.JSGlobalObject, - promise: JSC.JSValue, + globalThis: *JSGlobalObject, + promise: JSValue, resolved_source: ResolvedSource, err: ?anyerror, specifier_: bun.String, @@ -1161,9 +1152,9 @@ pub const ModuleLoader = struct { referrer.deref(); } - var errorable: ErrorableResolvedSource = undefined; + var errorable: JSC.ErrorableResolvedSource = undefined; if (err) |e| { - JSC.VirtualMachine.processFetchLog( + VirtualMachine.processFetchLog( globalThis, specifier, referrer, @@ -1172,7 +1163,7 @@ pub const ModuleLoader = struct { e, ); } else { - errorable = ErrorableResolvedSource.ok(resolved_source); + errorable = JSC.ErrorableResolvedSource.ok(resolved_source); } log.deinit(); @@ -1187,7 +1178,7 @@ pub const ModuleLoader = struct { ); } - pub fn resolveError(this: *AsyncModule, vm: *JSC.VirtualMachine, import_record_id: u32, result: PackageResolveError) !void { + pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void { const globalThis = this.globalThis; const msg: []u8 = try switch (result.err) { @@ -1279,7 +1270,7 @@ pub const ModuleLoader = struct { this.deinit(); promise.rejectAsHandled(globalThis, error_instance); } - pub fn downloadError(this: *AsyncModule, vm: *JSC.VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void { + pub fn downloadError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: 
u32, result: PackageDownloadError) !void { const globalThis = this.globalThis; const msg_args = .{ @@ -1378,7 +1369,7 @@ pub const ModuleLoader = struct { debug("resumeLoadingModule: {s}", .{this.specifier}); var parse_result = this.parse_result; const path = this.path; - var jsc_vm = JSC.VirtualMachine.get(); + var jsc_vm = VirtualMachine.get(); const specifier = this.specifier; const old_log = jsc_vm.log; @@ -1469,15 +1460,15 @@ pub const ModuleLoader = struct { } extern "C" fn Bun__onFulfillAsyncModule( - globalObject: *JSC.JSGlobalObject, - promiseValue: JSC.JSValue, + globalObject: *JSGlobalObject, + promiseValue: JSValue, res: *JSC.ErrorableResolvedSource, specifier: *bun.String, referrer: *bun.String, ) void; }; - pub export fn Bun__getDefaultLoader(global: *JSC.JSGlobalObject, str: *const bun.String) Api.Loader { + pub export fn Bun__getDefaultLoader(global: *JSGlobalObject, str: *const bun.String) Api.Loader { var jsc_vm = global.bunVM(); const filename = str.toUTF8(jsc_vm.allocator); defer filename.deinit(); @@ -1500,7 +1491,7 @@ pub const ModuleLoader = struct { virtual_source: ?*const logger.Source, promise_ptr: ?*?*JSC.JSInternalPromise, source_code_printer: *js_printer.BufferPrinter, - globalObject: ?*JSC.JSGlobalObject, + globalObject: ?*JSGlobalObject, comptime flags: FetchFlags, ) !ResolvedSource { const disable_transpilying = comptime flags.disableTranspiling(); @@ -1764,7 +1755,7 @@ pub const ModuleLoader = struct { .specifier = input_specifier, .source_url = input_specifier.createIfDifferent(path.text), .hash = 0, - .jsvalue_for_export = JSC.JSValue.createEmptyObject(jsc_vm.global, 0), + .jsvalue_for_export = JSValue.createEmptyObject(jsc_vm.global, 0), .tag = .exports_object, }; } @@ -2007,10 +1998,10 @@ pub const ModuleLoader = struct { const encoded = JSC.EncodedJSValue{ .asPtr = globalThis, }; - const globalValue = @as(JSC.JSValue, @enumFromInt(encoded.asInt64)); + const globalValue = @as(JSValue, @enumFromInt(encoded.asInt64)); 
globalValue.put( globalThis, - JSC.ZigString.static("wasmSourceBytes"), + ZigString.static("wasmSourceBytes"), JSC.ArrayBuffer.create(globalThis, source.contents, .Uint8Array), ); } @@ -2225,10 +2216,10 @@ pub const ModuleLoader = struct { pub export fn Bun__fetchBuiltinModule( jsc_vm: *VirtualMachine, - globalObject: *JSC.JSGlobalObject, + globalObject: *JSGlobalObject, specifier: *bun.String, referrer: *bun.String, - ret: *ErrorableResolvedSource, + ret: *JSC.ErrorableResolvedSource, ) bool { JSC.markBinding(@src()); var log = logger.Log.init(jsc_vm.transpiler.allocator); @@ -2245,7 +2236,7 @@ pub const ModuleLoader = struct { VirtualMachine.processFetchLog(globalObject, specifier.*, referrer.*, &log, ret, err); return true; }) |builtin| { - ret.* = ErrorableResolvedSource.ok(builtin); + ret.* = JSC.ErrorableResolvedSource.ok(builtin); return true; } else { return false; @@ -2254,11 +2245,11 @@ pub const ModuleLoader = struct { pub export fn Bun__transpileFile( jsc_vm: *VirtualMachine, - globalObject: *JSC.JSGlobalObject, + globalObject: *JSGlobalObject, specifier_ptr: *bun.String, referrer: *bun.String, type_attribute: ?*const bun.String, - ret: *ErrorableResolvedSource, + ret: *JSC.ErrorableResolvedSource, allow_promise: bool, ) ?*anyopaque { JSC.markBinding(@src()); @@ -2326,7 +2317,7 @@ pub const ModuleLoader = struct { virtual_source = &virtual_source_to_use.?; } } else { - ret.* = ErrorableResolvedSource.err(error.JSErrorObject, globalObject.MODULE_NOT_FOUND("Blob not found", .{}).toJS().asVoid()); + ret.* = JSC.ErrorableResolvedSource.err(error.JSErrorObject, globalObject.MODULE_NOT_FOUND("Blob not found", .{}).toJS().asVoid()); return null; } } @@ -2409,7 +2400,7 @@ pub const ModuleLoader = struct { defer jsc_vm.module_loader.resetArena(jsc_vm); var promise: ?*JSC.JSInternalPromise = null; - ret.* = ErrorableResolvedSource.ok( + ret.* = JSC.ErrorableResolvedSource.ok( ModuleLoader.transpileSourceCode( jsc_vm, specifier, @@ -2440,7 +2431,7 @@ pub const 
ModuleLoader = struct { return promise; } - export fn Bun__runVirtualModule(globalObject: *JSC.JSGlobalObject, specifier_ptr: *const bun.String) JSValue { + export fn Bun__runVirtualModule(globalObject: *JSGlobalObject, specifier_ptr: *const bun.String) JSValue { JSC.markBinding(@src()); if (globalObject.bunVM().plugin_runner == null) return JSValue.zero; @@ -2627,12 +2618,12 @@ pub const ModuleLoader = struct { } export fn Bun__transpileVirtualModule( - globalObject: *JSC.JSGlobalObject, + globalObject: *JSGlobalObject, specifier_ptr: *const bun.String, referrer_ptr: *const bun.String, source_code: *ZigString, loader_: Api.Loader, - ret: *ErrorableResolvedSource, + ret: *JSC.ErrorableResolvedSource, ) bool { JSC.markBinding(@src()); const jsc_vm = globalObject.bunVM(); @@ -2664,7 +2655,7 @@ pub const ModuleLoader = struct { defer log.deinit(); defer jsc_vm.module_loader.resetArena(jsc_vm); - ret.* = ErrorableResolvedSource.ok( + ret.* = JSC.ErrorableResolvedSource.ok( ModuleLoader.transpileSourceCode( jsc_vm, specifier_slice.slice(), @@ -3104,7 +3095,7 @@ pub const HardcodedModule = enum { }; /// Support embedded .node files -export fn Bun__resolveEmbeddedNodeFile(vm: *JSC.VirtualMachine, in_out_str: *bun.String) bool { +export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String) bool { if (vm.standalone_module_graph == null) return false; const input_path = in_out_str.toUTF8(bun.default_allocator); diff --git a/src/bun.js/node/assert/myers_diff.zig b/src/bun.js/node/assert/myers_diff.zig index d6eb316c217182..4b51c63dd65c32 100644 --- a/src/bun.js/node/assert/myers_diff.zig +++ b/src/bun.js/node/assert/myers_diff.zig @@ -255,7 +255,7 @@ pub fn DifferWithEql(comptime Line: type, comptime opts: Options, comptime areLi try result.ensureUnusedCapacity(u(@max(x - prev_x, y - prev_y))); while (x > prev_x and y > prev_y) { const line: Line = blk: { - if (@typeInfo(Line) == .Pointer and comptime opts.check_comma_disparity) { + if (@typeInfo(Line) 
== .pointer and comptime opts.check_comma_disparity) { const actual_el = actual[u(x) - 1]; // actual[x-1].endsWith(',') break :blk if (actual_el[actual_el.len - 1] == ',') @@ -347,7 +347,7 @@ fn areStrLinesEqual(comptime T: type, a: T, b: T, comptime check_comma_disparity // []const u8 -> u8 const info = @typeInfo(T); - const ChildType = info.Pointer.child; + const ChildType = info.pointer.child; if (comptime !check_comma_disparity) { return mem.eql(ChildType, a, b); @@ -498,7 +498,7 @@ test StrDiffer { \\ 4, \\ 5, \\ 6, - \\ 7 + \\ 7 \\] , // expected @@ -509,7 +509,7 @@ test StrDiffer { \\ 4, \\ 5, \\ 9, - \\ 7 + \\ 7 \\] }, // // remove line diff --git a/src/bun.js/node/buffer.zig b/src/bun.js/node/buffer.zig index 970307825969d1..0bccfa0b9d02b8 100644 --- a/src/bun.js/node/buffer.zig +++ b/src/bun.js/node/buffer.zig @@ -82,5 +82,5 @@ pub const BufferVectorized = struct { }; comptime { - @export(BufferVectorized.fill, .{ .name = "Bun__Buffer_fill" }); + @export(&BufferVectorized.fill, .{ .name = "Bun__Buffer_fill" }); } diff --git a/src/bun.js/node/fs_events.zig b/src/bun.js/node/fs_events.zig index 9953527a6ebfb6..483da52d0ad732 100644 --- a/src/bun.js/node/fs_events.zig +++ b/src/bun.js/node/fs_events.zig @@ -79,13 +79,6 @@ pub const kFSEventStreamEventFlagRootChanged: c_int = 32; pub const kFSEventStreamEventFlagUnmount: c_int = 128; pub const kFSEventStreamEventFlagUserDropped: c_int = 2; -// Lazy function call binding. -const RTLD_LAZY = 0x1; -// Symbols exported from this image (dynamic library or bundle) -// are generally hidden and only availble to dlsym() when -// directly using the handle returned by this call to dlopen(). 
-const RTLD_LOCAL = 0x4; - pub const kFSEventsModified: c_int = kFSEventStreamEventFlagItemChangeOwner | kFSEventStreamEventFlagItemFinderInfoMod | @@ -191,7 +184,7 @@ var fsevents_cf: ?CoreFoundation = null; var fsevents_cs: ?CoreServices = null; fn InitLibrary() void { - const fsevents_cf_handle = bun.C.dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", RTLD_LAZY | RTLD_LOCAL); + const fsevents_cf_handle = bun.C.dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", .{ .LAZY = true, .LOCAL = true }); if (fsevents_cf_handle == null) @panic("Cannot Load CoreFoundation"); fsevents_cf = CoreFoundation{ @@ -210,7 +203,7 @@ fn InitLibrary() void { .RunLoopDefaultMode = dlsym(fsevents_cf_handle, *CFStringRef, "kCFRunLoopDefaultMode") orelse @panic("Cannot Load CoreFoundation"), }; - const fsevents_cs_handle = bun.C.dlopen("/System/Library/Frameworks/CoreServices.framework/Versions/A/CoreServices", RTLD_LAZY | RTLD_LOCAL); + const fsevents_cs_handle = bun.C.dlopen("/System/Library/Frameworks/CoreServices.framework/Versions/A/CoreServices", .{ .LAZY = true, .LOCAL = true }); if (fsevents_cs_handle == null) @panic("Cannot Load CoreServices"); fsevents_cs = CoreServices{ diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 31effabf8a0326..d2dd5eb6232bd6 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -43,10 +43,6 @@ const StringOrBuffer = JSC.Node.StringOrBuffer; const NodeFSFunctionEnum = std.meta.DeclEnum(JSC.Node.NodeFS); const UvFsCallback = fn (*uv.fs_t) callconv(.C) void; -const Stats = JSC.Node.Stats; -const Dirent = JSC.Node.Dirent; -const StatFS = JSC.Node.StatFS; - pub const default_permission = if (Environment.isPosix) Syscall.S.IRUSR | Syscall.S.IWUSR | @@ -1015,7 +1011,7 @@ pub const AsyncReaddirRecursiveTask = struct { pub const ResultListEntry = struct { pub const Value = union(Return.Readdir.Tag) { - with_file_types: 
std.ArrayList(Dirent), + with_file_types: std.ArrayList(bun.JSC.Node.Dirent), buffers: std.ArrayList(Buffer), files: std.ArrayList(bun.String), @@ -1096,7 +1092,7 @@ pub const AsyncReaddirRecursiveTask = struct { .root_path = PathString.init(bun.default_allocator.dupeZ(u8, args.path.slice()) catch bun.outOfMemory()), .result_list = switch (args.tag()) { .files => .{ .files = std.ArrayList(bun.String).init(bun.default_allocator) }, - .with_file_types => .{ .with_file_types = std.ArrayList(Dirent).init(bun.default_allocator) }, + .with_file_types => .{ .with_file_types = .init(bun.default_allocator) }, .buffers => .{ .buffers = std.ArrayList(Buffer).init(bun.default_allocator) }, }, }); @@ -1114,7 +1110,7 @@ pub const AsyncReaddirRecursiveTask = struct { inline else => |tag| { const ResultType = comptime switch (tag) { .files => bun.String, - .with_file_types => Dirent, + .with_file_types => bun.JSC.Node.Dirent, .buffers => Buffer, }; var stack = std.heap.stackFallback(8192, bun.default_allocator); @@ -1137,7 +1133,7 @@ pub const AsyncReaddirRecursiveTask = struct { for (entries.items) |*item| { switch (ResultType) { bun.String => item.deref(), - Dirent => item.deref(), + bun.JSC.Node.Dirent => item.deref(), Buffer => bun.default_allocator.free(item.buffer.byteSlice()), else => @compileError("unreachable"), } @@ -1174,7 +1170,7 @@ pub const AsyncReaddirRecursiveTask = struct { if (result.items.len > 0) { const Field = switch (ResultType) { bun.String => .files, - Dirent => .with_file_types, + bun.JSC.Node.Dirent => .with_file_types, Buffer => .buffers, else => @compileError("unreachable"), }; @@ -1604,7 +1600,7 @@ pub const Arguments = struct { }; fn wrapTo(T: type, in: i64) T { - comptime bun.assert(@typeInfo(T).Int.signedness == .unsigned); + comptime bun.assert(@typeInfo(T).int.signedness == .unsigned); return @intCast(@mod(in, std.math.maxInt(T))); } @@ -3066,16 +3062,16 @@ pub const Arguments = struct { }; errdefer dest.deinit(); - var mode: Mode = 0; + var 
mode: Constants.Copyfile = @enumFromInt(0); if (arguments.next()) |arg| { arguments.eat(); - mode = @intFromEnum(try FileSystemFlags.fromJSNumberOnly(ctx, arg, .copy_file)); + mode = @enumFromInt(@intFromEnum(try FileSystemFlags.fromJSNumberOnly(ctx, arg, .copy_file))); } return CopyFile{ .src = src, .dest = dest, - .mode = @enumFromInt(mode), + .mode = mode, }; } }; @@ -3187,7 +3183,7 @@ pub const Arguments = struct { }; pub const StatOrNotFound = union(enum) { - stats: Stats, + stats: bun.JSC.Node.Stats, not_found: void, pub fn toJS(this: *StatOrNotFound, globalObject: *JSC.JSGlobalObject) JSC.JSValue { @@ -3235,7 +3231,7 @@ const Return = struct { pub const Chmod = void; pub const Fchown = void; pub const Fdatasync = void; - pub const Fstat = Stats; + pub const Fstat = bun.JSC.Node.Stats; pub const Rm = void; pub const Fsync = void; pub const Ftruncate = void; @@ -3249,7 +3245,7 @@ const Return = struct { pub const Open = FDImpl; pub const WriteFile = void; pub const Readv = Read; - pub const StatFS = JSC.Node.StatFS; + pub const StatFS = bun.JSC.Node.StatFS; pub const Read = struct { bytes_read: u52, @@ -3315,7 +3311,7 @@ const Return = struct { } }; pub const Readdir = union(Tag) { - with_file_types: []Dirent, + with_file_types: []bun.JSC.Node.Dirent, buffers: []Buffer, files: []const bun.String, @@ -3357,7 +3353,7 @@ const Return = struct { pub const ReadFileWithOptions = union(enum) { string: string, transcoded_string: bun.String, - buffer: JSC.Node.Buffer, + buffer: Buffer, null_terminated: [:0]const u8, }; pub const Readlink = StringOrBuffer; @@ -3639,7 +3635,7 @@ pub const NodeFS = struct { // we fallback to copyfile() when the file is > 128 KB and clonefile fails // clonefile() isn't supported on all devices // nor is it supported across devices - var mode: Mode = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA; + var mode: u32 = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA; if (args.mode.shouldntOverwrite()) { mode |= C.darwin.COPYFILE_EXCL; } @@ 
-3670,7 +3666,7 @@ pub const NodeFS = struct { return Maybe(Return.CopyFile){ .err = .{ .errno = @intFromEnum(C.SystemErrno.ENOTSUP), .syscall = .copyfile } }; } - var flags: Mode = bun.O.CREAT | bun.O.WRONLY; + var flags: i32 = bun.O.CREAT | bun.O.WRONLY; var wrote: usize = 0; if (args.mode.shouldntOverwrite()) { flags |= bun.O.EXCL; @@ -3846,7 +3842,7 @@ pub const NodeFS = struct { pub fn fstat(_: *NodeFS, args: Arguments.Fstat, _: Flavor) Maybe(Return.Fstat) { return switch (Syscall.fstat(args.fd)) { - .result => |result| .{ .result = Stats.init(result, args.big_int) }, + .result => |result| .{ .result = .init(result, args.big_int) }, .err => |err| .{ .err = err }, }; } @@ -3917,13 +3913,13 @@ pub const NodeFS = struct { }; } - return Maybe(Return.Link).errnoSysPD(system.link(from, to, 0), .link, args.old_path.slice(), args.new_path.slice()) orelse + return Maybe(Return.Link).errnoSysPD(system.link(from, to), .link, args.old_path.slice(), args.new_path.slice()) orelse Maybe(Return.Link).success; } pub fn lstat(this: *NodeFS, args: Arguments.Lstat, _: Flavor) Maybe(Return.Lstat) { return switch (Syscall.lstat(args.path.sliceZ(&this.sync_error_buf))) { - .result => |result| Maybe(Return.Lstat){ .result = .{ .stats = Stats.init(result, args.big_int) } }, + .result => |result| Maybe(Return.Lstat){ .result = .{ .stats = .init(result, args.big_int) } }, .err => |err| brk: { if (!args.throw_if_no_entry and err.getErrno() == .NOENT) { return Maybe(Return.Lstat){ .result = .{ .not_found = {} } }; @@ -3994,7 +3990,7 @@ pub const NodeFS = struct { }; const Char = bun.OSPathChar; - const len = @as(u16, @truncate(path.len)); + const len: u16 = @truncate(path.len); // First, attempt to create the desired directory // If that fails, then walk back up the path until we have a match @@ -4443,7 +4439,7 @@ pub const NodeFS = struct { const maybe = switch (args.recursive) { inline else => |recursive| switch (args.tag()) { .buffers => readdirInner(&this.sync_error_buf, args, Buffer, 
recursive, flavor), - .with_file_types => readdirInner(&this.sync_error_buf, args, Dirent, recursive, flavor), + .with_file_types => readdirInner(&this.sync_error_buf, args, bun.JSC.Node.Dirent, recursive, flavor), .files => readdirInner(&this.sync_error_buf, args, bun.String, recursive, flavor), }, }; @@ -4465,7 +4461,7 @@ pub const NodeFS = struct { entries: *std.ArrayList(ExpectedType), ) Maybe(void) { const dir = fd.asDir(); - const is_u16 = comptime Environment.isWindows and (ExpectedType == bun.String or ExpectedType == Dirent); + const is_u16 = comptime Environment.isWindows and (ExpectedType == bun.String or ExpectedType == bun.JSC.Node.Dirent); var dirent_path: bun.String = bun.String.dead; defer { @@ -4486,7 +4482,7 @@ pub const NodeFS = struct { .err => |err| { for (entries.items) |*item| { switch (ExpectedType) { - Dirent => { + bun.JSC.Node.Dirent => { item.deref(); }, Buffer => { @@ -4507,7 +4503,7 @@ pub const NodeFS = struct { }, .result => |ent| ent, }) |current| : (entry = iterator.next()) { - if (ExpectedType == Dirent) { + if (ExpectedType == JSC.Node.Dirent) { if (dirent_path.isEmpty()) { dirent_path = JSC.WebCore.Encoder.toBunString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(basename)), basename), args.encoding); } @@ -4515,7 +4511,7 @@ pub const NodeFS = struct { if (comptime !is_u16) { const utf8_name = current.name.slice(); switch (ExpectedType) { - Dirent => { + JSC.Node.Dirent => { dirent_path.ref(); entries.append(.{ .name = JSC.WebCore.Encoder.toBunString(utf8_name, args.encoding), @@ -4534,7 +4530,7 @@ pub const NodeFS = struct { } else { const utf16_name = current.name.slice(); switch (ExpectedType) { - Dirent => { + JSC.Node.Dirent => { dirent_path.ref(); entries.append(.{ .name = bun.String.createUTF16(utf16_name), @@ -4669,7 +4665,7 @@ pub const NodeFS = struct { } switch (comptime ExpectedType) { - Dirent => { + bun.JSC.Node.Dirent => { const path_u8 = bun.path.dirname(bun.path.join(&[_]string{ root_basename, name_to_copy }, 
.auto), .auto); if (dirent_path_prev.isEmpty() or !bun.strings.eql(dirent_path_prev.byteSlice(), path_u8)) { dirent_path_prev.deref(); @@ -4809,7 +4805,7 @@ pub const NodeFS = struct { } switch (comptime ExpectedType) { - Dirent => { + bun.JSC.Node.Dirent => { const path_u8 = bun.path.dirname(bun.path.join(&[_]string{ root_basename, name_to_copy }, .auto), .auto); if (dirent_path_prev.isEmpty() or !bun.strings.eql(dirent_path_prev.byteSlice(), path_u8)) { dirent_path_prev.deref(); @@ -4867,7 +4863,7 @@ pub const NodeFS = struct { comptime flavor: Flavor, ) Maybe(Return.Readdir) { const file_type = switch (ExpectedType) { - Dirent => "with_file_types", + bun.JSC.Node.Dirent => "with_file_types", bun.String => "files", Buffer => "buffers", else => @compileError("unreachable"), @@ -4883,7 +4879,7 @@ pub const NodeFS = struct { .err => |err| { for (entries.items) |*result| { switch (ExpectedType) { - Dirent => { + bun.JSC.Node.Dirent => { result.name.deref(); }, Buffer => { @@ -5383,7 +5379,7 @@ pub const NodeFS = struct { // If this errors, we silently ignore it. // Not all files are seekable (and thus, not all files can be truncated). 
if (Environment.isWindows) { - _ = std.os.windows.kernel32.SetEndOfFile(fd.cast()); + _ = bun.windows.SetEndOfFile(fd.cast()); } else { _ = Syscall.ftruncate(fd, @intCast(@as(u63, @truncate(written)))); } @@ -5754,7 +5750,7 @@ pub const NodeFS = struct { const path = args.path.sliceZ(&this.sync_error_buf); return switch (Syscall.stat(path)) { .result => |result| .{ - .result = .{ .stats = Stats.init(result, args.big_int) }, + .result = .{ .stats = .init(result, args.big_int) }, }, .err => |err| brk: { if (!args.throw_if_no_entry and err.getErrno() == .NOENT) { @@ -5941,8 +5937,8 @@ pub const NodeFS = struct { Maybe(Return.Utimes).success; } - bun.assert(args.mtime.tv_nsec <= 1e9); - bun.assert(args.atime.tv_nsec <= 1e9); + bun.assert(args.mtime.nsec <= 1e9); + bun.assert(args.atime.nsec <= 1e9); return switch (Syscall.utimens( args.path.sliceZ(&this.sync_error_buf), @@ -5976,8 +5972,8 @@ pub const NodeFS = struct { Maybe(Return.Utimes).success; } - bun.assert(args.mtime.tv_nsec <= 1e9); - bun.assert(args.atime.tv_nsec <= 1e9); + bun.assert(args.mtime.nsec <= 1e9); + bun.assert(args.atime.nsec <= 1e9); return switch (Syscall.lutimes(args.path.sliceZ(&this.sync_error_buf), args.atime, args.mtime)) { .err => |err| .{ .err = err.withPath(args.path.slice()) }, @@ -6254,7 +6250,7 @@ pub const NodeFS = struct { if (!posix.S.ISREG(stat_.mode)) { if (posix.S.ISLNK(stat_.mode)) { - var mode_: Mode = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA | C.darwin.COPYFILE_NOFOLLOW_SRC; + var mode_: u32 = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA | C.darwin.COPYFILE_NOFOLLOW_SRC; if (mode.shouldntOverwrite()) { mode_ |= C.darwin.COPYFILE_EXCL; } @@ -6341,7 +6337,7 @@ pub const NodeFS = struct { // we fallback to copyfile() when the file is > 128 KB and clonefile fails // clonefile() isn't supported on all devices // nor is it supported across devices - var mode_: Mode = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA | C.darwin.COPYFILE_NOFOLLOW_SRC; + var mode_: u32 = 
C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA | C.darwin.COPYFILE_NOFOLLOW_SRC; if (mode.shouldntOverwrite()) { mode_ |= C.darwin.COPYFILE_EXCL; } @@ -6388,7 +6384,7 @@ pub const NodeFS = struct { } }; } - var flags: Mode = bun.O.CREAT | bun.O.WRONLY; + var flags: i32 = bun.O.CREAT | bun.O.WRONLY; var wrote: usize = 0; if (mode.shouldntOverwrite()) { flags |= bun.O.EXCL; @@ -6850,7 +6846,7 @@ fn zigDeleteTreeMinStackSizeWithKindHint(self: std.fs.Dir, sub_path: []const u8, // Valid use of MAX_PATH_BYTES because dir_name_buf will only // ever store a single path component that was returned from the // filesystem. - var dir_name_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined; + var dir_name_buf: [std.fs.max_path_bytes]u8 = undefined; var dir_name: []const u8 = sub_path; // Here we must avoid recursion, in order to provide O(1) memory guarantee of this function. diff --git a/src/bun.js/node/node_fs_binding.zig b/src/bun.js/node/node_fs_binding.zig index e5787b7ed4c233..feb633c2e0eacf 100644 --- a/src/bun.js/node/node_fs_binding.zig +++ b/src/bun.js/node/node_fs_binding.zig @@ -18,7 +18,7 @@ const NodeFSFunctionEnum = std.meta.DeclEnum(JSC.Node.NodeFS); /// Async calls use a thread pool. fn Bindings(comptime function_name: NodeFSFunctionEnum) type { const function = @field(JSC.Node.NodeFS, @tagName(function_name)); - const fn_info = @typeInfo(@TypeOf(function)).Fn; + const fn_info = @typeInfo(@TypeOf(function)).@"fn"; if (fn_info.params.len != 3) { @compileError("Expected fn(NodeFS, Arguments) Return for NodeFS." 
++ @tagName(function_name)); } diff --git a/src/bun.js/node/node_fs_stat_watcher.zig b/src/bun.js/node/node_fs_stat_watcher.zig index 7579b5f7a6221d..5fac2538b4d81c 100644 --- a/src/bun.js/node/node_fs_stat_watcher.zig +++ b/src/bun.js/node/node_fs_stat_watcher.zig @@ -307,8 +307,6 @@ pub const StatWatcher = struct { } pub fn hasPendingActivity(this: *StatWatcher) bool { - @fence(.acquire); - return this.used_by_scheduler_thread.load(.acquire); } diff --git a/src/bun.js/node/node_fs_watcher.zig b/src/bun.js/node/node_fs_watcher.zig index bd511c0e92bf8c..55c2af8c892853 100644 --- a/src/bun.js/node/node_fs_watcher.zig +++ b/src/bun.js/node/node_fs_watcher.zig @@ -572,7 +572,6 @@ pub const FSWatcher = struct { // this can be called from Watcher Thread or JS Context Thread pub fn refTask(this: *FSWatcher) bool { - @fence(.acquire); this.mutex.lock(); defer this.mutex.unlock(); if (this.closed) return false; @@ -582,7 +581,6 @@ pub const FSWatcher = struct { } pub fn hasPendingActivity(this: *FSWatcher) bool { - @fence(.acquire); return this.pending_activity_count.load(.acquire) > 0; } diff --git a/src/bun.js/node/node_os.zig b/src/bun.js/node/node_os.zig index fc60648fee1385..ee4eaf3aa9f637 100644 --- a/src/bun.js/node/node_os.zig +++ b/src/bun.js/node/node_os.zig @@ -88,7 +88,7 @@ fn cpusImplLinux(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { // Read each CPU line while (line_iter.next()) |line| { // CPU lines are formatted as `cpu0 user nice sys idle iowait irq softirq` - var toks = std.mem.tokenize(u8, line, " \t"); + var toks = std.mem.tokenizeAny(u8, line, " \t"); const cpu_name = toks.next(); if (cpu_name == null or !std.mem.startsWith(u8, cpu_name.?, "cpu")) break; // done with CPUs diff --git a/src/bun.js/node/node_zlib_binding.zig b/src/bun.js/node/node_zlib_binding.zig index cd233554c775b3..ceb1a605f3ecb1 100644 --- a/src/bun.js/node/node_zlib_binding.zig +++ b/src/bun.js/node/node_zlib_binding.zig @@ -310,7 +310,7 @@ const CountedKeepAlive = struct { }; 
pub const SNativeZlib = struct { - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub usingnamespace JSC.Codegen.JSNativeZlib; pub usingnamespace CompressionStream(@This()); @@ -676,7 +676,7 @@ const ZlibContext = struct { pub const NativeBrotli = JSC.Codegen.JSNativeBrotli.getConstructor; pub const SNativeBrotli = struct { - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub usingnamespace JSC.Codegen.JSNativeBrotli; pub usingnamespace CompressionStream(@This()); diff --git a/src/bun.js/node/path.zig b/src/bun.js/node/path.zig index b2635e55018f4b..d12b1e74ce1bf2 100644 --- a/src/bun.js/node/path.zig +++ b/src/bun.js/node/path.zig @@ -2976,17 +2976,17 @@ pub fn toNamespacedPath(globalObject: *JSC.JSGlobalObject, isWindows: bool, args pub const Extern = [_][]const u8{"create"}; comptime { - @export(Path.basename, .{ .name = "Bun__Path__basename" }); - @export(Path.dirname, .{ .name = "Bun__Path__dirname" }); - @export(Path.extname, .{ .name = "Bun__Path__extname" }); - @export(path_format, .{ .name = "Bun__Path__format" }); - @export(Path.isAbsolute, .{ .name = "Bun__Path__isAbsolute" }); - @export(Path.join, .{ .name = "Bun__Path__join" }); - @export(Path.normalize, .{ .name = "Bun__Path__normalize" }); - @export(Path.parse, .{ .name = "Bun__Path__parse" }); - @export(Path.relative, .{ .name = "Bun__Path__relative" }); - @export(Path.resolve, .{ .name = "Bun__Path__resolve" }); - @export(Path.toNamespacedPath, .{ .name = "Bun__Path__toNamespacedPath" }); + @export(&Path.basename, .{ .name = "Bun__Path__basename" }); + @export(&Path.dirname, .{ .name = "Bun__Path__dirname" }); + @export(&Path.extname, .{ .name = "Bun__Path__extname" }); + @export(&path_format, .{ .name = "Bun__Path__format" }); + @export(&Path.isAbsolute, .{ .name = "Bun__Path__isAbsolute" }); + @export(&Path.join, .{ .name = "Bun__Path__join" }); + 
@export(&Path.normalize, .{ .name = "Bun__Path__normalize" }); + @export(&Path.parse, .{ .name = "Bun__Path__parse" }); + @export(&Path.relative, .{ .name = "Bun__Path__relative" }); + @export(&Path.resolve, .{ .name = "Bun__Path__resolve" }); + @export(&Path.toNamespacedPath, .{ .name = "Bun__Path__toNamespacedPath" }); } fn path_format(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { diff --git a/src/bun.js/node/path_watcher.zig b/src/bun.js/node/path_watcher.zig index 71cfd3c631e3d7..9e647386c3350b 100644 --- a/src/bun.js/node/path_watcher.zig +++ b/src/bun.js/node/path_watcher.zig @@ -51,7 +51,6 @@ pub const PathWatcherManager = struct { }; fn refPendingTask(this: *PathWatcherManager) bool { - @fence(.release); this.mutex.lock(); defer this.mutex.unlock(); if (this.deinit_on_last_task) return false; @@ -61,12 +60,10 @@ pub const PathWatcherManager = struct { } fn hasPendingTasks(this: *PathWatcherManager) callconv(.C) bool { - @fence(.acquire); return this.has_pending_tasks.load(.acquire); } fn unrefPendingTask(this: *PathWatcherManager) void { - @fence(.release); this.mutex.lock(); defer this.mutex.unlock(); this.pending_tasks -= 1; @@ -830,7 +827,6 @@ pub const PathWatcher = struct { } pub fn refPendingDirectory(this: *PathWatcher) bool { - @fence(.release); this.mutex.lock(); defer this.mutex.unlock(); if (this.isClosed()) return false; @@ -840,24 +836,20 @@ pub const PathWatcher = struct { } pub fn hasPendingDirectories(this: *PathWatcher) callconv(.C) bool { - @fence(.acquire); return this.has_pending_directories.load(.acquire); } pub fn isClosed(this: *PathWatcher) bool { - @fence(.acquire); return this.closed.load(.acquire); } pub fn setClosed(this: *PathWatcher) void { this.mutex.lock(); defer this.mutex.unlock(); - @fence(.release); this.closed.store(true, .release); } pub fn unrefPendingDirectory(this: *PathWatcher) void { - @fence(.release); this.mutex.lock(); defer 
this.mutex.unlock(); this.pending_directories -= 1; diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 5939857dee191c..a065cf635f1842 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -199,9 +199,9 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { []u8 => JSC.ArrayBuffer.fromBytes(r, .ArrayBuffer).toJS(globalObject, null), else => switch (@typeInfo(ReturnType)) { - .Int, .Float, .ComptimeInt, .ComptimeFloat => JSC.JSValue.jsNumber(r), - .Struct, .Enum, .Opaque, .Union => r.toJS(globalObject), - .Pointer => { + .int, .float, .comptime_int, .comptime_float => JSC.JSValue.jsNumber(r), + .@"struct", .@"enum", .@"opaque", .@"union" => r.toJS(globalObject), + .pointer => { if (bun.trait.isZigString(ReturnType)) JSC.ZigString.init(bun.asByteSlice(r)).withEncoding().toJS(globalObject); @@ -1285,8 +1285,8 @@ fn timeLikeFromSeconds(seconds: f64) TimeLike { return seconds; } return .{ - .tv_sec = @intFromFloat(seconds), - .tv_nsec = @intFromFloat(@mod(seconds, 1) * std.time.ns_per_s), + .sec = @intFromFloat(seconds), + .nsec = @intFromFloat(@mod(seconds, 1) * std.time.ns_per_s), }; } @@ -1304,8 +1304,8 @@ fn timeLikeFromMilliseconds(milliseconds: f64) TimeLike { } return .{ - .tv_sec = @intFromFloat(sec), - .tv_nsec = @intFromFloat(nsec), + .sec = @intFromFloat(sec), + .nsec = @intFromFloat(nsec), }; } @@ -1335,15 +1335,15 @@ fn timeLikeFromNow() TimeLike { // ownership or permission checks are performed, and the file // timestamps are not modified, but other error conditions may still return .{ - .tv_sec = 0, - .tv_nsec = if (Environment.isLinux) std.os.linux.UTIME.NOW else bun.C.translated.UTIME_NOW, + .sec = 0, + .nsec = if (Environment.isLinux) std.os.linux.UTIME.NOW else bun.C.translated.UTIME_NOW, }; } pub fn modeFromJS(ctx: JSC.C.JSContextRef, value: JSC.JSValue) bun.JSError!?Mode { const mode_int = if (value.isNumber()) brk: { const m = try validators.validateUint32(ctx, value, "mode", .{}, 
false); - break :brk @as(Mode, @as(u24, @truncate(m))); + break :brk @as(Mode, @truncate(m)); } else brk: { if (value.isUndefinedOrNull()) return null; @@ -1438,8 +1438,8 @@ pub const PathOrFileDescriptor = union(Tag) { } }; -pub const FileSystemFlags = enum(if (Environment.isWindows) c_int else c_uint) { - pub const tag_type = @typeInfo(FileSystemFlags).Enum.tag_type; +pub const FileSystemFlags = enum(c_int) { + pub const tag_type = @typeInfo(FileSystemFlags).@"enum".tag_type; const O = bun.O; /// Open file for appending. The file is created if it does not exist. @@ -1473,7 +1473,7 @@ pub const FileSystemFlags = enum(if (Environment.isWindows) c_int else c_uint) { _, - const map = bun.ComptimeStringMap(Mode, .{ + const map = bun.ComptimeStringMap(i32, .{ .{ "r", O.RDONLY }, .{ "rs", O.RDONLY | O.SYNC }, .{ "sr", O.RDONLY | O.SYNC }, @@ -1549,7 +1549,7 @@ pub const FileSystemFlags = enum(if (Environment.isWindows) c_int else c_uint) { return ctx.throwInvalidArguments("Invalid flag '{any}'. Learn more at https://nodejs.org/api/fs.html#fs_file_system_flags", .{str}); } - const flags = brk: { + const flags: i32 = brk: { switch (str.is16Bit()) { inline else => |is_16bit| { const chars = if (is_16bit) str.utf16SliceAligned() else str.slice(); @@ -1560,20 +1560,20 @@ pub const FileSystemFlags = enum(if (Environment.isWindows) c_int else c_uint) { const slice = str.toSlice(bun.default_allocator); defer slice.deinit(); - break :brk std.fmt.parseInt(Mode, slice.slice(), 10) catch null; + break :brk @as(i32, @intCast(std.fmt.parseInt(Mode, slice.slice(), 10) catch break :brk null)); } else { - break :brk std.fmt.parseInt(Mode, chars, 10) catch null; + break :brk @as(i32, @intCast(std.fmt.parseInt(Mode, chars, 10) catch break :brk null)); } } }, } - break :brk map.getWithEql(str, JSC.ZigString.eqlComptime); + break :brk map.getWithEql(str, JSC.ZigString.eqlComptime) orelse break :brk null; } orelse { return ctx.throwInvalidArguments("Invalid flag '{any}'. 
Learn more at https://nodejs.org/api/fs.html#fs_file_system_flags", .{str}); }; - return @as(FileSystemFlags, @enumFromInt(@as(Mode, @intCast(flags)))); + return @enumFromInt(flags); } return null; @@ -1622,16 +1622,16 @@ pub fn StatType(comptime big: bool) type { const Float = if (big) i64 else f64; inline fn toNanoseconds(ts: StatTimespec) u64 { - if (ts.tv_sec < 0) { + if (ts.sec < 0) { return @intCast(@max(bun.timespec.nsSigned(&bun.timespec{ - .sec = @intCast(ts.tv_sec), - .nsec = @intCast(ts.tv_nsec), + .sec = @intCast(ts.sec), + .nsec = @intCast(ts.nsec), }), 0)); } return bun.timespec.ns(&bun.timespec{ - .sec = @intCast(ts.tv_sec), - .nsec = @intCast(ts.tv_nsec), + .sec = @intCast(ts.sec), + .nsec = @intCast(ts.nsec), }); } @@ -1641,8 +1641,8 @@ pub fn StatType(comptime big: bool) type { // > libuv calculates tv_sec and tv_nsec from it and converts to signed long, // > which causes Y2038 overflow. On the other platforms it is safe to treat // > negative values as pre-epoch time. 
- const tv_sec = if (Environment.isWindows) @as(u32, @bitCast(ts.tv_sec)) else ts.tv_sec; - const tv_nsec = if (Environment.isWindows) @as(u32, @bitCast(ts.tv_nsec)) else ts.tv_nsec; + const tv_sec = if (Environment.isWindows) @as(u32, @bitCast(ts.sec)) else ts.sec; + const tv_nsec = if (Environment.isWindows) @as(u32, @bitCast(ts.nsec)) else ts.nsec; if (big) { const sec: i64 = tv_sec; const nsec: i64 = tv_nsec; @@ -2132,7 +2132,7 @@ pub const Process = struct { comptime { if (Environment.export_cpp_apis and Environment.isWindows) { - @export(Bun__Process__editWindowsEnvVar, .{ .name = "Bun__Process__editWindowsEnvVar" }); + @export(&Bun__Process__editWindowsEnvVar, .{ .name = "Bun__Process__editWindowsEnvVar" }); } } @@ -2194,13 +2194,13 @@ pub fn StatFSType(comptime big: bool) type { pub usingnamespace bun.New(@This()); // Common fields between Linux and macOS - fstype: Int, - bsize: Int, - blocks: Int, - bfree: Int, - bavail: Int, - files: Int, - ffree: Int, + _fstype: Int, + _bsize: Int, + _blocks: Int, + _bfree: Int, + _bavail: Int, + _files: Int, + _ffree: Int, const This = @This(); @@ -2211,7 +2211,7 @@ pub fn StatFSType(comptime big: bool) type { pub fn callback(this: *This, globalObject: *JSC.JSGlobalObject) JSC.JSValue { const value = @field(this, @tagName(field)); const Type = @TypeOf(value); - if (comptime big and @typeInfo(Type) == .Int) { + if (comptime big and @typeInfo(Type) == .int) { return JSC.JSValue.fromInt64NoTruncate(globalObject, value); } @@ -2224,13 +2224,13 @@ pub fn StatFSType(comptime big: bool) type { }.callback; } - pub const fstype = getter(.fstype); - pub const bsize = getter(.bsize); - pub const blocks = getter(.blocks); - pub const bfree = getter(.bfree); - pub const bavail = getter(.bavail); - pub const files = getter(.files); - pub const ffree = getter(.ffree); + pub const fstype = getter(._fstype); + pub const bsize = getter(._bsize); + pub const blocks = getter(._blocks); + pub const bfree = getter(._bfree); + pub const 
bavail = getter(._bavail); + pub const files = getter(._files); + pub const ffree = getter(._ffree); pub fn finalize(this: *This) void { this.destroy(); @@ -2259,13 +2259,13 @@ pub fn StatFSType(comptime big: bool) type { else => @compileError("Unsupported OS"), }; return .{ - .fstype = @truncate(@as(i64, @intCast(fstype_))), - .bsize = @truncate(@as(i64, @intCast(bsize_))), - .blocks = @truncate(@as(i64, @intCast(blocks_))), - .bfree = @truncate(@as(i64, @intCast(bfree_))), - .bavail = @truncate(@as(i64, @intCast(bavail_))), - .files = @truncate(@as(i64, @intCast(files_))), - .ffree = @truncate(@as(i64, @intCast(ffree_))), + ._fstype = @truncate(@as(i64, @intCast(fstype_))), + ._bsize = @truncate(@as(i64, @intCast(bsize_))), + ._blocks = @truncate(@as(i64, @intCast(blocks_))), + ._bfree = @truncate(@as(i64, @intCast(bfree_))), + ._bavail = @truncate(@as(i64, @intCast(bavail_))), + ._files = @truncate(@as(i64, @intCast(files_))), + ._ffree = @truncate(@as(i64, @intCast(ffree_))), }; } @@ -2277,13 +2277,13 @@ pub fn StatFSType(comptime big: bool) type { var args = callFrame.arguments(); const this = This.new(.{ - .fstype = if (args.len > 0 and args[0].isNumber()) args[0].toInt32() else 0, - .bsize = if (args.len > 1 and args[1].isNumber()) args[1].toInt32() else 0, - .blocks = if (args.len > 2 and args[2].isNumber()) args[2].toInt32() else 0, - .bfree = if (args.len > 3 and args[3].isNumber()) args[3].toInt32() else 0, - .bavail = if (args.len > 4 and args[4].isNumber()) args[4].toInt32() else 0, - .files = if (args.len > 5 and args[5].isNumber()) args[5].toInt32() else 0, - .ffree = if (args.len > 6 and args[6].isNumber()) args[6].toInt32() else 0, + ._fstype = if (args.len > 0 and args[0].isNumber()) args[0].toInt32() else 0, + ._bsize = if (args.len > 1 and args[1].isNumber()) args[1].toInt32() else 0, + ._blocks = if (args.len > 2 and args[2].isNumber()) args[2].toInt32() else 0, + ._bfree = if (args.len > 3 and args[3].isNumber()) args[3].toInt32() else 0, + 
._bavail = if (args.len > 4 and args[4].isNumber()) args[4].toInt32() else 0, + ._files = if (args.len > 5 and args[5].isNumber()) args[5].toInt32() else 0, + ._ffree = if (args.len > 6 and args[6].isNumber()) args[6].toInt32() else 0, }); return this; diff --git a/src/bun.js/node/util/parse_args.zig b/src/bun.js/node/util/parse_args.zig index 3c1e61e1601bbd..599873d157a145 100644 --- a/src/bun.js/node/util/parse_args.zig +++ b/src/bun.js/node/util/parse_args.zig @@ -65,7 +65,7 @@ const TokenKind = enum { option, @"option-terminator", - const COUNT = @typeInfo(TokenKind).Enum.fields.len; + const COUNT = @typeInfo(TokenKind).@"enum".fields.len; }; const Token = union(TokenKind) { positional: struct { index: u32, value: ValueRef }, @@ -652,7 +652,7 @@ pub fn parseArgs( comptime { const parseArgsFn = JSC.toJSHostFunction(parseArgs); - @export(parseArgsFn, .{ .name = "Bun__NodeUtil__jsParseArgs" }); + @export(&parseArgsFn, .{ .name = "Bun__NodeUtil__jsParseArgs" }); } pub fn parseArgsImpl(globalThis: *JSGlobalObject, config_obj: JSValue) bun.JSError!JSValue { diff --git a/src/bun.js/node/util/validators.zig b/src/bun.js/node/util/validators.zig index 699419a3cb7ea1..e6c003ef59da8c 100644 --- a/src/bun.js/node/util/validators.zig +++ b/src/bun.js/node/util/validators.zig @@ -19,7 +19,7 @@ pub fn throwErrInvalidArgValue( comptime fmt: [:0]const u8, args: anytype, ) bun.JSError { - @setCold(true); + @branchHint(.cold); return globalThis.ERR_INVALID_ARG_VALUE(fmt, args).throw(); } @@ -28,7 +28,7 @@ pub fn throwErrInvalidArgTypeWithMessage( comptime fmt: [:0]const u8, args: anytype, ) bun.JSError { - @setCold(true); + @branchHint(.cold); return globalThis.ERR_INVALID_ARG_TYPE(fmt, args).throw(); } @@ -39,7 +39,7 @@ pub fn throwErrInvalidArgType( comptime expected_type: []const u8, value: JSValue, ) bun.JSError { - @setCold(true); + @branchHint(.cold); const actual_type = getTypeName(globalThis, value); return throwErrInvalidArgTypeWithMessage(globalThis, "The \"" ++ 
name_fmt ++ "\" property must be of type {s}, got {s}", name_args ++ .{ expected_type, actual_type }); } @@ -49,7 +49,7 @@ pub fn throwRangeError( comptime fmt: [:0]const u8, args: anytype, ) bun.JSError { - @setCold(true); + @branchHint(.cold); return globalThis.ERR_OUT_OF_RANGE(fmt, args).throw(); } @@ -261,14 +261,14 @@ pub fn validateUndefined(globalThis: *JSGlobalObject, value: JSValue, comptime n pub fn validateStringEnum(comptime T: type, globalThis: *JSGlobalObject, value: JSValue, comptime name_fmt: string, name_args: anytype) bun.JSError!T { const str = try value.toBunString2(globalThis); defer str.deref(); - inline for (@typeInfo(T).Enum.fields) |enum_field| { + inline for (@typeInfo(T).@"enum".fields) |enum_field| { if (str.eqlComptime(enum_field.name)) return @field(T, enum_field.name); } const values_info = comptime blk: { var out: []const u8 = ""; - for (@typeInfo(T).Enum.fields, 0..) |enum_field, i| { + for (@typeInfo(T).@"enum".fields, 0..) |enum_field, i| { out = out ++ (if (i > 0) "|" else "") ++ enum_field.name; } break :blk out; diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index 9315790ff73fa1..f55568b2e3e27f 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -4696,7 +4696,7 @@ pub const Expect = struct { }; fn throwInvalidMatcherError(globalThis: *JSGlobalObject, matcher_name: bun.String, result: JSValue) bun.JSError { - @setCold(true); + @branchHint(.cold); var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis, @@ -4981,7 +4981,7 @@ pub const ExpectStatic = struct { } fn asyncChainingError(globalThis: *JSGlobalObject, flags: Expect.Flags, name: string) bun.JSError!JSValue { - @setCold(true); + @branchHint(.cold); const str = switch (flags.promise) { .resolves => "resolvesTo", .rejects => "rejectsTo", @@ -5619,9 +5619,9 @@ extern fn Expect__getPrototype(globalThis: *JSGlobalObject) JSValue; extern fn ExpectStatic__getPrototype(globalThis: *JSGlobalObject) JSValue; comptime { - 
@export(ExpectMatcherUtils.createSingleton, .{ .name = "ExpectMatcherUtils_createSigleton" }); - @export(Expect.readFlagsAndProcessPromise, .{ .name = "Expect_readFlagsAndProcessPromise" }); - @export(ExpectCustomAsymmetricMatcher.execute, .{ .name = "ExpectCustomAsymmetricMatcher__execute" }); + @export(&ExpectMatcherUtils.createSingleton, .{ .name = "ExpectMatcherUtils_createSigleton" }); + @export(&Expect.readFlagsAndProcessPromise, .{ .name = "Expect_readFlagsAndProcessPromise" }); + @export(&ExpectCustomAsymmetricMatcher.execute, .{ .name = "ExpectCustomAsymmetricMatcher__execute" }); } fn incrementExpectCallCounter() void { @@ -5674,7 +5674,7 @@ test "Expect.trimLeadingWhitespaceForInlineSnapshot" { try testTrimLeadingWhitespaceForSnapshot( \\ \\ Hello, world! - \\ + \\ , \\ \\Hello, world! @@ -5699,7 +5699,7 @@ test "Expect.trimLeadingWhitespaceForInlineSnapshot" { \\ key: value \\ \\ } - \\ + \\ , \\ \\Object{ @@ -5713,13 +5713,13 @@ test "Expect.trimLeadingWhitespaceForInlineSnapshot" { \\ Object{ \\ key: value \\ } - \\ + \\ , \\ \\ Object{ \\ key: value \\ } - \\ + \\ ); try testTrimLeadingWhitespaceForSnapshot( \\ diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index 4220c41e11b693..137c286a0e6327 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -559,8 +559,8 @@ pub const Jest = struct { } comptime { - @export(Bun__Jest__createTestModuleObject, .{ .name = "Bun__Jest__createTestModuleObject" }); - @export(Bun__Jest__createTestPreloadObject, .{ .name = "Bun__Jest__createTestPreloadObject" }); + @export(&Bun__Jest__createTestModuleObject, .{ .name = "Bun__Jest__createTestModuleObject" }); + @export(&Bun__Jest__createTestPreloadObject, .{ .name = "Bun__Jest__createTestPreloadObject" }); } }; @@ -808,10 +808,10 @@ pub const TestScope = struct { pub const name = "TestScope"; pub const shim = JSC.Shimmer("Bun", name, @This()); comptime { - @export(jsOnResolve, .{ + @export(&jsOnResolve, .{ .name = 
shim.symbolName("onResolve"), }); - @export(jsOnReject, .{ + @export(&jsOnReject, .{ .name = shim.symbolName("onReject"), }); } @@ -820,10 +820,10 @@ pub const TestScope = struct { pub const DescribeScope = struct { label: string = "", parent: ?*DescribeScope = null, - beforeAll: std.ArrayListUnmanaged(JSValue) = .{}, - beforeEach: std.ArrayListUnmanaged(JSValue) = .{}, - afterEach: std.ArrayListUnmanaged(JSValue) = .{}, - afterAll: std.ArrayListUnmanaged(JSValue) = .{}, + beforeAlls: std.ArrayListUnmanaged(JSValue) = .{}, + beforeEachs: std.ArrayListUnmanaged(JSValue) = .{}, + afterEachs: std.ArrayListUnmanaged(JSValue) = .{}, + afterAlls: std.ArrayListUnmanaged(JSValue) = .{}, test_id_start: TestRunner.Test.ID = 0, test_id_len: TestRunner.Test.ID = 0, tests: std.ArrayListUnmanaged(TestScope) = .{}, @@ -904,7 +904,7 @@ pub const DescribeScope = struct { } cb.protect(); - @field(DescribeScope.active.?, @tagName(hook)).append(getAllocator(globalThis), cb) catch unreachable; + @field(DescribeScope.active.?, @tagName(hook) ++ "s").append(getAllocator(globalThis), cb) catch unreachable; return JSValue.jsBoolean(true); } }.run; @@ -939,7 +939,7 @@ pub const DescribeScope = struct { pub const beforeEach = createCallback(.beforeEach); pub fn execCallback(this: *DescribeScope, globalObject: *JSGlobalObject, comptime hook: LifecycleHook) ?JSValue { - var hooks = &@field(this, @tagName(hook)); + var hooks = &@field(this, @tagName(hook) ++ "s"); defer { if (comptime hook == .beforeAll or hook == .afterAll) { hooks.clearAndFree(getAllocator(globalObject)); diff --git a/src/bun.js/test/pretty_format.zig b/src/bun.js/test/pretty_format.zig index d463598aae33cf..5d7fd28884c31b 100644 --- a/src/bun.js/test/pretty_format.zig +++ b/src/bun.js/test/pretty_format.zig @@ -12,7 +12,6 @@ const string = bun.string; const JSLexer = bun.js_lexer; const JSPrinter = bun.js_printer; const JSPrivateDataPtr = JSC.JSPrivateDataPtr; -const JS = @import("../javascript.zig"); const JSPromise = 
JSC.JSPromise; const expect = @import("./expect.zig"); @@ -1986,7 +1985,7 @@ pub const JestPrettyFormat = struct { // comptime var so we have to repeat it here. The rationale there is // it _should_ limit the stack usage because each version of the // function will be relatively small - return try switch (result.tag) { + return switch (result.tag) { .StringPossiblyFormatted => this.printAs(.StringPossiblyFormatted, Writer, writer, value, result.cell, enable_ansi_colors), .String => this.printAs(.String, Writer, writer, value, result.cell, enable_ansi_colors), .Undefined => this.printAs(.Undefined, Writer, writer, value, result.cell, enable_ansi_colors), diff --git a/src/bun.js/test/snapshot.zig b/src/bun.js/test/snapshot.zig index fb5ce9a888c7a7..becdc040399807 100644 --- a/src/bun.js/test/snapshot.zig +++ b/src/bun.js/test/snapshot.zig @@ -510,7 +510,7 @@ pub const Snapshots = struct { remain[0] = 0; const snapshot_file_path = snapshot_file_path_buf[0 .. snapshot_file_path_buf.len - remain.len :0]; - var flags: bun.Mode = bun.O.CREAT | bun.O.RDWR; + var flags: i32 = bun.O.CREAT | bun.O.RDWR; if (this.update_snapshots) flags |= bun.O.TRUNC; const fd = switch (bun.sys.open(snapshot_file_path, flags, 0o644)) { .result => |_fd| _fd, diff --git a/src/bun.js/web_worker.zig b/src/bun.js/web_worker.zig index 76520cbb39ff75..3cd99e7ebe907d 100644 --- a/src/bun.js/web_worker.zig +++ b/src/bun.js/web_worker.zig @@ -535,9 +535,9 @@ pub const WebWorker = struct { } comptime { - @export(create, .{ .name = "WebWorker__create" }); - @export(requestTerminate, .{ .name = "WebWorker__requestTerminate" }); - @export(setRef, .{ .name = "WebWorker__setRef" }); + @export(&create, .{ .name = "WebWorker__create" }); + @export(&requestTerminate, .{ .name = "WebWorker__requestTerminate" }); + @export(&setRef, .{ .name = "WebWorker__setRef" }); _ = WebWorker__updatePtr; } }; diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig index 55ecdeb63f1333..ab76f70729daf9 100644 --- 
a/src/bun.js/webcore.zig +++ b/src/bun.js/webcore.zig @@ -533,7 +533,7 @@ pub const Crypto = struct { return globalThis.ERR_CRYPTO_SCRYPT_INVALID_PARAMETER("Invalid scrypt parameters", .{}).throw(); } - fn throwInvalidParams(globalThis: *JSC.JSGlobalObject, comptime error_type: @Type(.EnumLiteral), comptime message: [:0]const u8, fmt: anytype) bun.JSError { + fn throwInvalidParams(globalThis: *JSC.JSGlobalObject, comptime error_type: @Type(.enum_literal), comptime message: [:0]const u8, fmt: anytype) bun.JSError { if (error_type != .RangeError) @compileError("Error type not added!"); BoringSSL.ERR_clear_error(); return globalThis.ERR_CRYPTO_INVALID_SCRYPT_PARAMS(message, fmt).throw(); @@ -644,7 +644,7 @@ pub const Crypto = struct { comptime { const Bun__randomUUIDv7 = JSC.toJSHostFunction(Bun__randomUUIDv7_); - @export(Bun__randomUUIDv7, .{ .name = "Bun__randomUUIDv7" }); + @export(&Bun__randomUUIDv7, .{ .name = "Bun__randomUUIDv7" }); } pub fn Bun__randomUUIDv7_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.argumentsUndef(2).slice(); @@ -736,9 +736,9 @@ pub const Crypto = struct { comptime { const js_alert = JSC.toJSHostFunction(alert); - @export(js_alert, .{ .name = "WebCore__alert" }); + @export(&js_alert, .{ .name = "WebCore__alert" }); const js_prompt = JSC.toJSHostFunction(Prompt.call); - @export(js_prompt, .{ .name = "WebCore__prompt" }); + @export(&js_prompt, .{ .name = "WebCore__prompt" }); const js_confirm = JSC.toJSHostFunction(confirm); - @export(js_confirm, .{ .name = "WebCore__confirm" }); + @export(&js_confirm, .{ .name = "WebCore__confirm" }); } diff --git a/src/bun.js/webcore/ObjectURLRegistry.zig b/src/bun.js/webcore/ObjectURLRegistry.zig index 5bf2ed7803e868..65234b632a703f 100644 --- a/src/bun.js/webcore/ObjectURLRegistry.zig +++ b/src/bun.js/webcore/ObjectURLRegistry.zig @@ -91,7 +91,7 @@ pub fn has(this: *ObjectURLRegistry, pathname: []const u8) bool { comptime { const 
Bun__createObjectURL = JSC.toJSHostFunction(Bun__createObjectURL_); - @export(Bun__createObjectURL, .{ .name = "Bun__createObjectURL" }); + @export(&Bun__createObjectURL, .{ .name = "Bun__createObjectURL" }); } fn Bun__createObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(1); @@ -109,7 +109,7 @@ fn Bun__createObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.Call comptime { const Bun__revokeObjectURL = JSC.toJSHostFunction(Bun__revokeObjectURL_); - @export(Bun__revokeObjectURL, .{ .name = "Bun__revokeObjectURL" }); + @export(&Bun__revokeObjectURL, .{ .name = "Bun__revokeObjectURL" }); } fn Bun__revokeObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(1); @@ -138,7 +138,7 @@ fn Bun__revokeObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.Call comptime { const jsFunctionResolveObjectURL = JSC.toJSHostFunction(jsFunctionResolveObjectURL_); - @export(jsFunctionResolveObjectURL, .{ .name = "jsFunctionResolveObjectURL" }); + @export(&jsFunctionResolveObjectURL, .{ .name = "jsFunctionResolveObjectURL" }); } fn jsFunctionResolveObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(1); diff --git a/src/bun.js/webcore/S3File.zig b/src/bun.js/webcore/S3File.zig index 03aca7800b7d55..8309c0fe8885a0 100644 --- a/src/bun.js/webcore/S3File.zig +++ b/src/bun.js/webcore/S3File.zig @@ -603,11 +603,11 @@ pub fn hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) c } comptime { - @export(exports.JSS3File__presign, .{ .name = "JSS3File__presign" }); - @export(construct, .{ .name = "JSS3File__construct" }); - @export(hasInstance, .{ .name = "JSS3File__hasInstance" }); - @export(getBucket, .{ .name = "JSS3File__bucket" }); - @export(getStat, .{ .name = "JSS3File__stat" }); + 
@export(&exports.JSS3File__presign, .{ .name = "JSS3File__presign" }); + @export(&construct, .{ .name = "JSS3File__construct" }); + @export(&hasInstance, .{ .name = "JSS3File__hasInstance" }); + @export(&getBucket, .{ .name = "JSS3File__bucket" }); + @export(&getStat, .{ .name = "JSS3File__stat" }); } pub const exports = struct { diff --git a/src/bun.js/webcore/S3Stat.zig b/src/bun.js/webcore/S3Stat.zig index 53deb25bcb4c94..361c639d3f2150 100644 --- a/src/bun.js/webcore/S3Stat.zig +++ b/src/bun.js/webcore/S3Stat.zig @@ -1,5 +1,5 @@ -const bun = @import("../../bun.zig"); -const JSC = @import("../../jsc.zig"); +const bun = @import("root").bun; +const JSC = bun.JSC; pub const S3Stat = struct { const log = bun.Output.scoped(.S3Stat, false); diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig index 125f4f1e23e024..c39153baa01acd 100644 --- a/src/bun.js/webcore/blob.zig +++ b/src/bun.js/webcore/blob.zig @@ -49,9 +49,6 @@ const PathOrBlob = JSC.Node.PathOrBlob; const WriteFilePromise = @import("./blob/WriteFile.zig").WriteFilePromise; const WriteFileWaitFromLockedValueTask = @import("./blob/WriteFile.zig").WriteFileWaitFromLockedValueTask; const NewReadFileHandler = @import("./blob/ReadFile.zig").NewReadFileHandler; -const WriteFile = @import("./blob/WriteFile.zig").WriteFile; -const ReadFile = @import("./blob/ReadFile.zig").ReadFile; -const WriteFileWindows = @import("./blob/WriteFile.zig").WriteFileWindows; const S3File = @import("./S3File.zig"); @@ -60,8 +57,20 @@ pub const Blob = struct { pub usingnamespace bun.New(@This()); pub usingnamespace JSC.Codegen.JSBlob; - pub usingnamespace @import("./blob/WriteFile.zig"); - pub usingnamespace @import("./blob/ReadFile.zig"); + + // pub usingnamespace @import("./blob/ReadFile.zig"); + const rf = @import("./blob/ReadFile.zig"); + pub const ReadFile = rf.ReadFile; + pub const ReadFileUV = rf.ReadFileUV; + pub const ReadFileTask = rf.ReadFileTask; + pub const ReadFileResultType = rf.ReadFileResultType; + + // 
pub usingnamespace @import("./blob/WriteFile.zig"); + const wf = @import("./blob/WriteFile.zig"); + pub const WriteFile = wf.WriteFile; + pub const WriteFileWindows = wf.WriteFileWindows; + pub const WriteFileTask = wf.WriteFileTask; + pub const ClosingState = enum(u8) { running, closing, @@ -142,7 +151,6 @@ pub const Blob = struct { return store.data == .file; } - const ReadFileUV = @import("./blob/ReadFile.zig").ReadFileUV; pub fn doReadFromS3(this: *Blob, comptime Function: anytype, global: *JSGlobalObject) JSValue { bloblog("doReadFromS3", .{}); @@ -183,7 +191,7 @@ pub const Blob = struct { handler, Handler.run, ) catch bun.outOfMemory(); - var read_file_task = ReadFile.ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); + var read_file_task = ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); // Create the Promise only after the store has been ref()'d. // The garbage collector runs on memory allocations @@ -202,7 +210,7 @@ pub const Blob = struct { pub fn NewInternalReadFileHandler(comptime Context: type, comptime Function: anytype) type { return struct { - pub fn run(handler: *anyopaque, bytes_: ReadFile.ResultType) void { + pub fn run(handler: *anyopaque, bytes_: ReadFileResultType) void { Function(bun.cast(Context, handler), bytes_); } }; @@ -221,7 +229,7 @@ pub const Blob = struct { this.offset, this.size, ) catch bun.outOfMemory(); - var read_file_task = ReadFile.ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); + var read_file_task = ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); read_file_task.schedule(); } @@ -973,7 +981,7 @@ pub const Blob = struct { WriteFilePromise.run, options.mkdirp_if_not_exists orelse true, ) catch unreachable; - var task = WriteFile.WriteFileTask.createOnJSThread(bun.default_allocator, ctx, file_copier) catch bun.outOfMemory(); + var task = 
WriteFileTask.createOnJSThread(bun.default_allocator, ctx, file_copier) catch bun.outOfMemory(); // Defer promise creation until we're just about to schedule the task var promise = JSC.JSPromise.create(ctx); const promise_value = promise.asValue(ctx); @@ -2294,10 +2302,7 @@ pub const Blob = struct { this.update(); } - pub fn doClose( - this: *This, - is_allowed_to_close_fd: bool, - ) bool { + pub fn doClose(this: *This, is_allowed_to_close_fd: bool) bool { if (@hasField(This, "io_request")) { if (this.close_after_io) { this.state.store(ClosingState.closing, .seq_cst); @@ -2827,7 +2832,7 @@ pub const Blob = struct { fn truncate(this: *CopyFileWindows) void { // TODO: optimize this - @setCold(true); + @branchHint(.cold); var node_fs: JSC.Node.NodeFS = .{}; _ = node_fs.truncate( @@ -2903,6 +2908,9 @@ pub const Blob = struct { .syscall = bun.String.static("fstat"), }; + pub const CopyFilePromiseTask = JSC.ConcurrentPromiseTask(CopyFile); + pub const CopyFilePromiseTaskEventLoopTask = CopyFilePromiseTask.EventLoopTask; + // blocking, but off the main thread pub const CopyFile = struct { destination_file_store: FileStore, @@ -2927,8 +2935,6 @@ pub const Blob = struct { pub const ResultType = anyerror!SizeType; pub const Callback = *const fn (ctx: *anyopaque, len: ResultType) void; - pub const CopyFilePromiseTask = JSC.ConcurrentPromiseTask(CopyFile); - pub const CopyFilePromiseTaskEventLoopTask = CopyFilePromiseTask.EventLoopTask; pub fn create( allocator: std.mem.Allocator, @@ -3234,7 +3240,7 @@ pub const Blob = struct { } pub fn doFCopyFileWithReadWriteLoopFallback(this: *CopyFile) anyerror!void { - switch (bun.sys.fcopyfile(this.source_fd, this.destination_fd, posix.system.COPYFILE_DATA)) { + switch (bun.sys.fcopyfile(this.source_fd, this.destination_fd, posix.system.COPYFILE{ .DATA = true })) { .err => |errno| { switch (errno.getErrno()) { // If the file type doesn't support seeking, it may return EBADF @@ -3293,6 +3299,7 @@ pub const Blob = struct { } pub fn 
runAsync(this: *CopyFile) void { + if (Environment.isWindows) return; //why // defer task.onFinish(); var stat_: ?bun.Stat = null; @@ -4083,9 +4090,9 @@ pub const Blob = struct { }); comptime { const jsonResolveRequestStream = JSC.toJSHostFunction(onFileStreamResolveRequestStream); - @export(jsonResolveRequestStream, .{ .name = Export[0].symbol_name }); + @export(&jsonResolveRequestStream, .{ .name = Export[0].symbol_name }); const jsonRejectRequestStream = JSC.toJSHostFunction(onFileStreamRejectRequestStream); - @export(jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); + @export(&jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); } pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *JSC.JSGlobalObject, readable_stream: JSC.WebCore.ReadableStream, extra_options: ?JSValue) JSC.JSValue { var store = this.store orelse { @@ -4824,7 +4831,7 @@ pub const Blob = struct { Blob.max_size; store.data.file.mode = @intCast(stat.mode); store.data.file.seekable = bun.isRegularFile(stat.mode); - store.data.file.last_modified = JSC.toJSTime(stat.mtime().tv_sec, stat.mtime().tv_nsec); + store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); }, // the file may not exist yet. Thats's okay. else => {}, @@ -4838,7 +4845,7 @@ pub const Blob = struct { Blob.max_size; store.data.file.mode = @intCast(stat.mode); store.data.file.seekable = bun.isRegularFile(stat.mode); - store.data.file.last_modified = JSC.toJSTime(stat.mtime().tv_sec, stat.mtime().tv_nsec); + store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); }, // the file may not exist yet. Thats's okay. 
else => {}, diff --git a/src/bun.js/webcore/blob/ReadFile.zig b/src/bun.js/webcore/blob/ReadFile.zig index 2add4b0053e365..040ff469b4f6e9 100644 --- a/src/bun.js/webcore/blob/ReadFile.zig +++ b/src/bun.js/webcore/blob/ReadFile.zig @@ -1,14 +1,14 @@ const bun = @import("root").bun; const JSC = bun.JSC; const std = @import("std"); -const Blob = JSC.WebCore.Blob; +const Blob = bun.JSC.WebCore.Blob; const invalid_fd = bun.invalid_fd; const SystemError = JSC.SystemError; const SizeType = Blob.SizeType; const io = bun.io; -const FileOpenerMixin = Blob.Store.FileOpenerMixin; -const FileCloserMixin = Blob.Store.FileCloserMixin; +const FileOpenerMixin = Store.FileOpenerMixin; +const FileCloserMixin = Store.FileCloserMixin; const Environment = bun.Environment; const bloblog = bun.Output.scoped(.WriteFile, true); const JSPromise = JSC.JSPromise; @@ -24,7 +24,7 @@ pub fn NewReadFileHandler(comptime Function: anytype) type { promise: JSPromise.Strong = .{}, globalThis: *JSGlobalObject, - pub fn run(handler: *@This(), maybe_bytes: Blob.ReadFile.ResultType) void { + pub fn run(handler: *@This(), maybe_bytes: ReadFileResultType) void { var promise = handler.promise.swap(); var blob = handler.context; blob.allocator = null; @@ -34,7 +34,7 @@ pub fn NewReadFileHandler(comptime Function: anytype) type { .result => |result| { const bytes = result.buf; if (blob.size > 0) - blob.size = @min(@as(Blob.SizeType, @truncate(bytes.len)), blob.size); + blob.size = @min(@as(SizeType, @truncate(bytes.len)), blob.size); const WrappedFn = struct { pub fn wrapped(b: *Blob, g: *JSGlobalObject, by: []u8) JSC.JSValue { return JSC.toJSHostValue(g, Function(b, g, by, .temporary)); @@ -56,6 +56,11 @@ const ByteStore = Blob.ByteStore; const Store = Blob.Store; const ClosingState = Blob.ClosingState; +pub const ReadFileOnReadFileCallback = *const fn (ctx: *anyopaque, bytes: ReadFileResultType) void; +pub const ReadFileRead = struct { buf: []u8, is_temporary: bool = false, total_size: SizeType = 0 }; +pub 
const ReadFileResultType = SystemError.Maybe(ReadFileRead); +pub const ReadFileTask = JSC.WorkTask(ReadFile); + pub const ReadFile = struct { file_store: FileStore, byte_store: ByteStore = ByteStore{ .allocator = bun.default_allocator }, @@ -72,7 +77,7 @@ pub const ReadFile = struct { system_error: ?JSC.SystemError = null, errno: ?anyerror = null, onCompleteCtx: *anyopaque = undefined, - onCompleteCallback: OnReadFileCallback = undefined, + onCompleteCallback: ReadFileOnReadFileCallback = undefined, io_task: ?*ReadFileTask = null, io_poll: bun.io.Poll = .{}, io_request: bun.io.Request = .{ .callback = &onRequestReadable }, @@ -80,19 +85,11 @@ pub const ReadFile = struct { close_after_io: bool = false, state: std.atomic.Value(ClosingState) = std.atomic.Value(ClosingState).init(.running), - pub const Read = struct { - buf: []u8, - is_temporary: bool = false, - total_size: SizeType = 0, - }; - pub const ResultType = SystemError.Maybe(Read); - - pub const OnReadFileCallback = *const fn (ctx: *anyopaque, bytes: ResultType) void; - pub usingnamespace FileOpenerMixin(ReadFile); pub usingnamespace FileCloserMixin(ReadFile); pub fn update(this: *ReadFile) void { + if (Environment.isWindows) return; //why switch (this.state.load(.monotonic)) { .closing => { this.onFinish(); @@ -105,7 +102,7 @@ pub const ReadFile = struct { _: std.mem.Allocator, store: *Store, onReadFileContext: *anyopaque, - onCompleteCallback: OnReadFileCallback, + onCompleteCallback: ReadFileOnReadFileCallback, off: SizeType, max_len: SizeType, ) !*ReadFile { @@ -131,13 +128,13 @@ pub const ReadFile = struct { max_len: SizeType, comptime Context: type, context: Context, - comptime callback: fn (ctx: Context, bytes: ResultType) void, + comptime callback: fn (ctx: Context, bytes: ReadFileResultType) void, ) !*ReadFile { if (Environment.isWindows) @compileError("dont call this function on windows"); const Handler = struct { - pub fn run(ptr: *anyopaque, bytes: ResultType) void { + pub fn run(ptr: *anyopaque, 
bytes: ReadFileResultType) void { callback(bun.cast(Context, ptr), bytes); } }; @@ -257,8 +254,6 @@ pub const ReadFile = struct { return true; } - pub const ReadFileTask = JSC.WorkTask(@This()); - pub fn then(this: *ReadFile, _: *JSC.JSGlobalObject) void { const cb = this.onCompleteCallback; const cb_ctx = this.onCompleteCtx; @@ -266,12 +261,12 @@ pub const ReadFile = struct { if (this.store == null and this.system_error != null) { const system_error = this.system_error.?; bun.destroy(this); - cb(cb_ctx, ResultType{ .err = system_error }); + cb(cb_ctx, ReadFileResultType{ .err = system_error }); return; } else if (this.store == null) { bun.destroy(this); if (Environment.allow_assert) @panic("assertion failure - store should not be null"); - cb(cb_ctx, ResultType{ + cb(cb_ctx, ReadFileResultType{ .err = SystemError{ .code = bun.String.static("INTERNAL_ERROR"), .message = bun.String.static("assertion failure - store should not be null"), @@ -290,7 +285,7 @@ pub const ReadFile = struct { bun.destroy(this); if (system_error) |err| { - cb(cb_ctx, ResultType{ .err = err }); + cb(cb_ctx, ReadFileResultType{ .err = err }); return; } @@ -302,6 +297,7 @@ pub const ReadFile = struct { } fn runAsync(this: *ReadFile, task: *ReadFileTask) void { + if (Environment.isWindows) return; //why this.io_task = task; if (this.file_store.pathlike == .fd) { @@ -347,7 +343,7 @@ pub const ReadFile = struct { if (this.store) |store| { if (store.data == .file) { - store.data.file.last_modified = JSC.toJSTime(stat.mtime().tv_sec, stat.mtime().tv_nsec); + store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); } } @@ -445,6 +441,7 @@ pub const ReadFile = struct { } fn doReadLoop(this: *ReadFile) void { + if (Environment.isWindows) return; //why while (this.state.load(.monotonic) == .running) { // we hold a 64 KB stack buffer incase the amount of data to // be read is greater than the reported amount @@ -563,7 +560,7 @@ pub const ReadFileUV = struct { system_error: 
?JSC.SystemError = null, errno: ?anyerror = null, on_complete_data: *anyopaque = undefined, - on_complete_fn: ReadFile.OnReadFileCallback, + on_complete_fn: ReadFileOnReadFileCallback, is_regular_file: bool = false, req: libuv.fs_t = std.mem.zeroes(libuv.fs_t), @@ -596,7 +593,7 @@ pub const ReadFileUV = struct { const cb_ctx = this.on_complete_data; if (this.system_error) |err| { - cb(cb_ctx, ReadFile.ResultType{ .err = err }); + cb(cb_ctx, ReadFileResultType{ .err = err }); return; } @@ -661,7 +658,7 @@ pub const ReadFileUV = struct { // keep in sync with resolveSizeAndLastModified if (this.store.data == .file) { - this.store.data.file.last_modified = JSC.toJSTime(stat.mtime().tv_sec, stat.mtime().tv_nsec); + this.store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); } if (bun.S.ISDIR(@intCast(stat.mode))) { diff --git a/src/bun.js/webcore/blob/WriteFile.zig b/src/bun.js/webcore/blob/WriteFile.zig index 9560fc4cd2f18f..06416629c53f05 100644 --- a/src/bun.js/webcore/blob/WriteFile.zig +++ b/src/bun.js/webcore/blob/WriteFile.zig @@ -17,6 +17,10 @@ const ZigString = JSC.ZigString; const ClosingState = Blob.ClosingState; +pub const WriteFileResultType = SystemError.Maybe(SizeType); +pub const WriteFileOnWriteFileCallback = *const fn (ctx: *anyopaque, count: WriteFileResultType) void; +pub const WriteFileTask = JSC.WorkTask(WriteFile); + pub const WriteFile = struct { file_blob: Blob, bytes_blob: Blob, @@ -31,15 +35,13 @@ pub const WriteFile = struct { state: std.atomic.Value(ClosingState) = std.atomic.Value(ClosingState).init(.running), onCompleteCtx: *anyopaque = undefined, - onCompleteCallback: OnWriteFileCallback = undefined, + onCompleteCallback: WriteFileOnWriteFileCallback = undefined, total_written: usize = 0, could_block: bool = false, close_after_io: bool = false, mkdirp_if_not_exists: bool = false, - pub const ResultType = SystemError.Maybe(SizeType); - pub const OnWriteFileCallback = *const fn (ctx: *anyopaque, count: 
ResultType) void; pub const io_tag = io.Poll.Tag.WriteFile; pub usingnamespace FileOpenerMixin(WriteFile); @@ -92,7 +94,7 @@ pub const WriteFile = struct { file_blob: Blob, bytes_blob: Blob, onWriteFileContext: *anyopaque, - onCompleteCallback: OnWriteFileCallback, + onCompleteCallback: WriteFileOnWriteFileCallback, mkdirp_if_not_exists: bool, ) !*WriteFile { const write_file = bun.new(WriteFile, WriteFile{ @@ -113,11 +115,11 @@ pub const WriteFile = struct { bytes_blob: Blob, comptime Context: type, context: Context, - comptime callback: fn (ctx: Context, bytes: ResultType) void, + comptime callback: fn (ctx: Context, bytes: WriteFileResultType) void, mkdirp_if_not_exists: bool, ) !*WriteFile { const Handler = struct { - pub fn run(ptr: *anyopaque, bytes: ResultType) void { + pub fn run(ptr: *anyopaque, bytes: WriteFileResultType) void { callback(bun.cast(Context, ptr), bytes); } }; @@ -178,8 +180,6 @@ pub const WriteFile = struct { return true; } - pub const WriteFileTask = JSC.WorkTask(@This()); - pub fn then(this: *WriteFile, _: *JSC.JSGlobalObject) void { const cb = this.onCompleteCallback; const cb_ctx = this.onCompleteCtx; @@ -199,6 +199,7 @@ pub const WriteFile = struct { bun.destroy(this); cb(cb_ctx, .{ .result = @as(SizeType, @truncate(wrote)) }); } + pub fn run(this: *WriteFile, task: *WriteFileTask) void { if (Environment.isWindows) { @panic("todo"); @@ -308,6 +309,7 @@ pub const WriteFile = struct { } fn doWriteLoop(this: *WriteFile) void { + if (Environment.isWindows) return; //why while (this.state.load(.monotonic) == .running) { var remain = this.bytes_blob.sharedView(); @@ -355,7 +357,7 @@ pub const WriteFileWindows = struct { io_request: uv.fs_t, file_blob: Blob, bytes_blob: Blob, - onCompleteCallback: OnWriteFileCallback, + onCompleteCallback: WriteFileOnWriteFileCallback, onCompleteCtx: *anyopaque, mkdirp_if_not_exists: bool = false, uv_bufs: [1]uv.uv_buf_t, @@ -374,7 +376,7 @@ pub const WriteFileWindows = struct { bytes_blob: Blob, event_loop: 
*bun.JSC.EventLoop, onWriteFileContext: *anyopaque, - onCompleteCallback: OnWriteFileCallback, + onCompleteCallback: WriteFileOnWriteFileCallback, mkdirp_if_not_exists: bool, ) *WriteFileWindows { const write_file = WriteFileWindows.new(.{ @@ -419,8 +421,6 @@ pub const WriteFileWindows = struct { write_file.event_loop.refConcurrently(); return write_file; } - pub const ResultType = WriteFile.ResultType; - pub const OnWriteFileCallback = WriteFile.OnWriteFileCallback; pub inline fn loop(this: *const WriteFileWindows) *uv.Loop { return this.event_loop.virtual_machine.event_loop_handle.?; @@ -637,7 +637,7 @@ pub const WriteFileWindows = struct { bytes_blob: Blob, comptime Context: type, context: Context, - comptime callback: *const fn (ctx: Context, bytes: ResultType) void, + comptime callback: *const fn (ctx: Context, bytes: WriteFileResultType) void, mkdirp_if_not_exists: bool, ) *WriteFileWindows { return WriteFileWindows.createWithCtx( @@ -654,7 +654,7 @@ pub const WriteFileWindows = struct { pub const WriteFilePromise = struct { promise: JSPromise.Strong = .{}, globalThis: *JSGlobalObject, - pub fn run(handler: *@This(), count: Blob.WriteFile.ResultType) void { + pub fn run(handler: *@This(), count: WriteFileResultType) void { var promise = handler.promise.swap(); const globalThis = handler.globalThis; bun.destroy(handler); diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index c6177879d9b708..17d95af117663c 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -1427,7 +1427,7 @@ pub const BodyValueBufferer = struct { global: *JSGlobalObject, allocator: std.mem.Allocator, ) @This() { - const this = .{ + const this: BodyValueBufferer = .{ .ctx = ctx, .onFinishedBuffering = onFinish, .allocator = allocator, @@ -1486,7 +1486,7 @@ pub const BodyValueBufferer = struct { } } - fn onFinishedLoadingFile(sink: *@This(), bytes: JSC.WebCore.Blob.ReadFile.ResultType) void { + fn onFinishedLoadingFile(sink: *@This(), bytes: 
Blob.ReadFileResultType) void { switch (bytes) { .err => |err| { log("onFinishedLoadingFile Error", .{}); @@ -1722,9 +1722,9 @@ pub const BodyValueBufferer = struct { comptime { const jsonResolveStream = JSC.toJSHostFunction(onResolveStream); - @export(jsonResolveStream, .{ .name = Export[0].symbol_name }); + @export(&jsonResolveStream, .{ .name = Export[0].symbol_name }); const jsonRejectStream = JSC.toJSHostFunction(onRejectStream); - @export(jsonRejectStream, .{ .name = Export[1].symbol_name }); + @export(&jsonRejectStream, .{ .name = Export[1].symbol_name }); } }; diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig index 78846c6fa4dad0..0c58eb0390561f 100644 --- a/src/bun.js/webcore/encoding.zig +++ b/src/bun.js/webcore/encoding.zig @@ -31,7 +31,7 @@ const JSValue = JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; const VirtualMachine = JSC.VirtualMachine; -const Task = @import("../javascript.zig").Task; +const Task = JSC.Task; const picohttp = bun.picohttp; diff --git a/src/bun.js/webcore/request.zig b/src/bun.js/webcore/request.zig index c0b3882a12fee8..4d85d4226c33c0 100644 --- a/src/bun.js/webcore/request.zig +++ b/src/bun.js/webcore/request.zig @@ -611,7 +611,7 @@ pub const Request = struct { } } - if (value.asDirect(JSC.WebCore.Response)) |response| { + if (value.asDirect(Response)) |response| { if (!fields.contains(.method)) { req.method = response.init.method; fields.insert(.method); diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index 2a9d0d144ba931..a5bf52f203b5b7 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -898,7 +898,7 @@ pub const Fetch = struct { pub const Empty: HTTPRequestBody = .{ .AnyBlob = .{ .Blob = .{} } }; - pub fn store(this: *HTTPRequestBody) ?*JSC.WebCore.Blob.Store { + pub fn store(this: *HTTPRequestBody) ?*Blob.Store { return switch (this.*) { .AnyBlob => this.AnyBlob.store(), else => null, @@ -1127,9 +1127,9 @@ pub const Fetch 
= struct { }); comptime { const jsonResolveRequestStream = JSC.toJSHostFunction(onResolveRequestStream); - @export(jsonResolveRequestStream, .{ .name = Export[0].symbol_name }); + @export(&jsonResolveRequestStream, .{ .name = Export[0].symbol_name }); const jsonRejectRequestStream = JSC.toJSHostFunction(onRejectRequestStream); - @export(jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); + @export(&jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); } pub fn startRequestStream(this: *FetchTasklet) void { @@ -2203,7 +2203,7 @@ pub const Fetch = struct { comptime { const Bun__fetchPreconnect = JSC.toJSHostFunction(Bun__fetchPreconnect_); - @export(Bun__fetchPreconnect, .{ .name = "Bun__fetchPreconnect" }); + @export(&Bun__fetchPreconnect, .{ .name = "Bun__fetchPreconnect" }); } pub fn Bun__fetchPreconnect_( globalObject: *JSC.JSGlobalObject, @@ -2264,7 +2264,7 @@ pub const Fetch = struct { comptime { const Bun__fetch = JSC.toJSHostFunction(Bun__fetch_); - @export(Bun__fetch, .{ .name = "Bun__fetch" }); + @export(&Bun__fetch, .{ .name = "Bun__fetch" }); } /// Implementation of `Bun.fetch` diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 1f0adae622f514..5fc56a1c327bc3 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -36,14 +36,14 @@ const JSPrinter = bun.js_printer; const picohttp = bun.picohttp; const StringJoiner = bun.StringJoiner; const uws = bun.uws; -const Blob = JSC.WebCore.Blob; +const Blob = bun.JSC.WebCore.Blob; const Response = JSC.WebCore.Response; const Request = JSC.WebCore.Request; const assert = bun.assert; const Syscall = bun.sys; const uv = bun.windows.libuv; -const AnyBlob = JSC.WebCore.AnyBlob; +const AnyBlob = bun.JSC.WebCore.AnyBlob; pub const ReadableStream = struct { value: JSValue, ptr: Source, @@ -123,7 +123,7 @@ pub const ReadableStream = struct { pub fn toAnyBlob( stream: *ReadableStream, globalThis: *JSC.JSGlobalObject, - ) ?JSC.WebCore.AnyBlob { + ) 
?AnyBlob { if (stream.isDisturbed(globalThis)) { return null; } @@ -139,7 +139,7 @@ pub const ReadableStream = struct { }, .File => |blobby| { if (blobby.lazy == .blob) { - var blob = JSC.WebCore.Blob.initWithStore(blobby.lazy.blob, globalThis); + var blob = Blob.initWithStore(blobby.lazy.blob, globalThis); blob.store.?.ref(); // it should be lazy, file shouldn't have opened yet. bun.assert(!blobby.started); @@ -492,7 +492,7 @@ pub const StreamStart = union(Tag) { close: bool = false, mode: bun.Mode = 0o664, - pub fn flags(this: *const FileSinkOptions) bun.Mode { + pub fn flags(this: *const FileSinkOptions) i32 { _ = this; return bun.O.NONBLOCK | bun.O.CLOEXEC | bun.O.CREAT | bun.O.WRONLY; @@ -562,7 +562,7 @@ pub const StreamStart = union(Tag) { .ArrayBufferSink => { var as_uint8array = false; var stream = false; - var chunk_size: JSC.WebCore.Blob.SizeType = 0; + var chunk_size: Blob.SizeType = 0; var empty = true; if (value.getOwn(globalThis, "asUint8Array")) |val| { @@ -582,7 +582,7 @@ pub const StreamStart = union(Tag) { if (value.fastGet(globalThis, .highWaterMark)) |chunkSize| { if (chunkSize.isNumber()) { empty = false; - chunk_size = @as(JSC.WebCore.Blob.SizeType, @intCast(@max(0, @as(i51, @truncate(chunkSize.toInt64()))))); + chunk_size = @as(Blob.SizeType, @intCast(@max(0, @as(i51, @truncate(chunkSize.toInt64()))))); } } @@ -597,11 +597,11 @@ pub const StreamStart = union(Tag) { } }, .FileSink => { - var chunk_size: JSC.WebCore.Blob.SizeType = 0; + var chunk_size: Blob.SizeType = 0; if (value.fastGet(globalThis, .highWaterMark)) |chunkSize| { if (chunkSize.isNumber()) - chunk_size = @as(JSC.WebCore.Blob.SizeType, @intCast(@max(0, @as(i51, @truncate(chunkSize.toInt64()))))); + chunk_size = @as(Blob.SizeType, @intCast(@max(0, @as(i51, @truncate(chunkSize.toInt64()))))); } if (value.fastGet(globalThis, .path)) |path| { @@ -660,12 +660,12 @@ pub const StreamStart = union(Tag) { }, .NetworkSink, .HTTPSResponseSink, .HTTPResponseSink => { var empty = true; - var 
chunk_size: JSC.WebCore.Blob.SizeType = 2048; + var chunk_size: Blob.SizeType = 2048; if (value.fastGet(globalThis, .highWaterMark)) |chunkSize| { if (chunkSize.isNumber()) { empty = false; - chunk_size = @as(JSC.WebCore.Blob.SizeType, @intCast(@max(256, @as(i51, @truncate(chunkSize.toInt64()))))); + chunk_size = @as(Blob.SizeType, @intCast(@max(256, @as(i51, @truncate(chunkSize.toInt64()))))); } } @@ -1608,10 +1608,10 @@ pub const SinkDestructor = struct { } switch (ptr.tag()) { - .Detached => { + @field(Ptr.Tag, @typeName(Detached)) => { return; }, - .Subprocess => { + @field(Ptr.Tag, @typeName(Subprocess)) => { const subprocess = ptr.as(Subprocess); subprocess.onStdinDestroyed(); }, @@ -1982,17 +1982,17 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { } comptime { - @export(finalize, .{ .name = shim.symbolName("finalize") }); - @export(jsWrite, .{ .name = shim.symbolName("write") }); - @export(jsGetInternalFd, .{ .name = shim.symbolName("getInternalFd") }); - @export(close, .{ .name = shim.symbolName("close") }); - @export(jsFlush, .{ .name = shim.symbolName("flush") }); - @export(jsStart, .{ .name = shim.symbolName("start") }); - @export(jsEnd, .{ .name = shim.symbolName("end") }); - @export(jsConstruct, .{ .name = shim.symbolName("construct") }); - @export(endWithSink, .{ .name = shim.symbolName("endWithSink") }); - @export(updateRef, .{ .name = shim.symbolName("updateRef") }); - @export(memoryCost, .{ .name = shim.symbolName("memoryCost") }); + @export(&finalize, .{ .name = shim.symbolName("finalize") }); + @export(&jsWrite, .{ .name = shim.symbolName("write") }); + @export(&jsGetInternalFd, .{ .name = shim.symbolName("getInternalFd") }); + @export(&close, .{ .name = shim.symbolName("close") }); + @export(&jsFlush, .{ .name = shim.symbolName("flush") }); + @export(&jsStart, .{ .name = shim.symbolName("start") }); + @export(&jsEnd, .{ .name = shim.symbolName("end") }); + @export(&jsConstruct, .{ .name = 
shim.symbolName("construct") }); + @export(&endWithSink, .{ .name = shim.symbolName("endWithSink") }); + @export(&updateRef, .{ .name = shim.symbolName("updateRef") }); + @export(&memoryCost, .{ .name = shim.symbolName("memoryCost") }); shim.assertJSFunction(.{ write, @@ -3434,7 +3434,7 @@ pub const FileSink = struct { const log = Output.scoped(.FileSink, false); - pub usingnamespace bun.NewRefCounted(FileSink, deinit); + pub usingnamespace bun.NewRefCounted(FileSink, deinit, null); pub const IOWriter = bun.io.StreamingWriter(@This(), onWrite, onError, onReady, onClose); pub const Poll = IOWriter; @@ -3456,7 +3456,7 @@ pub const FileSink = struct { } comptime { - @export(Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio, .{ .name = "Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio" }); + @export(&Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio, .{ .name = "Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio" }); } pub fn onAttachedProcessExit(this: *FileSink) void { @@ -4781,7 +4781,7 @@ pub const ByteBlobLoader = struct { } } - var blob = JSC.WebCore.Blob.initWithStore(store, globalThis); + var blob = Blob.initWithStore(store, globalThis); blob.offset = this.offset; blob.size = this.remain; this.parent().is_closed = true; diff --git a/src/bun.zig b/src/bun.zig index a61b2373dab8c7..ce576c8bf327d5 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -336,7 +336,7 @@ pub const OSPathSlice = []const OSPathChar; pub const OSPathBuffer = if (Environment.isWindows) WPathBuffer else PathBuffer; pub inline fn cast(comptime To: type, value: anytype) To { - if (@typeInfo(@TypeOf(value)) == .Int) { + if (@typeInfo(@TypeOf(value)) == .int) { return @ptrFromInt(@as(usize, value)); } @@ -345,12 +345,12 @@ pub inline fn cast(comptime To: type, value: anytype) To { pub fn len(value: anytype) usize { return switch (@typeInfo(@TypeOf(value))) { - .Array => |info| info.len, - .Vector => |info| info.len, - .Pointer => |info| switch (info.size) { - .One => switch 
(@typeInfo(info.child)) { - .Array => |array| brk: { - if (array.sentinel != null) { + .array => |info| info.len, + .vector => |info| info.len, + .pointer => |info| switch (info.size) { + .one => switch (@typeInfo(info.child)) { + .array => |array| brk: { + if (array.sentinel_ptr != null) { @compileError("use bun.sliceTo"); } @@ -358,20 +358,20 @@ pub fn len(value: anytype) usize { }, else => @compileError("invalid type given to std.mem.len"), }, - .Many => { - const sentinel_ptr = info.sentinel orelse + .many => { + const sentinel_ptr = info.sentinel_ptr orelse @compileError("length of pointer with no sentinel"); const sentinel = @as(*align(1) const info.child, @ptrCast(sentinel_ptr)).*; return std.mem.indexOfSentinel(info.child, sentinel, value); }, - .C => { + .c => { assert(value != null); return std.mem.indexOfSentinel(info.child, 0, value); }, - .Slice => value.len, + .slice => value.len, }, - .Struct => |info| if (info.is_tuple) { + .@"struct" => |info| if (info.is_tuple) { return info.fields.len; } else @compileError("invalid type given to std.mem.len"), else => @compileError("invalid type given to std.mem.len"), @@ -380,34 +380,34 @@ pub fn len(value: anytype) usize { fn Span(comptime T: type) type { switch (@typeInfo(T)) { - .Optional => |optional_info| { + .optional => |optional_info| { return ?Span(optional_info.child); }, - .Pointer => |ptr_info| { + .pointer => |ptr_info| { var new_ptr_info = ptr_info; switch (ptr_info.size) { - .One => switch (@typeInfo(ptr_info.child)) { - .Array => |info| { + .one => switch (@typeInfo(ptr_info.child)) { + .array => |info| { new_ptr_info.child = info.child; - new_ptr_info.sentinel = info.sentinel; + new_ptr_info.sentinel_ptr = info.sentinel_ptr; }, else => @compileError("invalid type given to std.mem.Span"), }, - .C => { - new_ptr_info.sentinel = &@as(ptr_info.child, 0); + .c => { + new_ptr_info.sentinel_ptr = &@as(ptr_info.child, 0); new_ptr_info.is_allowzero = false; }, - .Many, .Slice => {}, + .many, .slice => 
{}, } - new_ptr_info.size = .Slice; - return @Type(.{ .Pointer = new_ptr_info }); + new_ptr_info.size = .slice; + return @Type(.{ .pointer = new_ptr_info }); }, else => @compileError("invalid type given to std.mem.Span: " ++ @typeName(T)), } } pub fn span(ptr: anytype) Span(@TypeOf(ptr)) { - if (@typeInfo(@TypeOf(ptr)) == .Optional) { + if (@typeInfo(@TypeOf(ptr)) == .optional) { if (ptr) |non_null| { return span(non_null); } else { @@ -416,8 +416,8 @@ pub fn span(ptr: anytype) Span(@TypeOf(ptr)) { } const Result = Span(@TypeOf(ptr)); const l = len(ptr); - const ptr_info = @typeInfo(Result).Pointer; - if (ptr_info.sentinel) |s_ptr| { + const ptr_info = @typeInfo(Result).pointer; + if (ptr_info.sentinel_ptr) |s_ptr| { const s = @as(*align(1) const ptr_info.child, @ptrCast(s_ptr)).*; return ptr[0..l :s]; } else { @@ -591,11 +591,11 @@ pub fn fastRandom() u64 { } }; - var prng_: ?std.rand.DefaultPrng = null; + var prng_: ?std.Random.DefaultPrng = null; pub fn get() u64 { if (prng_ == null) { - prng_ = std.rand.DefaultPrng.init(random_seed.get()); + prng_ = std.Random.DefaultPrng.init(random_seed.get()); } return prng_.?.random().uintAtMost(u64, std.math.maxInt(u64)); @@ -808,7 +808,7 @@ pub const zlib = @import("./zlib.zig"); pub var start_time: i128 = 0; pub fn openFileZ(pathZ: [:0]const u8, open_flags: std.fs.File.OpenFlags) !std.fs.File { - var flags: Mode = 0; + var flags: i32 = 0; switch (open_flags.mode) { .read_only => flags |= O.RDONLY, .write_only => flags |= O.WRONLY, @@ -821,7 +821,7 @@ pub fn openFileZ(pathZ: [:0]const u8, open_flags: std.fs.File.OpenFlags) !std.fs pub fn openFile(path_: []const u8, open_flags: std.fs.File.OpenFlags) !std.fs.File { if (comptime Environment.isWindows) { - var flags: Mode = 0; + var flags: i32 = 0; switch (open_flags.mode) { .read_only => flags |= O.RDONLY, .write_only => flags |= O.WRONLY, @@ -1468,10 +1468,10 @@ pub fn getFdPathW(fd_: anytype, buf: *WPathBuffer) ![]u16 { fn lenSliceTo(ptr: anytype, comptime end: 
meta.Elem(@TypeOf(ptr))) usize { switch (@typeInfo(@TypeOf(ptr))) { - .Pointer => |ptr_info| switch (ptr_info.size) { - .One => switch (@typeInfo(ptr_info.child)) { - .Array => |array_info| { - if (array_info.sentinel) |sentinel_ptr| { + .pointer => |ptr_info| switch (ptr_info.size) { + .one => switch (@typeInfo(ptr_info.child)) { + .array => |array_info| { + if (array_info.sentinel_ptr) |sentinel_ptr| { const sentinel = @as(*align(1) const array_info.child, @ptrCast(sentinel_ptr)).*; if (sentinel == end) { return std.mem.indexOfSentinel(array_info.child, end, ptr); @@ -1481,7 +1481,7 @@ fn lenSliceTo(ptr: anytype, comptime end: meta.Elem(@TypeOf(ptr))) usize { }, else => {}, }, - .Many => if (ptr_info.sentinel) |sentinel_ptr| { + .many => if (ptr_info.sentinel_ptr) |sentinel_ptr| { const sentinel = @as(*align(1) const ptr_info.child, @ptrCast(sentinel_ptr)).*; // We may be looking for something other than the sentinel, // but iterating past the sentinel would be a bug so we need @@ -1490,12 +1490,12 @@ fn lenSliceTo(ptr: anytype, comptime end: meta.Elem(@TypeOf(ptr))) usize { while (ptr[i] != end and ptr[i] != sentinel) i += 1; return i; }, - .C => { + .c => { assert(ptr != null); return std.mem.indexOfSentinel(ptr_info.child, end, ptr); }, - .Slice => { - if (ptr_info.sentinel) |sentinel_ptr| { + .slice => { + if (ptr_info.sentinel_ptr) |sentinel_ptr| { const sentinel = @as(*align(1) const ptr_info.child, @ptrCast(sentinel_ptr)).*; if (sentinel == end) { return std.mem.indexOfSentinel(ptr_info.child, sentinel, ptr); @@ -1512,51 +1512,51 @@ fn lenSliceTo(ptr: anytype, comptime end: meta.Elem(@TypeOf(ptr))) usize { /// Helper for the return type of sliceTo() fn SliceTo(comptime T: type, comptime end: meta.Elem(T)) type { switch (@typeInfo(T)) { - .Optional => |optional_info| { + .optional => |optional_info| { return ?SliceTo(optional_info.child, end); }, - .Pointer => |ptr_info| { + .pointer => |ptr_info| { var new_ptr_info = ptr_info; - new_ptr_info.size = .Slice; 
+ new_ptr_info.size = .slice; switch (ptr_info.size) { - .One => switch (@typeInfo(ptr_info.child)) { - .Array => |array_info| { + .one => switch (@typeInfo(ptr_info.child)) { + .array => |array_info| { new_ptr_info.child = array_info.child; // The return type must only be sentinel terminated if we are guaranteed // to find the value searched for, which is only the case if it matches // the sentinel of the type passed. - if (array_info.sentinel) |sentinel_ptr| { + if (array_info.sentinel_ptr) |sentinel_ptr| { const sentinel = @as(*align(1) const array_info.child, @ptrCast(sentinel_ptr)).*; if (end == sentinel) { - new_ptr_info.sentinel = &end; + new_ptr_info.sentinel_ptr = &end; } else { - new_ptr_info.sentinel = null; + new_ptr_info.sentinel_ptr = null; } } }, else => {}, }, - .Many, .Slice => { + .many, .slice => { // The return type must only be sentinel terminated if we are guaranteed // to find the value searched for, which is only the case if it matches // the sentinel of the type passed. - if (ptr_info.sentinel) |sentinel_ptr| { + if (ptr_info.sentinel_ptr) |sentinel_ptr| { const sentinel = @as(*align(1) const ptr_info.child, @ptrCast(sentinel_ptr)).*; if (end == sentinel) { - new_ptr_info.sentinel = &end; + new_ptr_info.sentinel_ptr = &end; } else { - new_ptr_info.sentinel = null; + new_ptr_info.sentinel_ptr = null; } } }, - .C => { - new_ptr_info.sentinel = &end; + .c => { + new_ptr_info.sentinel_ptr = &end; // C pointers are always allowzero, but we don't want the return type to be. assert(new_ptr_info.is_allowzero); new_ptr_info.is_allowzero = false; }, } - return @Type(.{ .Pointer = new_ptr_info }); + return @Type(.{ .pointer = new_ptr_info }); }, else => {}, } @@ -1571,14 +1571,14 @@ fn SliceTo(comptime T: type, comptime end: meta.Elem(T)) type { /// Pointer properties such as mutability and alignment are preserved. /// C pointers are assumed to be non-null. 
pub fn sliceTo(ptr: anytype, comptime end: meta.Elem(@TypeOf(ptr))) SliceTo(@TypeOf(ptr), end) { - if (@typeInfo(@TypeOf(ptr)) == .Optional) { + if (@typeInfo(@TypeOf(ptr)) == .optional) { const non_null = ptr orelse return null; return sliceTo(non_null, end); } const Result = SliceTo(@TypeOf(ptr), end); const length = lenSliceTo(ptr, end); - const ptr_info = @typeInfo(Result).Pointer; - if (ptr_info.sentinel) |s_ptr| { + const ptr_info = @typeInfo(Result).pointer; + if (ptr_info.sentinel_ptr) |s_ptr| { const s = @as(*align(1) const ptr_info.child, @ptrCast(s_ptr)).*; return ptr[0..length :s]; } else { @@ -1693,7 +1693,6 @@ pub const failing_allocator = std.mem.Allocator{ .ptr = undefined, .vtable = &.{ var __reload_in_progress__ = std.atomic.Value(bool).init(false); threadlocal var __reload_in_progress__on_current_thread = false; pub fn isProcessReloadInProgressOnAnotherThread() bool { - @fence(.acquire); return __reload_in_progress__.load(.monotonic) and !__reload_in_progress__on_current_thread; } @@ -2305,7 +2304,7 @@ pub fn initArgv(allocator: std.mem.Allocator) !void { // Updates in Zig v0.12 related to Windows cmd line parsing may fix this, // see (here: https://ziglang.org/download/0.12.0/release-notes.html#Windows-Command-Line-Argument-Parsing), // so this may only need to be a temporary workaround. 
- const cmdline_ptr = std.os.windows.kernel32.GetCommandLineW(); + const cmdline_ptr = bun.windows.GetCommandLineW(); var length: c_int = 0; // As per the documentation: @@ -2323,7 +2322,7 @@ pub fn initArgv(allocator: std.mem.Allocator) !void { }; const argvu16 = argvu16_ptr[0..@intCast(length)]; - const out_argv = try allocator.alloc([:0]u8, @intCast(length)); + const out_argv = try allocator.alloc([:0]const u8, @intCast(length)); var string_builder = StringBuilder{}; for (argvu16) |argraw| { @@ -2559,7 +2558,7 @@ pub const win32 = struct { @memset(std.mem.asBytes(procinfo), 0); const rc = w.kernel32.CreateProcessW( image_pathZ.ptr, - w.kernel32.GetCommandLineW(), + bun.windows.GetCommandLineW(), null, null, 1, @@ -2601,7 +2600,7 @@ pub const FDTag = enum { const fd = toFD(fd_); const T = @TypeOf(fd_); if (comptime Environment.isWindows) { - if (@typeInfo(T) == .Int or @typeInfo(T) == .ComptimeInt) { + if (@typeInfo(T) == .int or @typeInfo(T) == .comptime_int) { switch (fd_) { 0 => return .stdin, 1 => return .stdout, @@ -2672,7 +2671,7 @@ pub fn serializable(input: anytype) @TypeOf(input) { const T = @TypeOf(input); comptime { if (trait.isExternContainer(T)) { - if (@typeInfo(T) == .Union) { + if (@typeInfo(T) == .@"union") { @compileError("Extern unions must be serialized with serializableInto"); } } @@ -3045,12 +3044,12 @@ pub const Dirname = struct { }; pub noinline fn outOfMemory() noreturn { - @setCold(true); + @branchHint(.cold); crash_handler.crashHandler(.out_of_memory, null, @returnAddress()); } pub fn todoPanic(src: std.builtin.SourceLocation, comptime format: string, args: anytype) noreturn { - @setCold(true); + @branchHint(.cold); bun.Analytics.Features.todo_panic = 1; Output.panic("TODO: " ++ format ++ " ({s}:{d})", args ++ .{ src.file, src.line }); } @@ -3086,10 +3085,11 @@ pub inline fn new(comptime T: type, init: T) *T { break :ptr ptr; }; - if (comptime Environment.allow_assert) { - const logAlloc = Output.scoped(.alloc, @hasDecl(T, 
"logAllocations")); - logAlloc("new({s}) = {*}", .{ meta.typeName(T), ptr }); - } + // TODO:: + // if (comptime Environment.allow_assert) { + // const logAlloc = Output.scoped(.alloc, @hasDecl(T, "logAllocations")); + // logAlloc("new({s}) = {*}", .{ meta.typeName(T), ptr }); + // } return ptr; } @@ -3132,21 +3132,20 @@ pub fn New(comptime T: type) type { /// Reference-counted heap-allocated instance value. /// /// `ref_count` is expected to be defined on `T` with a default value set to `1` -pub fn NewRefCounted(comptime T: type, comptime deinit_fn: ?fn (self: *T) void) type { +pub fn NewRefCounted(comptime T: type, comptime deinit_fn: ?fn (self: *T) void, debug_name: ?[:0]const u8) type { if (!@hasField(T, "ref_count")) { @compileError("Expected a field named \"ref_count\" with a default value of 1 on " ++ @typeName(T)); } for (std.meta.fields(T)) |field| { if (strings.eqlComptime(field.name, "ref_count")) { - if (field.default_value == null) { + if (field.default_value_ptr == null) { @compileError("Expected a field named \"ref_count\" with a default value of 1 on " ++ @typeName(T)); } } } - const output_name: []const u8 = if (@hasDecl(T, "DEBUG_REFCOUNT_NAME")) T.DEBUG_REFCOUNT_NAME else meta.typeBaseName(@typeName(T)); - + const output_name = debug_name orelse meta.typeBaseName(@typeName(T)); const log = Output.scoped(output_name, true); return struct { @@ -3192,21 +3191,20 @@ pub fn NewRefCounted(comptime T: type, comptime deinit_fn: ?fn (self: *T) void) }; } -pub fn NewThreadSafeRefCounted(comptime T: type, comptime deinit_fn: ?fn (self: *T) void) type { +pub fn NewThreadSafeRefCounted(comptime T: type, comptime deinit_fn: ?fn (self: *T) void, debug_name: ?[:0]const u8) type { if (!@hasField(T, "ref_count")) { @compileError("Expected a field named \"ref_count\" with a default value of 1 on " ++ @typeName(T)); } for (std.meta.fields(T)) |field| { if (strings.eqlComptime(field.name, "ref_count")) { - if (field.default_value == null) { + if 
(field.default_value_ptr == null) { @compileError("Expected a field named \"ref_count\" with a default value of 1 on " ++ @typeName(T)); } } } - const output_name: []const u8 = if (@hasDecl(T, "DEBUG_REFCOUNT_NAME")) T.DEBUG_REFCOUNT_NAME else meta.typeBaseName(@typeName(T)); - + const output_name = debug_name orelse meta.typeBaseName(@typeName(T)); const log = Output.scoped(output_name, true); return struct { @@ -3297,7 +3295,7 @@ const errno_map = errno_map: { }; pub fn errnoToZigErr(err: anytype) anyerror { - var num = if (@typeInfo(@TypeOf(err)) == .Enum) + var num = if (@typeInfo(@TypeOf(err)) == .@"enum") @intFromEnum(err) else err; @@ -3330,13 +3328,13 @@ pub fn iterateDir(dir: std.fs.Dir) DirIterator.Iterator { } fn ReinterpretSliceType(comptime T: type, comptime slice: type) type { - const is_const = @typeInfo(slice).Pointer.is_const; + const is_const = @typeInfo(slice).pointer.is_const; return if (is_const) []const T else []T; } /// Zig has a todo for @ptrCast changing the `.len`. 
This is the workaround pub fn reinterpretSlice(comptime T: type, slice: anytype) ReinterpretSliceType(T, @TypeOf(slice)) { - const is_const = @typeInfo(@TypeOf(slice)).Pointer.is_const; + const is_const = @typeInfo(@TypeOf(slice)).pointer.is_const; const bytes = std.mem.sliceAsBytes(slice); const new_ptr = @as(if (is_const) [*]const T else [*]T, @ptrCast(@alignCast(bytes.ptr))); return new_ptr[0..@divTrunc(bytes.len, @sizeOf(T))]; @@ -3532,23 +3530,23 @@ pub const handleErrorReturnTrace = crash_handler.handleErrorReturnTrace; noinline fn assertionFailure() noreturn { if (@inComptime()) { @compileError("assertion failure"); + } else { + @branchHint(.cold); + Output.panic("Internal assertion failure", .{}); } - - @setCold(true); - Output.panic("Internal assertion failure", .{}); } noinline fn assertionFailureWithLocation(src: std.builtin.SourceLocation) noreturn { if (@inComptime()) { @compileError("assertion failure"); + } else { + @branchHint(.cold); + Output.panic("Internal assertion failure {s}:{d}:{d}", .{ + src.file, + src.line, + src.column, + }); } - - @setCold(true); - Output.panic("Internal assertion failure {s}:{d}:{d}", .{ - src.file, - src.line, - src.column, - }); } pub fn debugAssert(cheap_value_only_plz: bool) callconv(callconv_inline) void { @@ -3618,14 +3616,14 @@ pub fn getRoughTickCount() timespec { .sec = 0, }; const clocky = struct { - pub var clock_id: i32 = 0; + pub var clock_id: std.c.CLOCK = .REALTIME; pub fn get() void { var res = timespec{}; - _ = std.c.clock_getres(C.CLOCK_MONOTONIC_RAW_APPROX, @ptrCast(&res)); + _ = std.c.clock_getres(.MONOTONIC_RAW_APPROX, @ptrCast(&res)); if (res.ms() <= 1) { - clock_id = C.CLOCK_MONOTONIC_RAW_APPROX; + clock_id = .MONOTONIC_RAW_APPROX; } else { - clock_id = C.CLOCK_MONOTONIC_RAW; + clock_id = .MONOTONIC_RAW; } } @@ -3644,14 +3642,14 @@ pub fn getRoughTickCount() timespec { .sec = 0, }; const clocky = struct { - pub var clock_id: i32 = 0; + pub var clock_id: std.os.linux.CLOCK = .REALTIME; pub fn get() 
void { var res = timespec{}; - _ = std.os.linux.clock_getres(std.os.linux.CLOCK.MONOTONIC_COARSE, @ptrCast(&res)); + _ = std.os.linux.clock_getres(.MONOTONIC_COARSE, @ptrCast(&res)); if (res.ms() <= 1) { - clock_id = std.os.linux.CLOCK.MONOTONIC_COARSE; + clock_id = .MONOTONIC_COARSE; } else { - clock_id = std.os.linux.CLOCK.MONOTONIC_RAW; + clock_id = .MONOTONIC_RAW; } } @@ -3812,7 +3810,7 @@ pub const UUID = @import("./bun.js/uuid.zig"); /// call a first element '0' or '1' which makes integer type ambiguous. pub fn OrdinalT(comptime Int: type) type { return enum(Int) { - invalid = switch (@typeInfo(Int).Int.signedness) { + invalid = switch (@typeInfo(Int).int.signedness) { .unsigned => std.math.maxInt(Int), .signed => -1, }, @@ -3880,7 +3878,7 @@ pub const bake = @import("bake/bake.zig"); /// like std.enums.tagName, except it doesn't lose the sentinel value. pub fn tagName(comptime Enum: type, value: Enum) ?[:0]const u8 { - return inline for (@typeInfo(Enum).Enum.fields) |f| { + return inline for (@typeInfo(Enum).@"enum".fields) |f| { if (@intFromEnum(value) == f.value) break f.name; } else null; } @@ -4020,7 +4018,7 @@ pub fn GenericIndex(backing_int: type, uid: anytype) type { return @enumFromInt(int); } - /// Prefer this over @intFromEnum because of type confusion with `.Optional` + /// Prefer this over @intFromEnum because of type confusion with `.optional` pub inline fn get(i: @This()) backing_int { bun.assert(@intFromEnum(i) != null_value); // memory corruption return @intFromEnum(i); @@ -4121,7 +4119,7 @@ pub fn once(comptime f: anytype) Once(f) { /// It is undefined behavior if `f` re-enters the same Once instance. 
pub fn Once(comptime f: anytype) type { return struct { - const Return = @typeInfo(@TypeOf(f)).Fn.return_type.?; + const Return = @typeInfo(@TypeOf(f)).@"fn".return_type.?; done: bool = false, payload: Return = undefined, @@ -4139,7 +4137,7 @@ pub fn Once(comptime f: anytype) type { } fn callSlow(self: *@This(), args: std.meta.ArgsTuple(@TypeOf(f))) Return { - @setCold(true); + @branchHint(.cold); self.mutex.lock(); defer self.mutex.unlock(); @@ -4158,7 +4156,7 @@ pub fn Once(comptime f: anytype) type { /// `val` must be a pointer to an optional type (e.g. `*?T`) /// /// This function takes the value out of the optional, replacing it with null, and returns the value. -pub inline fn take(val: anytype) ?bun.meta.OptionalChild(@TypeOf(val)) { +pub inline fn take(val: anytype) ?@typeInfo(@typeInfo(@TypeOf(val)).pointer.child).optional.child { if (val.*) |v| { val.* = null; return v; @@ -4182,11 +4180,11 @@ pub inline fn wrappingNegation(val: anytype) @TypeOf(val) { fn assertNoPointers(T: type) void { switch (@typeInfo(T)) { - .Pointer => @compileError("no pointers!"), - inline .Struct, .Union => |s| for (s.fields) |field| { + .pointer => @compileError("no pointers!"), + inline .@"struct", .@"union" => |s| for (s.fields) |field| { assertNoPointers(field.type); }, - .Array => |a| assertNoPointers(a.child), + .array => |a| assertNoPointers(a.child), else => {}, } } @@ -4197,7 +4195,7 @@ pub inline fn writeAnyToHasher(hasher: anytype, thing: anytype) void { } pub inline fn isComptimeKnown(x: anytype) bool { - return comptime @typeInfo(@TypeOf(.{x})).Struct.fields[0].is_comptime; + return comptime @typeInfo(@TypeOf(.{x})).@"struct".fields[0].is_comptime; } pub inline fn itemOrNull(comptime T: type, slice: []const T, index: usize) ?T { @@ -4314,7 +4312,7 @@ pub const StackCheck = struct { // Workaround for lack of branch hints. 
pub noinline fn throwStackOverflow() StackOverflow!void { - @setCold(true); + @branchHint(.cold); return error.StackOverflow; } const StackOverflow = error{StackOverflow}; diff --git a/src/bun_js.zig b/src/bun_js.zig index 857770971b62e0..904c16d0388a3e 100644 --- a/src/bun_js.zig +++ b/src/bun_js.zig @@ -155,7 +155,7 @@ pub const Run = struct { } fn bootBunShell(ctx: Command.Context, entry_path: []const u8) !bun.shell.ExitCode { - @setCold(true); + @branchHint(.cold); // this is a hack: make dummy bundler so we can use its `.runEnvLoader()` function to populate environment variables probably should split out the functionality var bundle = try bun.Transpiler.init( @@ -482,7 +482,7 @@ pub export fn Bun__onRejectEntryPointResult(global: *JSC.JSGlobalObject, callfra } noinline fn dumpBuildError(vm: *JSC.VirtualMachine) void { - @setCold(true); + @branchHint(.cold); Output.flush(); @@ -498,7 +498,7 @@ noinline fn dumpBuildError(vm: *JSC.VirtualMachine) void { } pub noinline fn failWithBuildError(vm: *JSC.VirtualMachine) noreturn { - @setCold(true); + @branchHint(.cold); dumpBuildError(vm); Global.exit(1); } diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 066843e3231266..c1003b858741b8 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -1719,7 +1719,7 @@ pub const BundleV2 = struct { ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), started_at_ns: u64 = 0, - pub usingnamespace bun.NewThreadSafeRefCounted(JSBundleCompletionTask, @This().deinit); + pub usingnamespace bun.NewThreadSafeRefCounted(JSBundleCompletionTask, _deinit, null); pub fn configureBundler( completion: *JSBundleCompletionTask, @@ -1797,7 +1797,7 @@ pub const BundleV2 = struct { pub const TaskCompletion = bun.JSC.AnyTask.New(JSBundleCompletionTask, onComplete); - pub fn deinit(this: *JSBundleCompletionTask) void { + fn _deinit(this: *JSBundleCompletionTask) void { this.result.deinit(); this.log.deinit(); this.poll_ref.disable(); @@ -14790,7 
+14790,7 @@ pub const LinkerContext = struct { Part.SymbolUseMap, c.allocator, .{ - .{ wrapper_ref, .{ .count_estimate = 1 } }, + .{ wrapper_ref, Symbol.Use{ .count_estimate = 1 } }, }, ) catch unreachable, .declared_symbols = js_ast.DeclaredSymbol.List.fromSlice( @@ -14847,10 +14847,10 @@ pub const LinkerContext = struct { const part_index = c.graph.addPartToFile( source_index, .{ - .symbol_uses = bun.from( + .symbol_uses = bun.fromMapLike( Part.SymbolUseMap, c.allocator, - .{ + &.{ .{ wrapper_ref, .{ .count_estimate = 1 } }, }, ) catch unreachable, diff --git a/src/c.zig b/src/c.zig index 98414cbfbde47f..d21250a90529ec 100644 --- a/src/c.zig +++ b/src/c.zig @@ -88,9 +88,9 @@ pub fn lstat_absolute(path: [:0]const u8) !Stat { else => Kind.unknown, }, }, - .atime = @as(i128, atime.tv_sec) * std.time.ns_per_s + atime.tv_nsec, - .mtime = @as(i128, mtime.tv_sec) * std.time.ns_per_s + mtime.tv_nsec, - .ctime = @as(i128, ctime.tv_sec) * std.time.ns_per_s + ctime.tv_nsec, + .atime = @as(i128, atime.sec) * std.time.ns_per_s + atime.nsec, + .mtime = @as(i128, mtime.sec) * std.time.ns_per_s + mtime.nsec, + .ctime = @as(i128, ctime.sec) * std.time.ns_per_s + ctime.nsec, }; } @@ -417,7 +417,7 @@ pub fn _dlsym(handle: ?*anyopaque, name: [:0]const u8) ?*anyopaque { } pub fn dlsymWithHandle(comptime Type: type, comptime name: [:0]const u8, comptime handle_getter: fn () ?*anyopaque) ?Type { - if (comptime @typeInfo(Type) != .Pointer) { + if (comptime @typeInfo(Type) != .pointer) { @compileError("dlsym must be a pointer type (e.g. ?const *fn()). 
Received " ++ @typeName(Type) ++ "."); } @@ -480,7 +480,7 @@ pub extern fn memmove(dest: [*]u8, src: [*]const u8, n: usize) void; // https://man7.org/linux/man-pages/man3/fmod.3.html pub extern fn fmod(f64, f64) f64; -pub fn dlopen(filename: [:0]const u8, flags: i32) ?*anyopaque { +pub fn dlopen(filename: [:0]const u8, flags: C.RTLD) ?*anyopaque { if (comptime Environment.isWindows) { return bun.windows.LoadLibraryA(filename); } diff --git a/src/cli.zig b/src/cli.zig index 0f1bad446ed8ee..54a05dad0ced89 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -109,7 +109,7 @@ const ColonListType = @import("./cli/colon_list_type.zig").ColonListType; pub const LoaderColonList = ColonListType(Api.Loader, Arguments.loader_resolver); pub const DefineColonList = ColonListType(string, Arguments.noop_resolver); fn invalidTarget(diag: *clap.Diagnostic, _target: []const u8) noreturn { - @setCold(true); + @branchHint(.cold); diag.name.long = "target"; diag.arg = _target; diag.report(Output.errorWriter(), error.InvalidTarget) catch {}; @@ -1250,7 +1250,7 @@ const AutoCommand = struct { pub const HelpCommand = struct { pub fn exec(allocator: std.mem.Allocator) !void { - @setCold(true); + @branchHint(.cold); execWithReason(allocator, .explicit); } @@ -1344,7 +1344,7 @@ pub const HelpCommand = struct { ; pub fn printWithReason(comptime reason: Reason, show_all_flags: bool) void { - var rand_state = std.rand.DefaultPrng.init(@as(u64, @intCast(@max(std.time.milliTimestamp(), 0)))); + var rand_state = std.Random.DefaultPrng.init(@as(u64, @intCast(@max(std.time.milliTimestamp(), 0)))); const rand = rand_state.random(); const package_x_i = rand.uintAtMost(usize, packages_to_x_filler.len - 1); @@ -1388,7 +1388,7 @@ pub const HelpCommand = struct { } pub fn execWithReason(_: std.mem.Allocator, comptime reason: Reason) void { - @setCold(true); + @branchHint(.cold); printWithReason(reason, false); if (reason == .invalid_command) { @@ -1400,7 +1400,7 @@ pub const HelpCommand = struct { pub const 
ReservedCommand = struct { pub fn exec(_: std.mem.Allocator) !void { - @setCold(true); + @branchHint(.cold); const command_name = for (bun.argv[1..]) |arg| { if (arg.len > 1 and arg[0] == '-') continue; break arg; @@ -2651,13 +2651,13 @@ pub const Command = struct { }; pub fn printVersionAndExit() noreturn { - @setCold(true); + @branchHint(.cold); Output.writer().writeAll(Global.package_json_version ++ "\n") catch {}; Global.exit(0); } pub fn printRevisionAndExit() noreturn { - @setCold(true); + @branchHint(.cold); Output.writer().writeAll(Global.package_json_version_with_revision ++ "\n") catch {}; Global.exit(0); } diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index 3256f302edcbda..e5210fb97d67f9 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -252,7 +252,7 @@ pub const BunxCommand = struct { } } else { const stat = target_package_json.stat().unwrap() catch break :is_stale true; - break :is_stale std.time.timestamp() - stat.mtime().tv_sec > seconds_cache_valid; + break :is_stale std.time.timestamp() - stat.mtime().sec > seconds_cache_valid; } }; @@ -568,7 +568,7 @@ pub const BunxCommand = struct { if (rc != 0) { break :is_stale true; } - break :is_stale std.time.timestamp() - stat.mtime().tv_sec > seconds_cache_valid; + break :is_stale std.time.timestamp() - stat.mtime().sec > seconds_cache_valid; } }; diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index bc85f9533113b0..07656cefcfa870 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -99,7 +99,7 @@ fn execTask(allocator: std.mem.Allocator, task_: string, cwd: string, _: string, const task = std.mem.trim(u8, task_, " \n\r\t"); if (task.len == 0) return; - var splitter = std.mem.split(u8, task, " "); + var splitter = std.mem.splitScalar(u8, task, ' '); var count: usize = 0; while (splitter.next() != null) { count += 1; @@ -117,7 +117,7 @@ fn execTask(allocator: std.mem.Allocator, task_: string, cwd: string, _: string, { var 
i: usize = npm_args; - splitter = std.mem.split(u8, task, " "); + splitter = std.mem.splitScalar(u8, task, ' '); while (splitter.next()) |split| { argv[i] = split; i += 1; @@ -237,7 +237,7 @@ const BUN_CREATE_DIR = ".bun-create"; var home_dir_buf: bun.PathBuffer = undefined; pub const CreateCommand = struct { pub fn exec(ctx: Command.Context, example_tag: Example.Tag, template: []const u8) !void { - @setCold(true); + @branchHint(.cold); Global.configureAllocator(.{ .long_running = false }); HTTP.HTTPThread.init(&.{}); @@ -1367,7 +1367,7 @@ pub const CreateCommand = struct { for (items) |task| { if (task.asString(ctx.allocator)) |task_entry| { // if (needs.bun_bun_for_nextjs or bun_bun_for_react_scripts) { - // var iter = std.mem.split(u8, task_entry, " "); + // var iter = std.mem.splitScalar(u8, task_entry, ' '); // var last_was_bun = false; // while (iter.next()) |current| { // if (strings.eqlComptime(current, "bun")) { @@ -2306,7 +2306,6 @@ const GitHandler = struct { else run(destination, PATH, false) catch false; - @fence(.acquire); success.store( if (outcome) 1 @@ -2318,8 +2317,6 @@ const GitHandler = struct { } pub fn wait() bool { - @fence(.release); - while (success.load(.acquire) == 0) { Futex.wait(&success, 0, 1000) catch continue; } diff --git a/src/cli/filter_run.zig b/src/cli/filter_run.zig index ba1a00ed90b545..af4071132f69c9 100644 --- a/src/cli/filter_run.zig +++ b/src/cli/filter_run.zig @@ -409,12 +409,7 @@ const AbortHandler = struct { .mask = std.posix.empty_sigset, .flags = std.posix.SA.SIGINFO | std.posix.SA.RESTART | std.posix.SA.RESETHAND, }; - // if we can't set the handler, we just ignore it - std.posix.sigaction(std.posix.SIG.INT, &action, null) catch |err| { - if (Environment.isDebug) { - Output.warn("Failed to set abort handler: {s}\n", .{@errorName(err)}); - } - }; + std.posix.sigaction(std.posix.SIG.INT, &action, null); } else { const res = bun.windows.SetConsoleCtrlHandler(windowsCtrlHandler, std.os.windows.TRUE); if (res == 0) { diff 
--git a/src/cli/init_command.zig b/src/cli/init_command.zig index a4676aa6506a0c..9506ef0b55968a 100644 --- a/src/cli/init_command.zig +++ b/src/cli/init_command.zig @@ -68,7 +68,7 @@ pub const InitCommand = struct { /// Create a new asset file, overriding anything that already exists. Known /// assets will have their contents pre-populated; otherwise the file will be empty. fn create(comptime asset_name: []const u8, args: anytype) !void { - const is_template = comptime (@TypeOf(args) != @TypeOf(null)) and @typeInfo(@TypeOf(args)).Struct.fields.len > 0; + const is_template = comptime (@TypeOf(args) != @TypeOf(null)) and @typeInfo(@TypeOf(args)).@"struct".fields.len > 0; return createFull(asset_name, asset_name, "", is_template, args); } diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig index ed9e5d02ace5cf..28109a61319b52 100644 --- a/src/cli/install_completions_command.zig +++ b/src/cli/install_completions_command.zig @@ -303,7 +303,7 @@ pub const InstallCompletionsCommand = struct { }, .zsh => { if (bun.getenvZ("fpath")) |fpath| { - var splitter = std.mem.split(u8, fpath, " "); + var splitter = std.mem.splitScalar(u8, fpath, ' '); while (splitter.next()) |dir| { completions_dir = dir; diff --git a/src/cli/outdated_command.zig b/src/cli/outdated_command.zig index af827fcbcbfba0..c15fc0ca4f4f74 100644 --- a/src/cli/outdated_command.zig +++ b/src/cli/outdated_command.zig @@ -427,14 +427,12 @@ pub const OutdatedCommand = struct { table.printColumnNames(); for (workspace_pkg_ids) |workspace_pkg_id| { - inline for ( - .{ - Behavior.prod, - Behavior.dev, - Behavior.peer, - Behavior.optional, - }, - ) |group_behavior| { + inline for ([_]Behavior{ + .{ .prod = true }, + .{ .dev = true }, + .{ .peer = true }, + .{ .optional = true }, + }) |group_behavior| { for (outdated_ids.items) |ids| { if (workspace_pkg_id != ids.workspace_pkg_id) continue; const package_id = ids.package_id; diff --git a/src/cli/package_manager_command.zig 
b/src/cli/package_manager_command.zig index 87866a4713b9a2..77a833a084cfa0 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -59,7 +59,7 @@ pub const PackageManagerCommand = struct { } pub fn printHash(ctx: Command.Context, file: File) !void { - @setCold(true); + @branchHint(.cold); const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pm); var pm, const cwd = try PackageManager.init(ctx, cli, PackageManager.Subcommand.pm); diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index f82ae5b5351a47..d7ae1c799c9ef4 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -757,7 +757,7 @@ pub const RunCommand = struct { const dir_slice = target_path_buffer[0 .. prefix.len + len + dir_name.len]; if (Environment.isDebug) { - const dir_slice_u8 = std.unicode.utf16leToUtf8Alloc(bun.default_allocator, dir_slice) catch @panic("oom"); + const dir_slice_u8 = std.unicode.utf16LeToUtf8Alloc(bun.default_allocator, dir_slice) catch @panic("oom"); defer bun.default_allocator.free(dir_slice_u8); std.fs.deleteTreeAbsolute(dir_slice_u8) catch {}; std.fs.makeDirAbsolute(dir_slice_u8) catch @panic("huh?"); @@ -1569,7 +1569,7 @@ pub const RunCommand = struct { const PATH = this_transpiler.env.get("PATH") orelse ""; var path_for_which = PATH; - if (comptime bin_dirs_only) { + if (bin_dirs_only) { if (ORIGINAL_PATH.len < PATH.len) { path_for_which = PATH[0 .. PATH.len - (ORIGINAL_PATH.len + 1)]; } else { @@ -1598,7 +1598,7 @@ pub const RunCommand = struct { return true; } - if (comptime log_errors) { + if (log_errors) { const ext = std.fs.path.extension(target_name); const default_loader = options.defaultLoaders.get(ext); if (default_loader != null and default_loader.?.isJavaScriptLikeOrJSON() or target_name.len > 0 and (target_name[0] == '.' 
or target_name[0] == '/' or std.fs.path.isAbsolute(target_name))) { diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index f9d036d2f2c23b..244083f4a8d261 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -83,7 +83,7 @@ fn escapeXml(str: string, writer: anytype) !void { try writer.writeAll(str[last..]); } } -fn fmtStatusTextLine(comptime status: @Type(.EnumLiteral), comptime emoji_or_color: bool) []const u8 { +fn fmtStatusTextLine(comptime status: @Type(.enum_literal), comptime emoji_or_color: bool) []const u8 { comptime { // emoji and color might be split into two different options in the future // some terminals support color, but not emoji. @@ -107,7 +107,7 @@ fn fmtStatusTextLine(comptime status: @Type(.EnumLiteral), comptime emoji_or_col } } -fn writeTestStatusLine(comptime status: @Type(.EnumLiteral), writer: anytype) void { +fn writeTestStatusLine(comptime status: @Type(.enum_literal), writer: anytype) void { if (Output.enable_ansi_colors_stderr) writer.print(fmtStatusTextLine(status, true), .{}) catch unreachable else @@ -271,9 +271,7 @@ pub const JunitReporter = struct { \\ ); - try this.contents.appendSlice(bun.default_allocator, - \\\n"); } diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 9c656a24c29e93..dc9204a14d0cbb 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -400,7 +400,7 @@ pub const UpgradeCommand = struct { }; pub fn exec(ctx: Command.Context) !void { - @setCold(true); + @branchHint(.cold); const args = bun.argv; if (args.len > 2) { diff --git a/src/codegen/bindgen.ts b/src/codegen/bindgen.ts index b3d8be92c67098..200dcf0caa6070 100644 --- a/src/codegen/bindgen.ts +++ b/src/codegen/bindgen.ts @@ -1485,7 +1485,7 @@ zigInternal.line("};"); zigInternal.line(); zigInternal.line("comptime {"); zigInternal.line(` if (bun.Environment.export_cpp_apis) {`); -zigInternal.line(" for (@typeInfo(binding_internals).Struct.decls) |decl| {"); +zigInternal.line(" for 
(@typeInfo(binding_internals).@\"struct\".decls) |decl| {"); zigInternal.line(" _ = &@field(binding_internals, decl.name);"); zigInternal.line(" }"); zigInternal.line(" }"); diff --git a/src/codegen/generate-classes.ts b/src/codegen/generate-classes.ts index 738ea2330c408b..8d8f01be847000 100644 --- a/src/codegen/generate-classes.ts +++ b/src/codegen/generate-classes.ts @@ -2064,7 +2064,7 @@ const JavaScriptCoreBindings = struct { ` }; comptime { -${[...exports.values()].map(name => ` @export(JavaScriptCoreBindings.${name}, .{ .name = "${name}" });`).join("\n")} +${[...exports.values()].map(name => ` @export(&JavaScriptCoreBindings.${name}, .{ .name = "${name}" });`).join("\n")} }` ); } diff --git a/src/codegen/generate-js2native.ts b/src/codegen/generate-js2native.ts index 7034c0b9852b6a..8b2f46bbd0527b 100644 --- a/src/codegen/generate-js2native.ts +++ b/src/codegen/generate-js2native.ts @@ -236,7 +236,7 @@ export function getJS2NativeZig(gs2NativeZigPath: string) { .flatMap(x => { const base = basename(x.filename.replace(/\.bind\.ts$/, "")); return [ - ` @export(bun.gen.${base}.create${cap(x.symbol)}Callback, .{ .name = ${JSON.stringify( + ` @export(&bun.gen.${base}.create${cap(x.symbol)}Callback, .{ .name = ${JSON.stringify( `js2native_bindgen_${base}_${x.symbol}`, )} });`, ]; diff --git a/src/copy_file.zig b/src/copy_file.zig index 220aeb773395e3..d254711b49badc 100644 --- a/src/copy_file.zig +++ b/src/copy_file.zig @@ -60,7 +60,7 @@ const CopyFileReturnType = bun.sys.Maybe(void); pub fn copyFileWithState(in: InputType, out: InputType, copy_file_state: *CopyFileState) CopyFileReturnType { if (comptime Environment.isMac) { - const rc = posix.system.fcopyfile(in, out, null, posix.system.COPYFILE_DATA); + const rc = posix.system.fcopyfile(in, out, null, posix.system.COPYFILE{ .DATA = true }); switch (posix.errno(rc)) { .SUCCESS => return CopyFileReturnType.success, diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 6a1ae522b207bc..0692f384f2754d 
100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -178,7 +178,7 @@ pub fn crashHandler( error_return_trace: ?*std.builtin.StackTrace, begin_addr: ?usize, ) noreturn { - @setCold(true); + @branchHint(.cold); if (bun.Environment.isDebug) bun.Output.disableScopedDebugWriter(); @@ -275,11 +275,11 @@ pub fn crashHandler( } else switch (bun.Environment.os) { .windows => { var name: std.os.windows.PWSTR = undefined; - const result = bun.windows.GetThreadDescription(std.os.windows.kernel32.GetCurrentThread(), &name); + const result = bun.windows.GetThreadDescription(bun.windows.GetCurrentThread(), &name); if (std.os.windows.HRESULT_CODE(result) == .SUCCESS and name[0] != 0) { writer.print("({})", .{bun.fmt.utf16(bun.span(name))}) catch std.posix.abort(); } else { - writer.print("(thread {d})", .{std.os.windows.kernel32.GetCurrentThreadId()}) catch std.posix.abort(); + writer.print("(thread {d})", .{bun.windows.GetCurrentThreadId()}) catch std.posix.abort(); } }, .mac, .linux => {}, @@ -706,7 +706,7 @@ pub fn handleRootError(err: anyerror, error_return_trace: ?*std.builtin.StackTra } pub fn panicImpl(msg: []const u8, error_return_trace: ?*std.builtin.StackTrace, begin_addr: ?usize) noreturn { - @setCold(true); + @branchHint(.cold); crashHandler( if (bun.strings.eqlComptime(msg, "reached unreachable code")) .{ .@"unreachable" = {} } @@ -796,10 +796,10 @@ pub fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void { } } - try std.posix.sigaction(std.posix.SIG.SEGV, act, null); - try std.posix.sigaction(std.posix.SIG.ILL, act, null); - try std.posix.sigaction(std.posix.SIG.BUS, act, null); - try std.posix.sigaction(std.posix.SIG.FPE, act, null); + std.posix.sigaction(std.posix.SIG.SEGV, act, null); + std.posix.sigaction(std.posix.SIG.ILL, act, null); + std.posix.sigaction(std.posix.SIG.BUS, act, null); + std.posix.sigaction(std.posix.SIG.FPE, act, null); } var windows_segfault_handle: ?windows.HANDLE = null; @@ -1146,19 +1146,19 @@ const StackLine = 
struct { fn callback(info: *std.posix.dl_phdr_info, _: usize, context: *CtxTy) !void { defer context.i += 1; - if (context.address < info.dlpi_addr) return; - const phdrs = info.dlpi_phdr[0..info.dlpi_phnum]; + if (context.address < info.addr) return; + const phdrs = info.phdr[0..info.phnum]; for (phdrs) |*phdr| { if (phdr.p_type != std.elf.PT_LOAD) continue; // Overflowing addition is used to handle the case of VSDOs // having a p_vaddr = 0xffffffffff700000 - const seg_start = info.dlpi_addr +% phdr.p_vaddr; + const seg_start = info.addr +% phdr.p_vaddr; const seg_end = seg_start + phdr.p_memsz; if (context.address >= seg_start and context.address < seg_end) { - // const name = bun.sliceTo(info.dlpi_name, 0) orelse ""; + // const name = bun.sliceTo(info.name, 0) orelse ""; context.result = .{ - .address = @intCast(context.address - info.dlpi_addr), + .address = @intCast(context.address - info.addr), .object = null, }; return error.Found; @@ -1470,7 +1470,7 @@ fn crash() noreturn { std.posix.SIG.HUP, std.posix.SIG.TERM, }) |sig| { - std.posix.sigaction(sig, &sigact, null) catch {}; + std.posix.sigaction(sig, &sigact, null); } @trap(); @@ -1481,7 +1481,7 @@ fn crash() noreturn { pub var verbose_error_trace = false; noinline fn coldHandleErrorReturnTrace(err_int_workaround_for_zig_ccall_bug: std.meta.Int(.unsigned, @bitSizeOf(anyerror)), trace: *std.builtin.StackTrace, comptime is_root: bool) void { - @setCold(true); + @branchHint(.cold); const err = @errorFromInt(err_int_workaround_for_zig_ccall_bug); // The format of the panic trace is slightly different in debug @@ -1582,9 +1582,7 @@ pub fn dumpStackTrace(trace: std.builtin.StackTrace) void { stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; break :attempt_dump; }; - var arena = bun.ArenaAllocator.init(bun.default_allocator); - defer arena.deinit(); - debug.writeStackTrace(trace, stderr, arena.allocator(), debug_info, 
std.io.tty.detectConfig(std.io.getStdErr())) catch |err| { + debug.writeStackTrace(trace, stderr, debug_info, std.io.tty.detectConfig(std.io.getStdErr())) catch |err| { stderr.print("Unable to dump stack trace: {s}\nFallback trace:\n", .{@errorName(err)}) catch return; break :attempt_dump; }; diff --git a/src/css/css_parser.zig b/src/css/css_parser.zig index 48a52dd51dd8a9..17a20262d6879c 100644 --- a/src/css/css_parser.zig +++ b/src/css/css_parser.zig @@ -243,6 +243,7 @@ pub const Location = css_rules.Location; pub const Error = Err(ParserError); pub fn Result(comptime T: type) type { + @setEvalBranchQuota(1_000_000); return Maybe(T, ParseError(ParserError)); } @@ -271,11 +272,11 @@ pub fn DefineListShorthand(comptime T: type) type { return struct {}; } -pub fn DefineShorthand(comptime T: type, comptime property_name: PropertyIdTag) type { +pub fn DefineShorthand(comptime T: type, comptime property_name: PropertyIdTag, comptime PropertyFieldMap: anytype) type { _ = property_name; // autofix // TODO: validate map, make sure each field is set // make sure each field is same index as in T - _ = T.PropertyFieldMap; + _ = PropertyFieldMap; return struct { /// Returns a shorthand from the longhand properties defined in the given declaration block. @@ -527,9 +528,9 @@ pub fn DefineSizeShorthand(comptime T: type, comptime V: type) type { pub fn DeriveParse(comptime T: type) type { const tyinfo = @typeInfo(T); - const is_union_enum = tyinfo == .Union; - const enum_type = if (comptime is_union_enum) @typeInfo(tyinfo.Union.tag_type.?) else tyinfo; - const enum_actual_type = if (comptime is_union_enum) tyinfo.Union.tag_type.? else T; + const is_union_enum = tyinfo == .@"union"; + const enum_type = if (comptime is_union_enum) @typeInfo(tyinfo.@"union".tag_type.?) else tyinfo; + const enum_actual_type = if (comptime is_union_enum) tyinfo.@"union".tag_type.? 
else T; const Map = bun.ComptimeEnumMap(enum_actual_type); @@ -541,7 +542,7 @@ pub fn DeriveParse(comptime T: type) type { var first_payload_index: ?usize = null; var payload_count: usize = 0; var void_count: usize = 0; - for (tyinfo.Union.fields, 0..) |field, i| { + for (tyinfo.@"union".fields, 0..) |field, i| { if (field.type == void) { void_count += 1; if (first_void_index == null) first_void_index = i; @@ -619,7 +620,7 @@ pub fn DeriveParse(comptime T: type) type { ) Result(T) { const last_payload_index = first_payload_index + payload_count - 1; if (comptime maybe_first_void_index == null) { - inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + inline for (tyinfo.@"union".fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { if (comptime (i == last_payload_index)) { return .{ .result = switch (generic.parseFor(field.type)(input)) { .result => |v| @unionInit(T, field.name, v), @@ -637,7 +638,7 @@ pub fn DeriveParse(comptime T: type) type { const void_fields = bun.meta.EnumFields(T)[first_void_index .. first_void_index + void_count]; if (comptime void_count == 1) { - const void_field = enum_type.Enum.fields[first_void_index]; + const void_field = enum_type.@"enum".fields[first_void_index]; // The field is declared before the payload fields. // So try to parse an ident matching the name of the field, then fallthrough // to parsing the payload fields. @@ -647,7 +648,7 @@ pub fn DeriveParse(comptime T: type) type { return .{ .result = @enumFromInt(void_field.value) }; } - inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + inline for (tyinfo.@"union".fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) 
|field, i| { if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { return .{ .result = switch (generic.parseFor(field.type)(input)) { .result => |v| @unionInit(T, field.name, v), @@ -659,7 +660,7 @@ pub fn DeriveParse(comptime T: type) type { } } } else { - inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + inline for (tyinfo.@"union".fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { return .{ .result = switch (generic.parseFor(field.type)(input)) { .result => |v| @unionInit(T, field.name, v), @@ -692,7 +693,7 @@ pub fn DeriveParse(comptime T: type) type { input.reset(&state); } - inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + inline for (tyinfo.@"union".fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { return .{ .result = switch (generic.parseFor(field.type)(input)) { .result => |v| @unionInit(T, field.name, v), @@ -704,7 +705,7 @@ pub fn DeriveParse(comptime T: type) type { } } } else if (comptime first_void_index > first_payload_index) { - inline for (tyinfo.Union.fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) |field, i| { + inline for (tyinfo.@"union".fields[first_payload_index .. first_payload_index + payload_count], first_payload_index..) 
|field, i| { if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { return .{ .result = switch (generic.parseFor(field.type)(input)) { .result => |v| @unionInit(T, field.name, v), @@ -742,7 +743,7 @@ pub fn DeriveParse(comptime T: type) type { // comptime payload_count: usize, // ) Result(T) { // const last_payload_index = first_payload_index + payload_count - 1; - // inline for (tyinfo.Union.fields[first_payload_index..], first_payload_index..) |field, i| { + // inline for (tyinfo.@"union".fields[first_payload_index..], first_payload_index..) |field, i| { // if (comptime (i == last_payload_index and last_payload_index > first_void_index)) { // return generic.parseFor(field.type)(input); // } @@ -773,24 +774,24 @@ pub fn DeriveParse(comptime T: type) type { pub fn DeriveToCss(comptime T: type) type { const tyinfo = @typeInfo(T); const enum_fields = bun.meta.EnumFields(T); - const is_enum_or_union_enum = tyinfo == .Union or tyinfo == .Enum; + const is_enum_or_union_enum = tyinfo == .@"union" or tyinfo == .@"enum"; return struct { pub fn toCss(this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { if (comptime is_enum_or_union_enum) { inline for (std.meta.fields(T), 0..) |field, i| { if (@intFromEnum(this.*) == enum_fields[i].value) { - if (comptime tyinfo == .Enum or field.type == void) { + if (comptime tyinfo == .@"enum" or field.type == void) { return dest.writeStr(enum_fields[i].name); } else if (comptime generic.hasToCss(field.type)) { return generic.toCss(field.type, &@field(this, field.name), W, dest); - } else if (@hasDecl(field.type, "__generateToCss") and @typeInfo(field.type) == .Struct) { + } else if (@hasDecl(field.type, "__generateToCss") and @typeInfo(field.type) == .@"struct") { const variant_fields = std.meta.fields(field.type); if (variant_fields.len > 1) { const last = variant_fields.len - 1; inline for (variant_fields, 0..) 
|variant_field, j| { // Unwrap it from the optional - if (@typeInfo(variant_field.type) == .Optional) { + if (@typeInfo(variant_field.type) == .optional) { if (@field(@field(this, field.name), variant_field.name)) |*value| { try value.toCss(W, dest); } @@ -899,10 +900,7 @@ pub fn DefineEnumProperty(comptime T: type) type { }; } -pub fn DeriveValueType(comptime T: type) type { - _ = @typeInfo(T).Enum; - - const ValueTypeMap = T.ValueTypeMap; +pub fn DeriveValueType(comptime T: type, comptime ValueTypeMap: anytype) type { const field_values: []const MediaFeatureType = field_values: { const fields = std.meta.fields(T); var mapping: [fields.len]MediaFeatureType = undefined; @@ -927,7 +925,7 @@ pub fn DeriveValueType(comptime T: type) type { } fn consume_until_end_of_block(block_type: BlockType, tokenizer: *Tokenizer) void { - @setCold(true); + @branchHint(.cold); var stack = SmallList(BlockType, 16){}; stack.appendAssumeCapacity(block_type); @@ -4086,7 +4084,7 @@ pub const Delimiters = packed struct(u8) { const NONE: Delimiters = .{}; - pub fn getDelimiter(comptime tag: @TypeOf(.EnumLiteral)) Delimiters { + pub fn getDelimiter(comptime tag: @TypeOf(.enum_literal)) Delimiters { var empty = Delimiters{}; @field(empty, @tagName(tag)) = true; return empty; @@ -4440,7 +4438,7 @@ const Tokenizer = struct { // Any other valid case here already resulted in IDHash. 
'0'...'9', '-' => true, else => false, - }) break :brk .{ .hash = this.consumeName() }; + }) break :brk .{ .unrestrictedhash = this.consumeName() }; break :brk .{ .delim = '#' }; }, '$' => brk: { @@ -5012,7 +5010,7 @@ const Tokenizer = struct { // todo_stuff.match_byte switch (this.nextByteUnchecked()) { ' ', '\t', '\n', '\r', FORM_FEED_BYTE => { - var value = .{ .borrowed = this.sliceFrom(start_pos) }; + var value: CopyOnWriteStr = .{ .borrowed = this.sliceFrom(start_pos) }; return this.consumeUrlEnd(start_pos, &value); }, ')' => { @@ -5475,7 +5473,7 @@ const TokenKind = enum { /// A [``](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "unrestricted" /// /// The value does not include the `#` marker. - hash, + unrestrictedhash, /// A [``](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "id" /// @@ -5599,7 +5597,7 @@ pub const Token = union(TokenKind) { /// A [``](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "unrestricted" /// /// The value does not include the `#` marker. 
- hash: []const u8, + unrestrictedhash: []const u8, /// A [``](https://drafts.csswg.org/css-syntax/#hash-token-diagram) with the type flag set to "id" /// @@ -5718,7 +5716,7 @@ pub const Token = union(TokenKind) { inline .ident, .function, .at_keyword, - .hash, + .unrestrictedhash, .idhash, .quoted_string, .bad_string, @@ -5765,13 +5763,9 @@ pub const Token = union(TokenKind) { try writer.writeAll("@"); try serializer.serializeIdentifier(this.at_keyword, writer); }, - .hash => { - try writer.writeAll("#"); - try serializer.serializeName(this.hash, writer); - }, - .idhash => { + .unrestrictedhash, .idhash => |v| { try writer.writeAll("#"); - try serializer.serializeName(this.idhash, writer); + try serializer.serializeName(v, writer); }, .quoted_string => |x| { try serializer.serializeName(x, writer); @@ -5864,7 +5858,7 @@ pub const Token = union(TokenKind) { try dest.writeStr("@"); return serializer.serializeIdentifier(value, dest) catch return dest.addFmtError(); }, - .hash => |value| { + .unrestrictedhash => |value| { try dest.writeStr("#"); return serializer.serializeName(value, dest) catch return dest.addFmtError(); }, diff --git a/src/css/generics.zig b/src/css/generics.zig index 11cbcd50ceb5b6..9c7fbb3a230b81 100644 --- a/src/css/generics.zig +++ b/src/css/generics.zig @@ -45,15 +45,15 @@ pub inline fn implementDeepClone(comptime T: type, this: *const T, allocator: Al return this.*; } - if (comptime @typeInfo(T) == .Pointer) { + if (comptime @typeInfo(T) == .pointer) { const TT = std.meta.Child(T); return implementEql(TT, this.*); } return switch (tyinfo) { - .Struct => { + .@"struct" => { var strct: T = undefined; - inline for (tyinfo.Struct.fields) |field| { + inline for (tyinfo.@"struct".fields) |field| { if (comptime canTransitivelyImplementDeepClone(field.type) and @hasDecl(field.type, "__generateDeepClone")) { @field(strct, field.name) = implementDeepClone(field.type, &field(this, field.name, allocator)); } else { @@ -62,8 +62,8 @@ pub inline fn 
implementDeepClone(comptime T: type, this: *const T, allocator: Al } return strct; }, - .Union => { - inline for (bun.meta.EnumFields(T), tyinfo.Union.fields) |enum_field, union_field| { + .@"union" => { + inline for (bun.meta.EnumFields(T), tyinfo.@"union".fields) |enum_field, union_field| { if (@intFromEnum(this.*) == enum_field.value) { if (comptime canTransitivelyImplementDeepClone(union_field.type) and @hasDecl(union_field.type, "__generateDeepClone")) { return @unionInit(T, enum_field.name, implementDeepClone(union_field.type, &@field(this, enum_field.name), allocator)); @@ -97,11 +97,11 @@ pub fn implementEql(comptime T: type, this: *const T, other: *const T) bool { if (comptime T == []const u8) { return bun.strings.eql(this.*, other.*); } - if (comptime @typeInfo(T) == .Pointer) { + if (comptime @typeInfo(T) == .pointer) { const TT = std.meta.Child(T); return implementEql(TT, this.*, other.*); } - if (comptime @typeInfo(T) == .Optional) { + if (comptime @typeInfo(T) == .optional) { const TT = std.meta.Child(T); if (this.* != null and other.* != null) return implementEql(TT, &this.*.?, &other.*.?); return false; @@ -110,9 +110,9 @@ pub fn implementEql(comptime T: type, this: *const T, other: *const T) bool { return VendorPrefix.eql(this.*, other.*); } return switch (tyinfo) { - .Optional => @compileError("Handled above, this means Zack wrote a bug."), - .Pointer => @compileError("Handled above, this means Zack wrote a bug."), - .Array => { + .optional => @compileError("Handled above, this means Zack wrote a bug."), + .pointer => @compileError("Handled above, this means Zack wrote a bug."), + .array => { const Child = std.meta.Child(T); if (comptime bun.meta.isSimpleEqlType(Child)) { return std.mem.eql(Child, &this.*, &other.*); @@ -129,14 +129,14 @@ pub fn implementEql(comptime T: type, this: *const T, other: *const T) bool { } return true; }, - .Struct => { - inline for (tyinfo.Struct.fields) |field| { + .@"struct" => { + inline for 
(tyinfo.@"struct".fields) |field| { if (!eql(field.type, &@field(this, field.name), &@field(other, field.name))) return false; } return true; }, - .Union => { - if (tyinfo.Union.tag_type == null) @compileError("Unions must have a tag type"); + .@"union" => { + if (tyinfo.@"union".tag_type == null) @compileError("Unions must have a tag type"); if (@intFromEnum(this.*) != @intFromEnum(other.*)) return false; const enum_fields = bun.meta.EnumFields(T); inline for (enum_fields, std.meta.fields(T)) |enum_field, union_field| { @@ -171,42 +171,42 @@ pub fn implementHash(comptime T: type, this: *const T, hasher: *std.hash.Wyhash) }; bun.writeAnyToHasher(hasher, list.len); for (list) |*item| { - hash(tyinfo.Array.child, item, hasher); + hash(tyinfo.array.child, item, hasher); } return; } if (comptime T == []const u8) { return hasher.update(this.*); } - if (comptime @typeInfo(T) == .Pointer) { + if (comptime @typeInfo(T) == .pointer) { @compileError("Invalid type for implementHash(): " ++ @typeName(T)); } - if (comptime @typeInfo(T) == .Optional) { + if (comptime @typeInfo(T) == .optional) { @compileError("Invalid type for implementHash(): " ++ @typeName(T)); } return switch (tyinfo) { - .Optional => { + .optional => { if (this.* == null) { bun.writeAnyToHasher(hasher, "null"); } else { bun.writeAnyToHasher(hasher, "some"); - hash(tyinfo.Optional.child, &this.*.?, hasher); + hash(tyinfo.optional.child, &this.*.?, hasher); } }, - .Pointer => { - hash(tyinfo.Pointer.child, &this.*, hasher); + .pointer => { + hash(tyinfo.pointer.child, &this.*, hasher); }, - .Array => { + .array => { bun.writeAnyToHasher(hasher, this.len); for (this.*[0..]) |*item| { - hash(tyinfo.Array.child, item, hasher); + hash(tyinfo.array.child, item, hasher); } }, - .Struct => { - inline for (tyinfo.Struct.fields) |field| { + .@"struct" => { + inline for (tyinfo.@"struct".fields) |field| { if (comptime hasHash(field.type)) { hash(field.type, &@field(this, field.name), hasher); - } else if 
(@hasDecl(field.type, "__generateHash") and @typeInfo(field.type) == .Struct) { + } else if (@hasDecl(field.type, "__generateHash") and @typeInfo(field.type) == .@"struct") { implementHash(field.type, &@field(this, field.name), hasher); } else { @compileError("Can't hash these fields: " ++ @typeName(field.type) ++ ". On " ++ @typeName(T)); @@ -214,11 +214,11 @@ pub fn implementHash(comptime T: type, this: *const T, hasher: *std.hash.Wyhash) } return; }, - .Enum => { + .@"enum" => { bun.writeAnyToHasher(hasher, @intFromEnum(this.*)); }, - .Union => { - if (tyinfo.Union.tag_type == null) @compileError("Unions must have a tag type"); + .@"union" => { + if (tyinfo.@"union".tag_type == null) @compileError("Unions must have a tag type"); bun.writeAnyToHasher(hasher, @intFromEnum(this.*)); const enum_fields = bun.meta.EnumFields(T); inline for (enum_fields, std.meta.fields(T)) |enum_field, union_field| { @@ -226,7 +226,7 @@ pub fn implementHash(comptime T: type, this: *const T, hasher: *std.hash.Wyhash) const field = union_field; if (comptime hasHash(field.type)) { hash(field.type, &@field(this, field.name), hasher); - } else if (@hasDecl(field.type, "__generateHash") and @typeInfo(field.type) == .Struct) { + } else if (@hasDecl(field.type, "__generateHash") and @typeInfo(field.type) == .@"struct") { implementHash(field.type, &@field(this, field.name), hasher); } else { @compileError("Can't hash these fields: " ++ @typeName(field.type) ++ ". 
On " ++ @typeName(T)); @@ -253,7 +253,7 @@ pub fn slice(comptime T: type, val: *const T) []const bun.meta.looksLikeListCont pub fn isCompatible(comptime T: type, val: *const T, browsers: bun.css.targets.Browsers) bool { if (@hasDecl(T, "isCompatible")) return T.isCompatible(val, browsers); const tyinfo = @typeInfo(T); - if (tyinfo == .Pointer) { + if (tyinfo == .pointer) { const TT = std.meta.Child(T); return isCompatible(TT, val.*, browsers); } @@ -291,14 +291,14 @@ pub inline fn parseWithOptions(comptime T: type, input: *Parser, options: *const } pub inline fn parse(comptime T: type, input: *Parser) Result(T) { - if (comptime @typeInfo(T) == .Pointer) { + if (comptime @typeInfo(T) == .pointer) { const TT = std.meta.Child(T); return switch (parse(TT, input)) { .result => |v| .{ .result = bun.create(input.allocator(), TT, v) }, .err => |e| .{ .err = e }, }; } - if (comptime @typeInfo(T) == .Optional) { + if (comptime @typeInfo(T) == .optional) { const TT = std.meta.Child(T); return .{ .result = input.tryParse(parseFor(TT), .{}).asValue() }; } @@ -334,11 +334,11 @@ pub inline fn parseFor(comptime T: type) @TypeOf(struct { pub fn hasToCss(comptime T: type) bool { const tyinfo = @typeInfo(T); if (comptime T == []const u8) return false; - if (tyinfo == .Pointer) { + if (tyinfo == .pointer) { const TT = std.meta.Child(T); return hasToCss(TT); } - if (tyinfo == .Optional) { + if (tyinfo == .optional) { const TT = std.meta.Child(T); return hasToCss(TT); } @@ -356,11 +356,11 @@ pub fn hasToCss(comptime T: type) bool { } pub inline fn toCss(comptime T: type, this: *const T, comptime W: type, dest: *Printer(W)) PrintErr!void { - if (@typeInfo(T) == .Pointer) { + if (@typeInfo(T) == .pointer) { const TT = std.meta.Child(T); return toCss(TT, this.*, W, dest); } - if (@typeInfo(T) == .Optional) { + if (@typeInfo(T) == .optional) { const TT = std.meta.Child(T); if (this.*) |*val| { @@ -397,29 +397,30 @@ pub fn eqlList(comptime T: type, lhs: *const ArrayList(T), rhs: *const 
ArrayList pub fn canTransitivelyImplementEql(comptime T: type) bool { return switch (@typeInfo(T)) { - .Struct, .Union => true, + .@"struct", .@"union" => true, else => false, }; } pub inline fn eql(comptime T: type, lhs: *const T, rhs: *const T) bool { const tyinfo = comptime @typeInfo(T); - if (comptime tyinfo == .Pointer) { + @setEvalBranchQuota(10_000); + if (comptime tyinfo == .pointer) { if (comptime T == []const u8) return bun.strings.eql(lhs.*, rhs.*); - if (comptime tyinfo.Pointer.size == .One) { + if (comptime tyinfo.pointer.size == .one) { const TT = std.meta.Child(T); return eql(TT, lhs.*, rhs.*); - } else if (comptime tyinfo.Pointer.size == .Slice) { + } else if (comptime tyinfo.pointer.size == .slice) { if (lhs.*.len != rhs.*.len) return false; for (lhs.*[0..], rhs.*[0..]) |*a, *b| { - if (!eql(tyinfo.Pointer.child, a, b)) return false; + if (!eql(tyinfo.pointer.child, a, b)) return false; } return true; } else { - @compileError("Unsupported pointer size: " ++ @tagName(tyinfo.Pointer.size) ++ " (" ++ @typeName(T) ++ ")"); + @compileError("Unsupported pointer size: " ++ @tagName(tyinfo.pointer.size) ++ " (" ++ @typeName(T) ++ ")"); } } - if (comptime tyinfo == .Optional) { + if (comptime tyinfo == .optional) { const TT = std.meta.Child(T); if (lhs.* == null and rhs.* == null) return true; if (lhs.* != null and rhs.* != null) return eql(TT, &lhs.*.?, &rhs.*.?); @@ -450,32 +451,32 @@ pub inline fn eql(comptime T: type, lhs: *const T, rhs: *const T) bool { pub fn canTransitivelyImplementDeepClone(comptime T: type) bool { return switch (@typeInfo(T)) { - .Struct, .Union => true, + .@"struct", .@"union" => true, else => false, }; } pub inline fn deepClone(comptime T: type, this: *const T, allocator: Allocator) T { const tyinfo = comptime @typeInfo(T); - if (comptime tyinfo == .Pointer) { - if (comptime tyinfo.Pointer.size == .One) { + if (comptime tyinfo == .pointer) { + if (comptime tyinfo.pointer.size == .one) { const TT = std.meta.Child(T); return 
bun.create(allocator, TT, deepClone(TT, this.*, allocator)); } - if (comptime tyinfo.Pointer.size == .Slice) { - var slc = allocator.alloc(tyinfo.Pointer.child, this.len) catch bun.outOfMemory(); - if (comptime bun.meta.isSimpleCopyType(tyinfo.Pointer.child) or tyinfo.Pointer.child == []const u8) { + if (comptime tyinfo.pointer.size == .slice) { + var slc = allocator.alloc(tyinfo.pointer.child, this.len) catch bun.outOfMemory(); + if (comptime bun.meta.isSimpleCopyType(tyinfo.pointer.child) or tyinfo.pointer.child == []const u8) { @memcpy(slc, this.*); } else { for (this.*, 0..) |*e, i| { - slc[i] = deepClone(tyinfo.Pointer.child, e, allocator); + slc[i] = deepClone(tyinfo.pointer.child, e, allocator); } } return slc; } - @compileError("Deep clone not supported for this kind of pointer: " ++ @tagName(tyinfo.Pointer.size) ++ " (" ++ @typeName(T) ++ ")"); + @compileError("Deep clone not supported for this kind of pointer: " ++ @tagName(tyinfo.pointer.size) ++ " (" ++ @typeName(T) ++ ")"); } - if (comptime tyinfo == .Optional) { + if (comptime tyinfo == .optional) { const TT = std.meta.Child(T); if (this.* != null) return deepClone(TT, &this.*.?, allocator); return null; @@ -607,11 +608,11 @@ pub fn hasHash(comptime T: type) bool { const tyinfo = @typeInfo(T); if (comptime T == []const u8) return true; if (comptime bun.meta.isSimpleEqlType(T)) return true; - if (tyinfo == .Pointer) { + if (tyinfo == .pointer) { const TT = std.meta.Child(T); return hasHash(TT); } - if (tyinfo == .Optional) { + if (tyinfo == .optional) { const TT = std.meta.Child(T); return hasHash(TT); } @@ -630,11 +631,11 @@ pub fn hasHash(comptime T: type) bool { pub fn hash(comptime T: type, this: *const T, hasher: *std.hash.Wyhash) void { if (comptime T == void) return; const tyinfo = @typeInfo(T); - if (comptime tyinfo == .Pointer and T != []const u8) { + if (comptime tyinfo == .pointer and T != []const u8) { const TT = std.meta.Child(T); - if (tyinfo.Pointer.size == .One) { + if 
(tyinfo.pointer.size == .one) { return hash(TT, this.*, hasher); - } else if (tyinfo.Pointer.size == .Slice) { + } else if (tyinfo.pointer.size == .slice) { for (this.*) |*item| { hash(TT, item, hasher); } @@ -643,7 +644,7 @@ pub fn hash(comptime T: type, this: *const T, hasher: *std.hash.Wyhash) void { @compileError("Can't hash this pointer type: " ++ @typeName(T)); } } - if (comptime @typeInfo(T) == .Optional) { + if (comptime @typeInfo(T) == .optional) { const TT = std.meta.Child(T); if (this.* != null) return hash(TT, &this.*.?, hasher); return; diff --git a/src/css/media_query.zig b/src/css/media_query.zig index 25a5c58c619d4d..1f4980f42d7e95 100644 --- a/src/css/media_query.zig +++ b/src/css/media_query.zig @@ -708,7 +708,7 @@ pub const MediaFeatureId = enum { /// The non-standard -moz-device-pixel-ratio media feature. @"-moz-device-pixel-ratio", - pub usingnamespace css.DeriveValueType(@This()); + pub usingnamespace css.DeriveValueType(@This(), ValueTypeMap); pub const ValueTypeMap = .{ .width = MediaFeatureType.length, @@ -1510,7 +1510,7 @@ pub fn MediaFeatureName(comptime FeatureId: type) type { // this only works if FeatureId doesn't hold any references to the input string. // i.e. it is an enum comptime { - std.debug.assert(@typeInfo(FeatureId) == .Enum); + std.debug.assert(@typeInfo(FeatureId) == .@"enum"); } input.allocator().free(final_name); }; diff --git a/src/css/properties/align.zig b/src/css/properties/align.zig index af5fd00cb4d8a6..60e6a68eb357af 100644 --- a/src/css/properties/align.zig +++ b/src/css/properties/align.zig @@ -747,7 +747,7 @@ pub const Gap = struct { /// The column gap. column: GapValue, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.gap); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.gap, PropertyFieldMap); pub const PropertyFieldMap = .{ .row = "row-gap", @@ -790,7 +790,7 @@ pub const PlaceItems = struct { /// The item justification. 
justify: JustifyItems, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-items"); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-items", PropertyFieldMap); pub const PropertyFieldMap = .{ .@"align" = "align-items", @@ -862,7 +862,7 @@ pub const PlaceSelf = struct { /// The item justification. justify: JustifySelf, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-self"); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-self", PropertyFieldMap); pub const PropertyFieldMap = .{ .@"align" = "align-self", @@ -956,7 +956,7 @@ pub const PlaceContent = struct { /// The content justification. justify: JustifyContent, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-content"); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"place-content", PropertyFieldMap); pub const PropertyFieldMap = .{ .@"align" = css.PropertyIdTag.@"align-content", diff --git a/src/css/properties/background.zig b/src/css/properties/background.zig index 2447a370fa6d68..2946f44b083bdc 100644 --- a/src/css/properties/background.zig +++ b/src/css/properties/background.zig @@ -524,63 +524,54 @@ pub const AspectRatio = struct { }; pub const BackgroundProperty = packed struct(u16) { - @"background-color": bool = false, - @"background-image": bool = false, - @"background-position-x": bool = false, - @"background-position-y": bool = false, - @"background-repeat": bool = false, - @"background-size": bool = false, - @"background-attachment": bool = false, - @"background-origin": bool = false, - @"background-clip": bool = false, + color: bool = false, + image: bool = false, + @"position-x": bool = false, + @"position-y": bool = false, + repeat: bool = false, + size: bool = false, + attachment: bool = false, + origin: bool = false, + clip: bool = false, __unused: u7 = 0, pub usingnamespace css.Bitflags(@This()); - pub const @"background-color" = 
BackgroundProperty{ .@"background-color" = true }; - pub const @"background-image" = BackgroundProperty{ .@"background-image" = true }; - pub const @"background-position-x" = BackgroundProperty{ .@"background-position-x" = true }; - pub const @"background-position-y" = BackgroundProperty{ .@"background-position-y" = true }; - pub const @"background-position" = BackgroundProperty{ .@"background-position-x" = true, .@"background-position-y" = true }; - pub const @"background-repeat" = BackgroundProperty{ .@"background-repeat" = true }; - pub const @"background-size" = BackgroundProperty{ .@"background-size" = true }; - pub const @"background-attachment" = BackgroundProperty{ .@"background-attachment" = true }; - pub const @"background-origin" = BackgroundProperty{ .@"background-origin" = true }; - pub const @"background-clip" = BackgroundProperty{ .@"background-clip" = true }; + pub const @"background-color" = BackgroundProperty{ .color = true }; + pub const @"background-image" = BackgroundProperty{ .image = true }; + pub const @"background-position-x" = BackgroundProperty{ .@"position-x" = true }; + pub const @"background-position-y" = BackgroundProperty{ .@"position-y" = true }; + pub const @"background-position" = BackgroundProperty{ .@"position-x" = true, .@"position-y" = true }; + pub const @"background-repeat" = BackgroundProperty{ .repeat = true }; + pub const @"background-size" = BackgroundProperty{ .size = true }; + pub const @"background-attachment" = BackgroundProperty{ .attachment = true }; + pub const @"background-origin" = BackgroundProperty{ .origin = true }; + pub const @"background-clip" = BackgroundProperty{ .clip = true }; + pub const background = BackgroundProperty{ - .@"background-color" = true, - .@"background-image" = true, - .@"background-position-x" = true, - .@"background-position-y" = true, - .@"background-repeat" = true, - .@"background-size" = true, - .@"background-attachment" = true, - .@"background-origin" = true, - .@"background-clip" 
= true, + .color = true, + .image = true, + .@"position-x" = true, + .@"position-y" = true, + .repeat = true, + .size = true, + .attachment = true, + .origin = true, + .clip = true, }; - pub fn fromPropertyId(property_id: css.PropertyId) ?BackgroundProperty { + pub fn tryFromPropertyId(property_id: css.PropertyId) ?BackgroundProperty { return switch (property_id) { - .@"background-color" => BackgroundProperty{ .@"background-color" = true }, - .@"background-image" => BackgroundProperty{ .@"background-image" = true }, - .@"background-position-x" => BackgroundProperty{ .@"background-position-x" = true }, - .@"background-position-y" => BackgroundProperty{ .@"background-position-y" = true }, - .@"background-position" => BackgroundProperty{ .@"background-position-x" = true, .@"background-position-y" = true }, - .@"background-repeat" => BackgroundProperty{ .@"background-repeat" = true }, - .@"background-size" => BackgroundProperty{ .@"background-size" = true }, - .@"background-attachment" => BackgroundProperty{ .@"background-attachment" = true }, - .@"background-origin" => BackgroundProperty{ .@"background-origin" = true }, - .background => BackgroundProperty{ - .@"background-color" = true, - .@"background-image" = true, - .@"background-position-x" = true, - .@"background-position-y" = true, - .@"background-repeat" = true, - .@"background-size" = true, - .@"background-attachment" = true, - .@"background-origin" = true, - .@"background-clip" = true, - }, + .@"background-color" => @"background-color", + .@"background-image" => @"background-image", + .@"background-position-x" => @"background-position-x", + .@"background-position-y" => @"background-position-y", + .@"background-position" => @"background-position", + .@"background-repeat" => @"background-repeat", + .@"background-size" => @"background-size", + .@"background-attachment" => @"background-attachment", + .@"background-origin" => @"background-origin", + .background => background, else => null, }; } @@ -726,7 +717,7 @@ 
pub const BackgroundHandler = struct { this.flush(allocator, dest, context); var unparsed = val.deepClone(allocator); context.addUnparsedFallbacks(&unparsed); - if (BackgroundProperty.fromPropertyId(val.property_id)) |prop| { + if (BackgroundProperty.tryFromPropertyId(val.property_id)) |prop| { this.flushed_properties.insert(prop); } diff --git a/src/css/properties/border.zig b/src/css/properties/border.zig index 091fb3ef0390a0..71a2137065c265 100644 --- a/src/css/properties/border.zig +++ b/src/css/properties/border.zig @@ -551,79 +551,79 @@ const BorderShorthand = struct { }; const BorderProperty = packed struct(u32) { - @"border-top-color": bool = false, - @"border-bottom-color": bool = false, - @"border-left-color": bool = false, - @"border-right-color": bool = false, - @"border-block-start-color": bool = false, - @"border-block-end-color": bool = false, - @"border-inline-start-color": bool = false, - @"border-inline-end-color": bool = false, - @"border-top-width": bool = false, - @"border-bottom-width": bool = false, - @"border-left-width": bool = false, - @"border-right-width": bool = false, - @"border-block-start-width": bool = false, - @"border-block-end-width": bool = false, - @"border-inline-start-width": bool = false, - @"border-inline-end-width": bool = false, - @"border-top-style": bool = false, - @"border-bottom-style": bool = false, - @"border-left-style": bool = false, - @"border-right-style": bool = false, - @"border-block-start-style": bool = false, - @"border-block-end-style": bool = false, - @"border-inline-start-style": bool = false, - @"border-inline-end-style": bool = false, + @"top-color": bool = false, + @"bottom-color": bool = false, + @"left-color": bool = false, + @"right-color": bool = false, + @"block-start-color": bool = false, + @"block-end-color": bool = false, + @"inline-start-color": bool = false, + @"inline-end-color": bool = false, + @"top-width": bool = false, + @"bottom-width": bool = false, + @"left-width": bool = false, + 
@"right-width": bool = false, + @"block-start-width": bool = false, + @"block-end-width": bool = false, + @"inline-start-width": bool = false, + @"inline-end-width": bool = false, + @"top-style": bool = false, + @"bottom-style": bool = false, + @"left-style": bool = false, + @"right-style": bool = false, + @"block-start-style": bool = false, + @"block-end-style": bool = false, + @"inline-start-style": bool = false, + @"inline-end-style": bool = false, __unused: u8 = 0, pub usingnamespace css.Bitflags(@This()); - const @"border-top-color" = BorderProperty{ .@"border-top-color" = true }; - const @"border-bottom-color" = BorderProperty{ .@"border-bottom-color" = true }; - const @"border-left-color" = BorderProperty{ .@"border-left-color" = true }; - const @"border-right-color" = BorderProperty{ .@"border-right-color" = true }; - const @"border-block-start-color" = BorderProperty{ .@"border-block-start-color" = true }; - const @"border-block-end-color" = BorderProperty{ .@"border-block-end-color" = true }; - const @"border-inline-start-color" = BorderProperty{ .@"border-inline-start-color" = true }; - const @"border-inline-end-color" = BorderProperty{ .@"border-inline-end-color" = true }; - const @"border-top-width" = BorderProperty{ .@"border-top-width" = true }; - const @"border-bottom-width" = BorderProperty{ .@"border-bottom-width" = true }; - const @"border-left-width" = BorderProperty{ .@"border-left-width" = true }; - const @"border-right-width" = BorderProperty{ .@"border-right-width" = true }; - const @"border-block-start-width" = BorderProperty{ .@"border-block-start-width" = true }; - const @"border-block-end-width" = BorderProperty{ .@"border-block-end-width" = true }; - const @"border-inline-start-width" = BorderProperty{ .@"border-inline-start-width" = true }; - const @"border-inline-end-width" = BorderProperty{ .@"border-inline-end-width" = true }; - const @"border-top-style" = BorderProperty{ .@"border-top-style" = true }; - const @"border-bottom-style" 
= BorderProperty{ .@"border-bottom-style" = true }; - const @"border-left-style" = BorderProperty{ .@"border-left-style" = true }; - const @"border-right-style" = BorderProperty{ .@"border-right-style" = true }; - const @"border-block-start-style" = BorderProperty{ .@"border-block-start-style" = true }; - const @"border-block-end-style" = BorderProperty{ .@"border-block-end-style" = true }; - const @"border-inline-start-style" = BorderProperty{ .@"border-inline-start-style" = true }; - const @"border-inline-end-style" = BorderProperty{ .@"border-inline-end-style" = true }; - - const @"border-block-color" = BorderProperty{ .@"border-block-start-color" = true, .@"border-block-end-color" = true }; - const @"border-inline-color" = BorderProperty{ .@"border-inline-start-color" = true, .@"border-inline-end-color" = true }; - const @"border-block-width" = BorderProperty{ .@"border-block-start-width" = true, .@"border-block-end-width" = true }; - const @"border-inline-width" = BorderProperty{ .@"border-inline-start-width" = true, .@"border-inline-end-width" = true }; - const @"border-block-style" = BorderProperty{ .@"border-block-start-style" = true, .@"border-block-end-style" = true }; - const @"border-inline-style" = BorderProperty{ .@"border-inline-start-style" = true, .@"border-inline-end-style" = true }; - const @"border-top" = BorderProperty{ .@"border-top-color" = true, .@"border-top-width" = true, .@"border-top-style" = true }; - const @"border-bottom" = BorderProperty{ .@"border-bottom-color" = true, .@"border-bottom-width" = true, .@"border-bottom-style" = true }; - const @"border-left" = BorderProperty{ .@"border-left-color" = true, .@"border-left-width" = true, .@"border-left-style" = true }; - const @"border-right" = BorderProperty{ .@"border-right-color" = true, .@"border-right-width" = true, .@"border-right-style" = true }; - const @"border-block-start" = BorderProperty{ .@"border-block-start-color" = true, .@"border-block-start-width" = true, 
.@"border-block-start-style" = true }; - const @"border-block-end" = BorderProperty{ .@"border-block-end-color" = true, .@"border-block-end-width" = true, .@"border-block-end-style" = true }; - const @"border-inline-start" = BorderProperty{ .@"border-inline-start-color" = true, .@"border-inline-start-width" = true, .@"border-inline-start-style" = true }; - const @"border-inline-end" = BorderProperty{ .@"border-inline-end-color" = true, .@"border-inline-end-width" = true, .@"border-inline-end-style" = true }; - const @"border-block" = BorderProperty{ .@"border-block-start-color" = true, .@"border-block-end-color" = true, .@"border-block-start-width" = true, .@"border-block-end-width" = true, .@"border-block-start-style" = true, .@"border-block-end-style" = true }; - const @"border-inline" = BorderProperty{ .@"border-inline-start-color" = true, .@"border-inline-end-color" = true, .@"border-inline-start-width" = true, .@"border-inline-end-width" = true, .@"border-inline-start-style" = true, .@"border-inline-end-style" = true }; - const @"border-width" = BorderProperty{ .@"border-left-width" = true, .@"border-right-width" = true, .@"border-top-width" = true, .@"border-bottom-width" = true }; - const @"border-style" = BorderProperty{ .@"border-left-style" = true, .@"border-right-style" = true, .@"border-top-style" = true, .@"border-bottom-style" = true }; - const @"border-color" = BorderProperty{ .@"border-left-color" = true, .@"border-right-color" = true, .@"border-top-color" = true, .@"border-bottom-color" = true }; - const border = BorderProperty{ .@"border-left-width" = true, .@"border-right-width" = true, .@"border-top-width" = true, .@"border-bottom-width" = true, .@"border-left-style" = true, .@"border-right-style" = true, .@"border-top-style" = true, .@"border-bottom-style" = true, .@"border-left-color" = true, .@"border-right-color" = true, .@"border-top-color" = true, .@"border-bottom-color" = true }; + const @"border-top-color" = BorderProperty{ .@"top-color" 
= true }; + const @"border-bottom-color" = BorderProperty{ .@"bottom-color" = true }; + const @"border-left-color" = BorderProperty{ .@"left-color" = true }; + const @"border-right-color" = BorderProperty{ .@"right-color" = true }; + const @"border-block-start-color" = BorderProperty{ .@"block-start-color" = true }; + const @"border-block-end-color" = BorderProperty{ .@"block-end-color" = true }; + const @"border-inline-start-color" = BorderProperty{ .@"inline-start-color" = true }; + const @"border-inline-end-color" = BorderProperty{ .@"inline-end-color" = true }; + const @"border-top-width" = BorderProperty{ .@"top-width" = true }; + const @"border-bottom-width" = BorderProperty{ .@"bottom-width" = true }; + const @"border-left-width" = BorderProperty{ .@"left-width" = true }; + const @"border-right-width" = BorderProperty{ .@"right-width" = true }; + const @"border-block-start-width" = BorderProperty{ .@"block-start-width" = true }; + const @"border-block-end-width" = BorderProperty{ .@"block-end-width" = true }; + const @"border-inline-start-width" = BorderProperty{ .@"inline-start-width" = true }; + const @"border-inline-end-width" = BorderProperty{ .@"inline-end-width" = true }; + const @"border-top-style" = BorderProperty{ .@"top-style" = true }; + const @"border-bottom-style" = BorderProperty{ .@"bottom-style" = true }; + const @"border-left-style" = BorderProperty{ .@"left-style" = true }; + const @"border-right-style" = BorderProperty{ .@"right-style" = true }; + const @"border-block-start-style" = BorderProperty{ .@"block-start-style" = true }; + const @"border-block-end-style" = BorderProperty{ .@"block-end-style" = true }; + const @"border-inline-start-style" = BorderProperty{ .@"inline-start-style" = true }; + const @"border-inline-end-style" = BorderProperty{ .@"inline-end-style" = true }; + + const @"border-block-color" = BorderProperty{ .@"block-start-color" = true, .@"block-end-color" = true }; + const @"border-inline-color" = BorderProperty{ 
.@"inline-start-color" = true, .@"inline-end-color" = true }; + const @"border-block-width" = BorderProperty{ .@"block-start-width" = true, .@"block-end-width" = true }; + const @"border-inline-width" = BorderProperty{ .@"inline-start-width" = true, .@"inline-end-width" = true }; + const @"border-block-style" = BorderProperty{ .@"block-start-style" = true, .@"block-end-style" = true }; + const @"border-inline-style" = BorderProperty{ .@"inline-start-style" = true, .@"inline-end-style" = true }; + const @"border-top" = BorderProperty{ .@"top-color" = true, .@"top-width" = true, .@"top-style" = true }; + const @"border-bottom" = BorderProperty{ .@"bottom-color" = true, .@"bottom-width" = true, .@"bottom-style" = true }; + const @"border-left" = BorderProperty{ .@"left-color" = true, .@"left-width" = true, .@"left-style" = true }; + const @"border-right" = BorderProperty{ .@"right-color" = true, .@"right-width" = true, .@"right-style" = true }; + const @"border-block-start" = BorderProperty{ .@"block-start-color" = true, .@"block-start-width" = true, .@"block-start-style" = true }; + const @"border-block-end" = BorderProperty{ .@"block-end-color" = true, .@"block-end-width" = true, .@"block-end-style" = true }; + const @"border-inline-start" = BorderProperty{ .@"inline-start-color" = true, .@"inline-start-width" = true, .@"inline-start-style" = true }; + const @"border-inline-end" = BorderProperty{ .@"inline-end-color" = true, .@"inline-end-width" = true, .@"inline-end-style" = true }; + const @"border-block" = BorderProperty{ .@"block-start-color" = true, .@"block-end-color" = true, .@"block-start-width" = true, .@"block-end-width" = true, .@"block-start-style" = true, .@"block-end-style" = true }; + const @"border-inline" = BorderProperty{ .@"inline-start-color" = true, .@"inline-end-color" = true, .@"inline-start-width" = true, .@"inline-end-width" = true, .@"inline-start-style" = true, .@"inline-end-style" = true }; + const @"border-width" = BorderProperty{ 
.@"left-width" = true, .@"right-width" = true, .@"top-width" = true, .@"bottom-width" = true }; + const @"border-style" = BorderProperty{ .@"left-style" = true, .@"right-style" = true, .@"top-style" = true, .@"bottom-style" = true }; + const @"border-color" = BorderProperty{ .@"left-color" = true, .@"right-color" = true, .@"top-color" = true, .@"bottom-color" = true }; + const border = BorderProperty{ .@"left-width" = true, .@"right-width" = true, .@"top-width" = true, .@"bottom-width" = true, .@"left-style" = true, .@"right-style" = true, .@"top-style" = true, .@"bottom-style" = true, .@"left-color" = true, .@"right-color" = true, .@"top-color" = true, .@"bottom-color" = true }; pub fn tryFromPropertyId(property_id: css.PropertyIdTag) ?@This() { @setEvalBranchQuota(10000); diff --git a/src/css/properties/border_image.zig b/src/css/properties/border_image.zig index 6d21e1e25ff68d..e47112e513714e 100644 --- a/src/css/properties/border_image.zig +++ b/src/css/properties/border_image.zig @@ -40,7 +40,7 @@ pub const BorderImage = struct { /// How the border image is scaled and tiled. 
repeat: BorderImageRepeat, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-image"); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-image", PropertyFieldMap); pub const PropertyFieldMap = .{ .source = css.PropertyIdTag.@"border-image-source", @@ -393,33 +393,33 @@ pub const BorderImageSlice = struct { }; pub const BorderImageProperty = packed struct(u8) { - @"border-image-source": bool = false, - @"border-image-slice": bool = false, - @"border-image-width": bool = false, - @"border-image-outset": bool = false, - @"border-image-repeat": bool = false, + source: bool = false, + slice: bool = false, + width: bool = false, + outset: bool = false, + repeat: bool = false, __unused: u3 = 0, - pub const @"border-image-source" = BorderImageProperty{ .@"border-image-source" = true }; - pub const @"border-image-slice" = BorderImageProperty{ .@"border-image-slice" = true }; - pub const @"border-image-width" = BorderImageProperty{ .@"border-image-width" = true }; - pub const @"border-image-outset" = BorderImageProperty{ .@"border-image-outset" = true }; - pub const @"border-image-repeat" = BorderImageProperty{ .@"border-image-repeat" = true }; + pub const @"border-image-source" = BorderImageProperty{ .source = true }; + pub const @"border-image-slice" = BorderImageProperty{ .slice = true }; + pub const @"border-image-width" = BorderImageProperty{ .width = true }; + pub const @"border-image-outset" = BorderImageProperty{ .outset = true }; + pub const @"border-image-repeat" = BorderImageProperty{ .repeat = true }; pub usingnamespace css.Bitflags(@This()); pub const @"border-image" = BorderImageProperty{ - .@"border-image-source" = true, - .@"border-image-slice" = true, - .@"border-image-width" = true, - .@"border-image-outset" = true, - .@"border-image-repeat" = true, + .source = true, + .slice = true, + .width = true, + .outset = true, + .repeat = true, }; pub fn tryFromPropertyId(property_id: css.PropertyIdTag) 
?BorderImageProperty { inline for (std.meta.fields(BorderImageProperty)) |field| { if (comptime std.mem.eql(u8, field.name, "__unused")) continue; - const desired = comptime @field(css.PropertyIdTag, field.name); + const desired = comptime @field(css.PropertyIdTag, "border-image-" ++ field.name); if (desired == property_id) { var result: BorderImageProperty = .{}; @field(result, field.name) = true; @@ -599,7 +599,7 @@ pub const BorderImageHandler = struct { this.flushed_properties.insert(BorderImageProperty.@"border-image"); } else { if (source) |*mut_source| { - if (!this.flushed_properties.contains(BorderImageProperty{ .@"border-image-source" = true })) { + if (!this.flushed_properties.contains(BorderImageProperty.@"border-image-source")) { for (mut_source.getFallbacks(allocator, context.targets).slice()) |fallback| { dest.append(allocator, Property{ .@"border-image-source" = fallback }) catch bun.outOfMemory(); } diff --git a/src/css/properties/border_radius.zig b/src/css/properties/border_radius.zig index d2c9843c24309b..ec35e73618d1f0 100644 --- a/src/css/properties/border_radius.zig +++ b/src/css/properties/border_radius.zig @@ -38,7 +38,7 @@ pub const BorderRadius = struct { /// The x and y radius values for the bottom left corner. 
bottom_left: Size2D(LengthPercentage), - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-radius"); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"border-radius", PropertyFieldMap); pub const PropertyFieldMap = .{ .top_left = "border-top-left-radius", diff --git a/src/css/properties/custom.zig b/src/css/properties/custom.zig index c94ed570fdd6d7..d955f6566704f4 100644 --- a/src/css/properties/custom.zig +++ b/src/css/properties/custom.zig @@ -462,9 +462,9 @@ pub const TokenList = struct { } continue; }, - .hash, .idhash => { + .unrestrictedhash, .idhash => { const h = switch (tok.*) { - .hash => |h| h, + .unrestrictedhash => |h| h, .idhash => |h| h, else => unreachable, }; @@ -472,7 +472,7 @@ pub const TokenList = struct { const r, const g, const b, const a = css.color.parseHashColor(h) orelse { tokens.append( input.allocator(), - .{ .token = .{ .hash = h } }, + .{ .token = .{ .unrestrictedhash = h } }, ) catch unreachable; break :brk; }; diff --git a/src/css/properties/display.zig b/src/css/properties/display.zig index eba2fee7cdf464..3671d346fe556a 100644 --- a/src/css/properties/display.zig +++ b/src/css/properties/display.zig @@ -241,14 +241,14 @@ pub const DisplayInside = union(enum) { const displayInsideMap = bun.ComptimeStringMap(DisplayInside, .{ .{ "flow", DisplayInside.flow }, .{ "flow-root", DisplayInside.flow_root }, - .{ "table", .table }, - .{ "flex", .{ .flex = css.VendorPrefix{ .none = true } } }, - .{ "-webkit-flex", .{ .flex = css.VendorPrefix{ .webkit = true } } }, - .{ "-ms-flexbox", .{ .flex = css.VendorPrefix{ .ms = true } } }, - .{ "-webkit-box", .{ .box = css.VendorPrefix{ .webkit = true } } }, - .{ "-moz-box", .{ .box = css.VendorPrefix{ .moz = true } } }, - .{ "grid", .grid }, - .{ "ruby", .ruby }, + .{ "table", DisplayInside.table }, + .{ "flex", DisplayInside{ .flex = css.VendorPrefix{ .none = true } } }, + .{ "-webkit-flex", DisplayInside{ .flex = css.VendorPrefix{ .webkit = true } } }, + 
.{ "-ms-flexbox", DisplayInside{ .flex = css.VendorPrefix{ .ms = true } } }, + .{ "-webkit-box", DisplayInside{ .box = css.VendorPrefix{ .webkit = true } } }, + .{ "-moz-box", DisplayInside{ .box = css.VendorPrefix{ .moz = true } } }, + .{ "grid", DisplayInside.grid }, + .{ "ruby", DisplayInside.ruby }, }); const location = input.currentSourceLocation(); diff --git a/src/css/properties/flex.zig b/src/css/properties/flex.zig index 57412f121bdecd..c63f7597026dd9 100644 --- a/src/css/properties/flex.zig +++ b/src/css/properties/flex.zig @@ -94,7 +94,7 @@ pub const FlexFlow = struct { /// How the flex items wrap. wrap: FlexWrap, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"flex-flow"); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"flex-flow", PropertyFieldMap); pub const PropertyFieldMap = .{ .direction = css.PropertyIdTag.@"flex-direction", @@ -170,7 +170,7 @@ pub const Flex = struct { /// The flex basis. basis: LengthPercentageOrAuto, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.flex); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.flex, PropertyFieldMap); pub const PropertyFieldMap = .{ .grow = css.PropertyIdTag.@"flex-grow", diff --git a/src/css/properties/font.zig b/src/css/properties/font.zig index 80af832a245af5..93935b9228af29 100644 --- a/src/css/properties/font.zig +++ b/src/css/properties/font.zig @@ -608,7 +608,7 @@ pub const Font = struct { /// How the text should be capitalized. Only CSS 2.1 values are supported. 
variant_caps: FontVariantCaps, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.font); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.font, PropertyFieldMap); pub const PropertyFieldMap = .{ .family = css.PropertyIdTag.@"font-family", diff --git a/src/css/properties/masking.zig b/src/css/properties/masking.zig index cf192d97e7abb0..84062e63f9f5be 100644 --- a/src/css/properties/masking.zig +++ b/src/css/properties/masking.zig @@ -427,7 +427,7 @@ pub const MaskBorder = struct { /// How the mask image is interpreted. mode: MaskBorderMode, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"mask-border"); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.@"mask-border", PropertyFieldMap); pub const PropertyFieldMap = .{ .source = css.PropertyIdTag.@"mask-border-source", diff --git a/src/css/properties/transition.zig b/src/css/properties/transition.zig index 4c7d5b5c31f3d7..c2acd1a3fd923c 100644 --- a/src/css/properties/transition.zig +++ b/src/css/properties/transition.zig @@ -52,7 +52,7 @@ pub const Transition = struct { /// The easing function for the transition. timing_function: EasingFunction, - pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.transition); + pub usingnamespace css.DefineShorthand(@This(), css.PropertyIdTag.transition, PropertyFieldMap); pub usingnamespace css.DefineListShorthand(@This()); pub const PropertyFieldMap = .{ diff --git a/src/css/rules/container.zig b/src/css/rules/container.zig index 13a11ca966d708..13d7203b26e7fe 100644 --- a/src/css/rules/container.zig +++ b/src/css/rules/container.zig @@ -62,7 +62,7 @@ pub const ContainerSizeFeatureId = enum { /// The [orientation](https://w3c.github.io/csswg-drafts/css-contain-3/#orientation) size container feature. 
orientation, - pub usingnamespace css.DeriveValueType(@This()); + pub usingnamespace css.DeriveValueType(@This(), ValueTypeMap); pub const ValueTypeMap = .{ .width = css.MediaFeatureType.length, @@ -141,7 +141,7 @@ pub const StyleQuery = union(enum) { if (input.expectColon().asErr()) |e| return .{ .err = e }; input.skipWhitespace(); const opts = css.ParserOptions.default(input.allocator(), null); - const feature = .{ + const feature: StyleQuery = .{ .feature = switch (css.Property.parse( property_id, input, diff --git a/src/css/selectors/parser.zig b/src/css/selectors/parser.zig index 88a811a4b934d7..135b45bc437e75 100644 --- a/src/css/selectors/parser.zig +++ b/src/css/selectors/parser.zig @@ -2153,10 +2153,6 @@ pub fn NthOfSelectorData(comptime Impl: type) type { pub fn nthData(this: *const @This()) NthSelectorData { return this.data; } - - pub fn selectors(this: *const @This()) []GenericSelector(Impl) { - return this.selectors; - } }; } diff --git a/src/css/small_list.zig b/src/css/small_list.zig index d2749f79139ddc..e433f503c2a3cf 100644 --- a/src/css/small_list.zig +++ b/src/css/small_list.zig @@ -575,7 +575,7 @@ pub fn SmallList(comptime T: type, comptime N: comptime_int) type { } fn reserveOneUnchecked(this: *@This(), allocator: Allocator) void { - @setCold(true); + @branchHint(.cold); bun.assert(this.len() == this.capacity); const new_cap = growCapacity(this.capacity, this.len() + 1); this.tryGrow(allocator, new_cap); diff --git a/src/css/values/color.zig b/src/css/values/color.zig index e742af54b745c7..01ef1cf9c1c7db 100644 --- a/src/css/values/color.zig +++ b/src/css/values/color.zig @@ -262,7 +262,7 @@ pub const CssColor = union(enum) { }; switch (token.*) { - .hash, .idhash => |v| { + .unrestrictedhash, .idhash => |v| { const r, const g, const b, const a = css.color.parseHashColor(v) orelse return .{ .err = location.newUnexpectedTokenError(token.*) }; return .{ .result = .{ .rgba = RGBA.new(r, g, b, a), @@ -1621,7 +1621,7 @@ pub const LAB = struct { 
/// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace UnboundedColorGamut(@This()); @@ -1651,7 +1651,7 @@ pub const SRGB = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace BoundedColorGamut(@This()); @@ -1691,7 +1691,7 @@ pub const HSL = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace HslHwbColorGamut(@This(), "s", "l"); @@ -1735,7 +1735,7 @@ pub const HWB = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace HslHwbColorGamut(@This(), "w", "b"); @@ -1774,7 +1774,7 @@ pub const SRGBLinear = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace BoundedColorGamut(@This()); @@ -1804,7 +1804,7 @@ pub const P3 = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace BoundedColorGamut(@This()); @@ -1828,7 +1828,7 @@ pub const A98 = struct { /// The alpha component. 
alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace BoundedColorGamut(@This()); @@ -1852,7 +1852,7 @@ pub const ProPhoto = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace BoundedColorGamut(@This()); @@ -1876,7 +1876,7 @@ pub const Rec2020 = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace BoundedColorGamut(@This()); @@ -1900,7 +1900,7 @@ pub const XYZd50 = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace UnboundedColorGamut(@This()); @@ -1927,7 +1927,7 @@ pub const XYZd65 = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace UnboundedColorGamut(@This()); @@ -1957,7 +1957,7 @@ pub const LCH = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace UnboundedColorGamut(@This()); @@ -1985,7 +1985,7 @@ pub const OKLAB = struct { /// The alpha component. 
alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace UnboundedColorGamut(@This()); @@ -2015,7 +2015,7 @@ pub const OKLCH = struct { /// The alpha component. alpha: f32, - pub usingnamespace DefineColorspace(@This()); + pub usingnamespace DefineColorspace(@This(), ChannelTypeMap); pub usingnamespace ColorspaceConversions(@This()); pub usingnamespace UnboundedColorGamut(@This()); @@ -3037,12 +3037,7 @@ pub fn ColorspaceConversions(comptime T: type) type { }; } -pub fn DefineColorspace(comptime T: type) type { - if (!@hasDecl(T, "ChannelTypeMap")) { - @compileError("A Colorspace must define a ChannelTypeMap"); - } - const ChannelTypeMap = T.ChannelTypeMap; - +pub fn DefineColorspace(comptime T: type, comptime ChannelTypeMap: anytype) type { const fields: []const std.builtin.Type.StructField = std.meta.fields(T); const a = fields[0].name; const b = fields[1].name; diff --git a/src/css/values/length.zig b/src/css/values/length.zig index a4e1a0989e7cc5..b256a0c463700e 100644 --- a/src/css/values/length.zig +++ b/src/css/values/length.zig @@ -355,7 +355,7 @@ pub const LengthValue = union(enum) { } pub fn sign(this: *const @This()) f32 { - const enum_fields = @typeInfo(@typeInfo(@This()).Union.tag_type.?).Enum.fields; + const enum_fields = @typeInfo(@typeInfo(@This()).@"union".tag_type.?).@"enum".fields; inline for (std.meta.fields(@This()), 0..) |field, i| { if (enum_fields[i].value == @intFromEnum(this.*)) { return css.signfns.signF32(@field(this, field.name)); @@ -379,7 +379,7 @@ pub const LengthValue = union(enum) { } pub fn toUnitValue(this: *const @This()) struct { CSSNumber, []const u8 } { - const enum_fields = @typeInfo(@typeInfo(@This()).Union.tag_type.?).Enum.fields; + const enum_fields = @typeInfo(@typeInfo(@This()).@"union".tag_type.?).@"enum".fields; inline for (std.meta.fields(@This()), 0..) 
|field, i| { if (enum_fields[i].value == @intFromEnum(this.*)) { return .{ @field(this, field.name), field.name }; diff --git a/src/darwin_c.zig b/src/darwin_c.zig index b74dabc73818fe..acbcf178544d6f 100644 --- a/src/darwin_c.zig +++ b/src/darwin_c.zig @@ -150,9 +150,9 @@ pub const stat = blk: { // else => Kind.Unknown, // }, // }, -// .atime = @as(i128, atime.tv_sec) * std.time.ns_per_s + atime.tv_nsec, -// .mtime = @as(i128, mtime.tv_sec) * std.time.ns_per_s + mtime.tv_nsec, -// .ctime = @as(i128, ctime.tv_sec) * std.time.ns_per_s + ctime.tv_nsec, +// .atime = @as(i128, atime.sec) * std.time.ns_per_s + atime.nsec, +// .mtime = @as(i128, mtime.sec) * std.time.ns_per_s + mtime.nsec, +// .ctime = @as(i128, ctime.sec) * std.time.ns_per_s + ctime.nsec, // }; // } @@ -487,7 +487,7 @@ pub fn getSystemUptime() u64 { else => return 0, }; - return @intCast(std.time.timestamp() - boot_time.tv_sec); + return @intCast(std.time.timestamp() - boot_time.sec); } pub fn getSystemLoadavg() [3]f64 { diff --git a/src/defines.zig b/src/defines.zig index f038248505ab35..f61508f3056cc7 100644 --- a/src/defines.zig +++ b/src/defines.zig @@ -85,7 +85,7 @@ pub const DefineData = struct { } pub fn fromMergeableInputEntry(user_defines: *UserDefines, key: []const u8, value_str: []const u8, value_is_undefined: bool, method_call_must_be_replaced_with_undefined: bool, log: *logger.Log, allocator: std.mem.Allocator) !void { - var keySplitter = std.mem.split(u8, key, "."); + var keySplitter = std.mem.splitScalar(u8, key, '.'); while (keySplitter.next()) |part| { if (!js_lexer.isIdentifier(part)) { if (strings.eql(part, key)) { @@ -98,7 +98,7 @@ pub const DefineData = struct { } // check for nested identifiers - var valueSplitter = std.mem.split(u8, value_str, "."); + var valueSplitter = std.mem.splitScalar(u8, value_str, '.'); var isIdent = true; while (valueSplitter.next()) |part| { @@ -217,7 +217,7 @@ pub const Define = struct { const remainder = key[0..last_dot]; const count = 
std.mem.count(u8, remainder, ".") + 1; var parts = try allocator.alloc(string, count + 1); - var splitter = std.mem.split(u8, remainder, "."); + var splitter = std.mem.splitScalar(u8, remainder, '.'); var i: usize = 0; while (splitter.next()) |split| : (i += 1) { parts[i] = split; diff --git a/src/deps/boringssl.translated.zig b/src/deps/boringssl.translated.zig index 60460ab810fcb7..dd88fbfd987c5b 100644 --- a/src/deps/boringssl.translated.zig +++ b/src/deps/boringssl.translated.zig @@ -18791,7 +18791,7 @@ pub extern fn ERR_get_next_error_library() c_int; pub const struct_bio_st = extern struct { method: [*c]const BIO_METHOD, - init: c_int, + _init: c_int, shutdown: c_int, flags: c_int, retry_reason: c_int, diff --git a/src/deps/c_ares.zig b/src/deps/c_ares.zig index 09c365a34c8614..5f8b945fae31b8 100644 --- a/src/deps/c_ares.zig +++ b/src/deps/c_ares.zig @@ -1433,7 +1433,7 @@ pub const struct_any_reply = struct { pub fn toJS(this: *struct_any_reply, globalThis: *JSC.JSGlobalObject, allocator: std.mem.Allocator) JSC.JSValue { const array = JSC.JSValue.createEmptyArray(globalThis, blk: { var len: usize = 0; - inline for (comptime @typeInfo(struct_any_reply).Struct.fields) |field| { + inline for (comptime @typeInfo(struct_any_reply).@"struct".fields) |field| { if (comptime std.mem.endsWith(u8, field.name, "_reply")) { len += @intFromBool(@field(this, field.name) != null); } @@ -1443,7 +1443,7 @@ pub const struct_any_reply = struct { var i: u32 = 0; - inline for (comptime @typeInfo(struct_any_reply).Struct.fields) |field| { + inline for (comptime @typeInfo(struct_any_reply).@"struct".fields) |field| { if (comptime std.mem.endsWith(u8, field.name, "_reply")) { if (@field(this, field.name)) |reply| { const lookup_name = comptime field.name[0 .. 
field.name.len - "_reply".len]; @@ -1561,7 +1561,7 @@ pub const struct_any_reply = struct { } pub fn deinit(this: *struct_any_reply) void { - inline for (@typeInfo(struct_any_reply).Struct.fields) |field| { + inline for (@typeInfo(struct_any_reply).@"struct".fields) |field| { if (comptime std.mem.endsWith(u8, field.name, "_reply")) { if (@field(this, field.name)) |reply| { reply.deinit(); @@ -1993,7 +1993,7 @@ pub const ares_addr_port_node = struct_ares_addr_port_node; comptime { const Bun__canonicalizeIP = JSC.toJSHostFunction(Bun__canonicalizeIP_); - @export(Bun__canonicalizeIP, .{ .name = "Bun__canonicalizeIP" }); + @export(&Bun__canonicalizeIP, .{ .name = "Bun__canonicalizeIP" }); } pub fn Bun__canonicalizeIP_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); diff --git a/src/deps/libuv.zig b/src/deps/libuv.zig index a07f095a2615ec..011be6e3823f35 100644 --- a/src/deps/libuv.zig +++ b/src/deps/libuv.zig @@ -2023,20 +2023,20 @@ pub const UV_CLOCK_MONOTONIC: c_int = 0; pub const UV_CLOCK_REALTIME: c_int = 1; pub const uv_clock_id = c_uint; pub const uv_timespec_t = extern struct { - tv_sec: c_long, - tv_nsec: c_long, + sec: c_long, + nsec: c_long, }; pub const uv_timespec64_t = extern struct { - tv_sec: i64, - tv_nsec: i32, + sec: i64, + nsec: i32, }; pub const uv_timeval_t = extern struct { - tv_sec: c_long, - tv_usec: c_long, + sec: c_long, + usec: c_long, }; pub const uv_timeval64_t = extern struct { - tv_sec: i64, - tv_usec: i32, + sec: i64, + usec: i32, }; pub const uv_stat_t = extern struct { dev: u64, diff --git a/src/deps/uws.zig b/src/deps/uws.zig index f5d364def2a929..f36ed289b36362 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -1798,8 +1798,8 @@ pub fn NewSocketHandler(comptime is_ssl: bool) type { /// # Returns /// This function returns a slice of the buffer on success, or null on failure. 
pub fn localAddressText(this: ThisSocket, buf: []u8, is_ipv6: *bool) ?[]const u8 { - const addr_v4_len = @sizeOf(std.meta.FieldType(std.posix.sockaddr.in, .addr)); - const addr_v6_len = @sizeOf(std.meta.FieldType(std.posix.sockaddr.in6, .addr)); + const addr_v4_len = @sizeOf(@FieldType(std.posix.sockaddr.in, "addr")); + const addr_v6_len = @sizeOf(@FieldType(std.posix.sockaddr.in6, "addr")); var sa_buf: [addr_v6_len + 1]u8 = undefined; const binary = this.localAddressBinary(&sa_buf) orelse return null; @@ -2104,23 +2104,23 @@ pub fn NewSocketHandler(comptime is_ssl: bool) type { } }; - if (comptime @hasDecl(Type, "onOpen") and @typeInfo(@TypeOf(Type.onOpen)) != .Null) + if (comptime @hasDecl(Type, "onOpen") and @typeInfo(@TypeOf(Type.onOpen)) != .null) us_socket_context_on_open(ssl_int, ctx, SocketHandler.on_open); - if (comptime @hasDecl(Type, "onClose") and @typeInfo(@TypeOf(Type.onClose)) != .Null) + if (comptime @hasDecl(Type, "onClose") and @typeInfo(@TypeOf(Type.onClose)) != .null) us_socket_context_on_close(ssl_int, ctx, SocketHandler.on_close); - if (comptime @hasDecl(Type, "onData") and @typeInfo(@TypeOf(Type.onData)) != .Null) + if (comptime @hasDecl(Type, "onData") and @typeInfo(@TypeOf(Type.onData)) != .null) us_socket_context_on_data(ssl_int, ctx, SocketHandler.on_data); - if (comptime @hasDecl(Type, "onWritable") and @typeInfo(@TypeOf(Type.onWritable)) != .Null) + if (comptime @hasDecl(Type, "onWritable") and @typeInfo(@TypeOf(Type.onWritable)) != .null) us_socket_context_on_writable(ssl_int, ctx, SocketHandler.on_writable); - if (comptime @hasDecl(Type, "onTimeout") and @typeInfo(@TypeOf(Type.onTimeout)) != .Null) + if (comptime @hasDecl(Type, "onTimeout") and @typeInfo(@TypeOf(Type.onTimeout)) != .null) us_socket_context_on_timeout(ssl_int, ctx, SocketHandler.on_timeout); - if (comptime @hasDecl(Type, "onConnectError") and @typeInfo(@TypeOf(Type.onConnectError)) != .Null) { + if (comptime @hasDecl(Type, "onConnectError") and 
@typeInfo(@TypeOf(Type.onConnectError)) != .null) { us_socket_context_on_socket_connect_error(ssl_int, ctx, SocketHandler.on_connect_error); us_socket_context_on_connect_error(ssl_int, ctx, SocketHandler.on_connect_error_connecting_socket); } - if (comptime @hasDecl(Type, "onEnd") and @typeInfo(@TypeOf(Type.onEnd)) != .Null) + if (comptime @hasDecl(Type, "onEnd") and @typeInfo(@TypeOf(Type.onEnd)) != .null) us_socket_context_on_end(ssl_int, ctx, SocketHandler.on_end); - if (comptime @hasDecl(Type, "onHandshake") and @typeInfo(@TypeOf(Type.onHandshake)) != .Null) + if (comptime @hasDecl(Type, "onHandshake") and @typeInfo(@TypeOf(Type.onHandshake)) != .null) us_socket_context_on_handshake(ssl_int, ctx, SocketHandler.on_handshake, null); } @@ -2249,25 +2249,25 @@ pub fn NewSocketHandler(comptime is_ssl: bool) type { } }; - if (comptime @hasDecl(Type, "onOpen") and @typeInfo(@TypeOf(Type.onOpen)) != .Null) + if (comptime @hasDecl(Type, "onOpen") and @typeInfo(@TypeOf(Type.onOpen)) != .null) us_socket_context_on_open(ssl_int, ctx, SocketHandler.on_open); - if (comptime @hasDecl(Type, "onClose") and @typeInfo(@TypeOf(Type.onClose)) != .Null) + if (comptime @hasDecl(Type, "onClose") and @typeInfo(@TypeOf(Type.onClose)) != .null) us_socket_context_on_close(ssl_int, ctx, SocketHandler.on_close); - if (comptime @hasDecl(Type, "onData") and @typeInfo(@TypeOf(Type.onData)) != .Null) + if (comptime @hasDecl(Type, "onData") and @typeInfo(@TypeOf(Type.onData)) != .null) us_socket_context_on_data(ssl_int, ctx, SocketHandler.on_data); - if (comptime @hasDecl(Type, "onWritable") and @typeInfo(@TypeOf(Type.onWritable)) != .Null) + if (comptime @hasDecl(Type, "onWritable") and @typeInfo(@TypeOf(Type.onWritable)) != .null) us_socket_context_on_writable(ssl_int, ctx, SocketHandler.on_writable); - if (comptime @hasDecl(Type, "onTimeout") and @typeInfo(@TypeOf(Type.onTimeout)) != .Null) + if (comptime @hasDecl(Type, "onTimeout") and @typeInfo(@TypeOf(Type.onTimeout)) != .null) 
us_socket_context_on_timeout(ssl_int, ctx, SocketHandler.on_timeout); - if (comptime @hasDecl(Type, "onConnectError") and @typeInfo(@TypeOf(Type.onConnectError)) != .Null) { + if (comptime @hasDecl(Type, "onConnectError") and @typeInfo(@TypeOf(Type.onConnectError)) != .null) { us_socket_context_on_socket_connect_error(ssl_int, ctx, SocketHandler.on_connect_error); us_socket_context_on_connect_error(ssl_int, ctx, SocketHandler.on_connect_error_connecting_socket); } - if (comptime @hasDecl(Type, "onEnd") and @typeInfo(@TypeOf(Type.onEnd)) != .Null) + if (comptime @hasDecl(Type, "onEnd") and @typeInfo(@TypeOf(Type.onEnd)) != .null) us_socket_context_on_end(ssl_int, ctx, SocketHandler.on_end); - if (comptime @hasDecl(Type, "onHandshake") and @typeInfo(@TypeOf(Type.onHandshake)) != .Null) + if (comptime @hasDecl(Type, "onHandshake") and @typeInfo(@TypeOf(Type.onHandshake)) != .null) us_socket_context_on_handshake(ssl_int, ctx, SocketHandler.on_handshake, null); - if (comptime @hasDecl(Type, "onLongTimeout") and @typeInfo(@TypeOf(Type.onLongTimeout)) != .Null) + if (comptime @hasDecl(Type, "onLongTimeout") and @typeInfo(@TypeOf(Type.onLongTimeout)) != .null) us_socket_context_on_long_timeout(ssl_int, ctx, SocketHandler.on_long_timeout); } diff --git a/src/deps/zig-clap/clap.zig b/src/deps/zig-clap/clap.zig index cea034c1a7853b..8b128a24c9e9eb 100644 --- a/src/deps/zig-clap/clap.zig +++ b/src/deps/zig-clap/clap.zig @@ -69,7 +69,7 @@ pub fn parseParam(line: []const u8) !Param(Help) { @setEvalBranchQuota(999999); var found_comma = false; - var it = mem.tokenize(u8, line, " \t"); + var it = mem.tokenizeAny(u8, line, " \t"); var param_str = it.next() orelse return error.NoParamFound; const short_name = if (!mem.startsWith(u8, param_str, "--") and diff --git a/src/deps/zig-clap/clap/comptime.zig b/src/deps/zig-clap/clap/comptime.zig index 62d007c43b8cd9..cbd920952b2f7b 100644 --- a/src/deps/zig-clap/clap/comptime.zig +++ b/src/deps/zig-clap/clap/comptime.zig @@ -67,7 +67,7 @@ 
pub fn ComptimeClap( .passthrough_positionals = undefined, }; - var stream = clap.StreamingClap(usize, @typeInfo(@TypeOf(iter)).Pointer.child){ + var stream = clap.StreamingClap(usize, @typeInfo(@TypeOf(iter)).pointer.child){ .params = converted_params, .iter = iter, .diagnostic = opt.diagnostic, diff --git a/src/dns.zig b/src/dns.zig index c1d97920f264fb..1921e1428aa088 100644 --- a/src/dns.zig +++ b/src/dns.zig @@ -27,7 +27,7 @@ pub const GetAddrInfo = struct { hints.ai_family = this.options.family.toLibC(); hints.ai_socktype = this.options.socktype.toLibC(); hints.ai_protocol = this.options.protocol.toLibC(); - hints.ai_flags = this.options.flags; + hints.ai_flags = @bitCast(this.options.flags); return hints; } @@ -54,10 +54,10 @@ pub const GetAddrInfo = struct { socktype: SocketType = .stream, protocol: Protocol = .unspecified, backend: Backend = Backend.default, - flags: i32 = 0, + flags: std.c.AI = .{}, pub fn toLibC(this: Options) ?std.c.addrinfo { - if (this.family == .unspecified and this.socktype == .unspecified and this.protocol == .unspecified and this.flags == 0) { + if (this.family == .unspecified and this.socktype == .unspecified and this.protocol == .unspecified and this.flags == std.c.AI{}) { return null; } @@ -98,9 +98,9 @@ pub const GetAddrInfo = struct { if (!flags.isNumber()) return error.InvalidFlags; - options.flags = flags.coerce(i32, globalObject); + options.flags = flags.coerce(std.c.AI, globalObject); - if (options.flags & ~(AI_ALL | AI_ADDRCONFIG | AI_V4MAPPED) != 0) + if (!options.flags.ALL and !options.flags.ADDRCONFIG and !options.flags.V4MAPPED) return error.InvalidFlags; } diff --git a/src/env_loader.zig b/src/env_loader.zig index 63646845276287..adb7cd8ac4264e 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -221,7 +221,7 @@ pub const Loader = struct { return http_proxy; } - var no_proxy_list = std.mem.split(u8, no_proxy_text, ","); + var no_proxy_list = std.mem.splitScalar(u8, no_proxy_text, ','); var next = 
no_proxy_list.next(); while (next != null) { var host = strings.trim(next.?, &strings.whitespace_chars); @@ -1163,11 +1163,11 @@ pub const Map = struct { map: HashTable, - pub fn createNullDelimitedEnvMap(this: *Map, arena: std.mem.Allocator) ![:null]?[*:0]u8 { + pub fn createNullDelimitedEnvMap(this: *Map, arena: std.mem.Allocator) ![:null]?[*:0]const u8 { var env_map = &this.map; const envp_count = env_map.count(); - const envp_buf = try arena.allocSentinel(?[*:0]u8, envp_count, null); + const envp_buf = try arena.allocSentinel(?[*:0]const u8, envp_count, null); { var it = env_map.iterator(); var i: usize = 0; diff --git a/src/fd.zig b/src/fd.zig index 5f9872255e7556..32b255e922df6d 100644 --- a/src/fd.zig +++ b/src/fd.zig @@ -238,7 +238,7 @@ pub const FDImpl = packed struct { const this_fmt = if (environment.isDebug) std.fmt.bufPrint(&buf, "{}", .{this}) catch unreachable; const result: ?bun.sys.Error = switch (environment.os) { - .linux => result: { + .linux, .mac => result: { const fd = this.encode(); bun.assert(fd != bun.invalid_fd); bun.assert(fd.cast() >= 0); @@ -247,15 +247,6 @@ pub const FDImpl = packed struct { else => null, }; }, - .mac => result: { - const fd = this.encode(); - bun.assert(fd != bun.invalid_fd); - bun.assert(fd.cast() >= 0); - break :result switch (bun.C.getErrno(bun.sys.syscall.@"close$NOCANCEL"(fd.cast()))) { - .BADF => bun.sys.Error{ .errno = @intFromEnum(posix.E.BADF), .syscall = .close, .fd = fd }, - else => null, - }; - }, .windows => result: { switch (this.kind) { .uv => { diff --git a/src/fmt.zig b/src/fmt.zig index e946f2ae562df0..179c29a26ef0ff 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -1344,7 +1344,7 @@ pub fn quote(self: string) bun.fmt.QuotedFormatter { }; } -pub fn EnumTagListFormatter(comptime Enum: type, comptime Separator: @Type(.EnumLiteral)) type { +pub fn EnumTagListFormatter(comptime Enum: type, comptime Separator: @Type(.enum_literal)) type { return struct { pretty: bool = true, const output = brk: { @@ -1375,7 
+1375,7 @@ pub fn EnumTagListFormatter(comptime Enum: type, comptime Separator: @Type(.Enum }; } -pub fn enumTagList(comptime Enum: type, comptime separator: @Type(.EnumLiteral)) EnumTagListFormatter(Enum, separator) { +pub fn enumTagList(comptime Enum: type, comptime separator: @Type(.enum_literal)) EnumTagListFormatter(Enum, separator) { return EnumTagListFormatter(Enum, separator){}; } diff --git a/src/futex.zig b/src/futex.zig index 24d66b056e69c4..546fd51547ec53 100644 --- a/src/futex.zig +++ b/src/futex.zig @@ -28,7 +28,7 @@ const atomic = std.atomic; /// The checking of `ptr` and `expect`, along with blocking the caller, is done atomically /// and totally ordered (sequentially consistent) with respect to other wait()/wake() calls on the same `ptr`. pub fn wait(ptr: *const atomic.Value(u32), expect: u32, timeout_ns: ?u64) error{Timeout}!void { - @setCold(true); + @branchHint(.cold); // Avoid calling into the OS for no-op timeouts. if (timeout_ns) |t| { @@ -42,7 +42,7 @@ pub fn wait(ptr: *const atomic.Value(u32), expect: u32, timeout_ns: ?u64) error{ } pub fn waitForever(ptr: *const atomic.Value(u32), expect: u32) void { - @setCold(true); + @branchHint(.cold); while (true) { Impl.wait(ptr, expect, null) catch |err| switch (err) { @@ -55,7 +55,7 @@ pub fn waitForever(ptr: *const atomic.Value(u32), expect: u32) void { /// Unblocks at most `max_waiters` callers blocked in a `wait()` call on `ptr`. pub fn wake(ptr: *const atomic.Value(u32), max_waiters: u32) void { - @setCold(true); + @branchHint(.cold); // Avoid calling into the OS if there's nothing to wake up. 
if (max_waiters == 0) { @@ -161,7 +161,7 @@ const DarwinImpl = struct { var timeout_overflowed = false; const addr: *const anyopaque = ptr; - const flags = c.UL_COMPARE_AND_WAIT | c.ULF_NO_ERRNO; + const flags: c.UL = .{ .op = .COMPARE_AND_WAIT, .NO_ERRNO = true }; const status = blk: { if (supports_ulock_wait2) { break :blk c.__ulock_wait2(flags, addr, expect, timeout_ns, 0); @@ -193,8 +193,8 @@ const DarwinImpl = struct { } fn wake(ptr: *const atomic.Value(u32), max_waiters: u32) void { - const default_flags: u32 = c.UL_COMPARE_AND_WAIT | c.ULF_NO_ERRNO; - const flags: u32 = default_flags | (if (max_waiters > 1) c.ULF_WAKE_ALL else @as(u32, 0)); + var flags: c.UL = .{ .op = .COMPARE_AND_WAIT, .NO_ERRNO = true }; + if (max_waiters > 1) flags.WAKE_ALL = true; while (true) { const addr: *const anyopaque = ptr; @@ -217,8 +217,8 @@ const LinuxImpl = struct { fn wait(ptr: *const atomic.Value(u32), expect: u32, timeout: ?u64) error{Timeout}!void { const ts: linux.timespec = if (timeout) |timeout_ns| .{ - .tv_sec = @intCast(timeout_ns / std.time.ns_per_s), - .tv_nsec = @intCast(timeout_ns % std.time.ns_per_s), + .sec = @intCast(timeout_ns / std.time.ns_per_s), + .nsec = @intCast(timeout_ns % std.time.ns_per_s), } else undefined; diff --git a/src/grapheme.zig b/src/grapheme.zig index 91a8c865e32677..2634c8d680a4f1 100644 --- a/src/grapheme.zig +++ b/src/grapheme.zig @@ -41,7 +41,7 @@ const Precompute = struct { var result: [std.math.maxInt(u10)]Value = undefined; @setEvalBranchQuota(2_000); - const info = @typeInfo(GraphemeBoundaryClass).Enum; + const info = @typeInfo(GraphemeBoundaryClass).@"enum"; for (0..std.math.maxInt(u2) + 1) |state_init| { for (info.fields) |field1| { for (info.fields) |field2| { diff --git a/src/http.zig b/src/http.zig index 3b4451c0cce36d..efb9255ba75e6c 100644 --- a/src/http.zig +++ b/src/http.zig @@ -50,13 +50,13 @@ const DeadSocket = opaque {}; var dead_socket = @as(*DeadSocket, @ptrFromInt(1)); //TODO: this needs to be freed when Worker 
Threads are implemented var socket_async_http_abort_tracker = std.AutoArrayHashMap(u32, uws.InternalSocket).init(bun.default_allocator); -var async_http_id: std.atomic.Value(u32) = std.atomic.Value(u32).init(0); +var async_http_id_monotonic: std.atomic.Value(u32) = std.atomic.Value(u32).init(0); const MAX_REDIRECT_URL_LENGTH = 128 * 1024; var custom_ssl_context_map = std.AutoArrayHashMap(*SSLConfig, *NewHTTPContext(true)).init(bun.default_allocator); pub var max_http_header_size: usize = 16 * 1024; comptime { - @export(max_http_header_size, .{ .name = "BUN_DEFAULT_MAX_HTTP_HEADER_SIZE" }); + @export(&max_http_header_size, .{ .name = "BUN_DEFAULT_MAX_HTTP_HEADER_SIZE" }); } const print_every = 0; @@ -235,7 +235,7 @@ const ProxyTunnel = struct { const ProxyTunnelWrapper = SSLWrapper(*HTTPClient); - usingnamespace bun.NewRefCounted(ProxyTunnel, ProxyTunnel.deinit); + usingnamespace bun.NewRefCounted(ProxyTunnel, _deinit, null); fn onOpen(this: *HTTPClient) void { this.state.response_stage = .proxy_handshake; @@ -520,7 +520,7 @@ const ProxyTunnel = struct { this.deref(); } - pub fn deinit(this: *ProxyTunnel) void { + fn _deinit(this: *ProxyTunnel) void { this.socket = .{ .none = {} }; if (this.wrapper) |*wrapper| { wrapper.deinit(); @@ -598,7 +598,7 @@ fn NewHTTPContext(comptime ssl: bool) type { } const ActiveSocket = TaggedPointerUnion(.{ - DeadSocket, + *DeadSocket, HTTPClient, PooledSocket, }); @@ -2462,13 +2462,11 @@ pub const AsyncHTTP = struct { } pub fn signalHeaderProgress(this: *AsyncHTTP) void { - @fence(.release); var progress = this.signals.header_progress orelse return; progress.store(true, .release); } pub fn enableBodyStreaming(this: *AsyncHTTP) void { - @fence(.release); var stream = this.signals.body_streaming orelse return; stream.store(true, .release); } @@ -2572,7 +2570,7 @@ pub const AsyncHTTP = struct { .result_callback = callback, .http_proxy = options.http_proxy, .signals = options.signals orelse .{}, - .async_http_id = if (options.signals != 
null and options.signals.?.aborted != null) async_http_id.fetchAdd(1, .monotonic) else 0, + .async_http_id = if (options.signals != null and options.signals.?.aborted != null) async_http_id_monotonic.fetchAdd(1, .monotonic) else 0, }; this.client = .{ @@ -3116,7 +3114,7 @@ pub const HTTPResponseMetadata = struct { }; fn printRequest(request: picohttp.Request, url: string, ignore_insecure: bool, body: []const u8, curl: bool) void { - @setCold(true); + @branchHint(.cold); var request_ = request; request_.path = url; @@ -3130,7 +3128,7 @@ fn printRequest(request: picohttp.Request, url: string, ignore_insecure: bool, b } fn printResponse(response: picohttp.Response) void { - @setCold(true); + @branchHint(.cold); Output.prettyErrorln("{}", .{response}); Output.flush(); } diff --git a/src/http/mime_type.zig b/src/http/mime_type.zig index a7cf32c5a990b7..1f284dc7ee4769 100644 --- a/src/http/mime_type.zig +++ b/src/http/mime_type.zig @@ -21,7 +21,7 @@ category: Category, pub const Map = bun.StringHashMap(MimeType); pub fn createHashTable(allocator: std.mem.Allocator) !Map { - @setCold(true); + @branchHint(.cold); const decls = comptime std.meta.declarations(all); diff --git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig index c612de3183b186..66aa643d6bb9ce 100644 --- a/src/http/websocket_http_client.zig +++ b/src/http/websocket_http_client.zig @@ -226,7 +226,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { pub const name = if (ssl) "WebSocketHTTPSClient" else "WebSocketHTTPClient"; pub const shim = JSC.Shimmer("Bun", name, @This()); - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); const HTTPClient = @This(); pub fn register(_: *JSC.JSGlobalObject, _: *anyopaque, ctx: *uws.SocketContext) callconv(.C) void { @@ -713,16 +713,16 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type { }); comptime { - @export(connect, .{ + @export(&connect, .{ .name = 
Export[0].symbol_name, }); - @export(cancel, .{ + @export(&cancel, .{ .name = Export[1].symbol_name, }); - @export(register, .{ + @export(®ister, .{ .name = Export[2].symbol_name, }); - @export(memoryCost, .{ + @export(&memoryCost, .{ .name = Export[3].symbol_name, }); } @@ -1032,7 +1032,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { const WebSocket = @This(); - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub fn register(global: *JSC.JSGlobalObject, loop_: *anyopaque, ctx_: *anyopaque) callconv(.C) void { const vm = global.bunVM(); const loop = @as(*uws.Loop, @ptrCast(@alignCast(loop_))); @@ -2007,14 +2007,14 @@ pub fn NewWebSocketClient(comptime ssl: bool) type { }); comptime { - @export(writeBinaryData, .{ .name = Export[0].symbol_name }); - @export(writeString, .{ .name = Export[1].symbol_name }); - @export(close, .{ .name = Export[2].symbol_name }); - @export(cancel, .{ .name = Export[3].symbol_name }); - @export(register, .{ .name = Export[4].symbol_name }); - @export(init, .{ .name = Export[5].symbol_name }); - @export(finalize, .{ .name = Export[6].symbol_name }); - @export(memoryCost, .{ .name = Export[7].symbol_name }); + @export(&writeBinaryData, .{ .name = Export[0].symbol_name }); + @export(&writeString, .{ .name = Export[1].symbol_name }); + @export(&close, .{ .name = Export[2].symbol_name }); + @export(&cancel, .{ .name = Export[3].symbol_name }); + @export(®ister, .{ .name = Export[4].symbol_name }); + @export(&init, .{ .name = Export[5].symbol_name }); + @export(&finalize, .{ .name = Export[6].symbol_name }); + @export(&memoryCost, .{ .name = Export[7].symbol_name }); } }; } diff --git a/src/install/bin.zig b/src/install/bin.zig index 61bdf26ba427e3..f47807bef37515 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -364,7 +364,7 @@ pub const Bin = extern struct { } pub fn init() Bin { - return bun.serializable(.{ .tag = .none, .value = Value.init(.{ .none 
= {} }) }); + return bun.serializable(Bin{ .tag = .none, .value = Value.init(.{ .none = {} }) }); } pub const Value = extern union { diff --git a/src/install/bun.lock.zig b/src/install/bun.lock.zig index 0265c374948b4e..83ecb7b1de6ad9 100644 --- a/src/install/bun.lock.zig +++ b/src/install/bun.lock.zig @@ -941,7 +941,7 @@ pub const Stringifier = struct { any = true; } try writer.writeAll( - \\ "os": + \\ "os": ); try Negatable(Npm.OperatingSystem).toJson(meta.os, writer); } @@ -953,7 +953,7 @@ pub const Stringifier = struct { any = true; } try writer.writeAll( - \\ "cpu": + \\ "cpu": ); try Negatable(Npm.Architecture).toJson(meta.arch, writer); } @@ -1154,10 +1154,10 @@ pub const Stringifier = struct { }; const workspace_dependency_groups = [4]struct { []const u8, Dependency.Behavior }{ - .{ "dependencies", Dependency.Behavior.prod }, - .{ "devDependencies", Dependency.Behavior.dev }, - .{ "optionalDependencies", Dependency.Behavior.optional }, - .{ "peerDependencies", Dependency.Behavior.peer }, + .{ "dependencies", .{ .prod = true } }, + .{ "devDependencies", .{ .dev = true } }, + .{ "optionalDependencies", .{ .optional = true } }, + .{ "peerDependencies", .{ .peer = true } }, }; const ParseError = OOM || error{ @@ -1526,7 +1526,7 @@ pub fn parseIntoBinaryLockfile( const dep: Dependency = .{ .name = try string_buf.appendWithHash(name, name_hash), .name_hash = name_hash, - .behavior = Dependency.Behavior.workspace, + .behavior = .{ .workspace = true }, .version = .{ .tag = .workspace, .value = .{ diff --git a/src/install/dependency.zig b/src/install/dependency.zig index d89ebce1c43dfd..2ef640ddbb7290 100644 --- a/src/install/dependency.zig +++ b/src/install/dependency.zig @@ -1311,12 +1311,6 @@ pub const Behavior = packed struct(u8) { bundled: bool = false, _unused_2: u1 = 0, - pub const prod = Behavior{ .prod = true }; - pub const optional = Behavior{ .optional = true }; - pub const dev = Behavior{ .dev = true }; - pub const peer = Behavior{ .peer = true }; - pub 
const workspace = Behavior{ .workspace = true }; - pub inline fn isProd(this: Behavior) bool { return this.prod; } @@ -1357,13 +1351,13 @@ pub const Behavior = packed struct(u8) { return @as(u8, @bitCast(lhs)) & @as(u8, @bitCast(rhs)) != 0; } - pub inline fn add(this: Behavior, kind: @Type(.EnumLiteral)) Behavior { + pub inline fn add(this: Behavior, kind: @Type(.enum_literal)) Behavior { var new = this; @field(new, @tagName(kind)) = true; return new; } - pub inline fn set(this: Behavior, kind: @Type(.EnumLiteral), value: bool) Behavior { + pub inline fn set(this: Behavior, kind: @Type(.enum_literal), value: bool) Behavior { var new = this; @field(new, @tagName(kind)) = value; return new; @@ -1425,10 +1419,10 @@ pub const Behavior = packed struct(u8) { } comptime { - bun.assert(@as(u8, @bitCast(Behavior.prod)) == (1 << 1)); - bun.assert(@as(u8, @bitCast(Behavior.optional)) == (1 << 2)); - bun.assert(@as(u8, @bitCast(Behavior.dev)) == (1 << 3)); - bun.assert(@as(u8, @bitCast(Behavior.peer)) == (1 << 4)); - bun.assert(@as(u8, @bitCast(Behavior.workspace)) == (1 << 5)); + bun.assert(@as(u8, @bitCast(Behavior{ .prod = true })) == (1 << 1)); + bun.assert(@as(u8, @bitCast(Behavior{ .optional = true })) == (1 << 2)); + bun.assert(@as(u8, @bitCast(Behavior{ .dev = true })) == (1 << 3)); + bun.assert(@as(u8, @bitCast(Behavior{ .peer = true })) == (1 << 4)); + bun.assert(@as(u8, @bitCast(Behavior{ .workspace = true })) == (1 << 5)); } }; diff --git a/src/install/install.zig b/src/install/install.zig index aa0465d1c10f00..96a6ed95dab80d 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -1316,7 +1316,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { pub const Result = union(Tag) { success: void, - fail: struct { + failure: struct { err: anyerror, step: Step, debug_trace: if (Environment.isDebug) bun.crash_handler.StoredTrace else void, @@ -1326,13 +1326,10 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { } }, - pub inline 
fn success() Result { - return .{ .success = {} }; - } - + /// Init a Result with the 'fail' tag. use `.success` for the 'success' tag. pub inline fn fail(err: anyerror, step: Step, trace: ?*std.builtin.StackTrace) Result { return .{ - .fail = .{ + .failure = .{ .err = err, .step = step, .debug_trace = if (Environment.isDebug) @@ -1347,13 +1344,13 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { pub fn isFail(this: @This()) bool { return switch (this) { .success => false, - .fail => true, + .failure => true, }; } pub const Tag = enum { success, - fail, + failure, }; }; @@ -1540,10 +1537,10 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { state.walker.deinit(); return Result.fail(err, .opening_dest_dir, @errorReturnTrace()); }; - return Result.success(); + return .success; } - const dest_path_length = bun.windows.kernel32.GetFinalPathNameByHandleW(destbase.fd, &state.buf, state.buf.len, 0); + const dest_path_length = bun.windows.GetFinalPathNameByHandleW(destbase.fd, &state.buf, state.buf.len, 0); if (dest_path_length == 0) { const e = bun.windows.Win32Error.get(); const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; @@ -1567,7 +1564,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { _ = node_fs_for_package_installer.mkdirRecursiveOSPathImpl(void, {}, fullpath, 0, false); state.to_copy_buf = state.buf[fullpath.len..]; - const cache_path_length = bun.windows.kernel32.GetFinalPathNameByHandleW(state.cached_package_dir.fd, &state.buf2, state.buf2.len, 0); + const cache_path_length = bun.windows.GetFinalPathNameByHandleW(state.cached_package_dir.fd, &state.buf2, state.buf2.len, 0); if (cache_path_length == 0) { const e = bun.windows.Win32Error.get(); const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; @@ -1585,7 +1582,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { } state.to_copy_buf2 = to_copy_buf2; - return 
Result.success(); + return .success; } fn installWithCopyfile(this: *@This(), destination_dir: std.fs.Dir) Result { @@ -1715,7 +1712,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { if (Environment.isWindows) &state.buf2 else void{}, ) catch |err| return Result.fail(err, .copying_files, @errorReturnTrace()); - return Result.success(); + return .success; } fn NewTaskQueue(comptime TaskType: type) type { @@ -1726,7 +1723,6 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { wake_value: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), pub fn completeOne(this: *@This()) void { - @fence(.release); if (this.remaining.fetchSub(1, .monotonic) == 1) { _ = this.wake_value.fetchAdd(1, .monotonic); bun.Futex.wake(&this.wake_value, std.math.maxInt(u32)); @@ -1739,7 +1735,6 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { } pub fn wait(this: *@This()) void { - @fence(.acquire); this.wake_value.store(0, .monotonic); while (this.remaining.load(.monotonic) > 0) { bun.Futex.wait(&this.wake_value, 0, std.time.ns_per_ms * 5) catch {}; @@ -1963,7 +1958,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { return Result.fail(err, .copying_files, @errorReturnTrace()); }; - return Result.success(); + return .success; } fn installWithSymlink(this: *@This(), dest_dir: std.fs.Dir) !Result { @@ -2103,7 +2098,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { return Result.fail(err, .copying_files, @errorReturnTrace()); }; - return Result.success(); + return .success; } pub fn uninstall(this: *@This(), destination_dir: std.fs.Dir) void { @@ -2263,7 +2258,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { // When we're linking on Windows, we want to avoid keeping the source directory handle open if (comptime Environment.isWindows) { var wbuf: bun.WPathBuffer = undefined; - const dest_path_length = bun.windows.kernel32.GetFinalPathNameByHandleW(destination_dir.fd, &wbuf, dest_buf.len, 0); + 
const dest_path_length = bun.windows.GetFinalPathNameByHandleW(destination_dir.fd, &wbuf, dest_buf.len, 0); if (dest_path_length == 0) { const e = bun.windows.Win32Error.get(); const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; @@ -2334,7 +2329,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { if (isDanglingSymlink(symlinked_path)) return Result.fail(error.DanglingSymlink, .linking_dependency, @errorReturnTrace()); - return Result.success(); + return .success; } pub fn getInstallMethod(this: *const @This()) Method { @@ -2402,12 +2397,12 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { const fd = bun.toFD(destination_dir.fd); const subpath = bun.path.joinZ(&[_][]const u8{ this.destination_dir_subpath, ".bun-patch-tag" }); const tag_fd = switch (bun.sys.openat(fd, subpath, bun.O.CREAT | bun.O.WRONLY, 0o666)) { - .err => |e| return .{ .fail = .{ .err = bun.errnoToZigErr(e.getErrno()), .step = Step.patching } }, + .err => |e| return .fail(bun.errnoToZigErr(e.getErrno()), .patching, @errorReturnTrace()), .result => |f| f, }; defer _ = bun.sys.close(tag_fd); - if (bun.sys.File.writeAll(.{ .handle = tag_fd }, this.package_version).asErr()) |e| return .{ .fail = .{ .err = bun.errnoToZigErr(e.getErrno()), .step = Step.patching } }; + if (bun.sys.File.writeAll(.{ .handle = tag_fd }, this.package_version).asErr()) |e| return .fail(bun.errnoToZigErr(e.getErrno()), .patching, @errorReturnTrace()); } pub fn installImpl(this: *@This(), skip_delete: bool, destination_dir: std.fs.Dir, method_: Method, resolution_tag: Resolution.Tag) Result { @@ -2486,7 +2481,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { else => {}, } - if (supported_method_to_use != .copyfile) return Result.success(); + if (supported_method_to_use != .copyfile) return .success; // TODO: linux io_uring return this.installWithCopyfile(destination_dir); @@ -7801,10 +7796,10 @@ pub const PackageManager = struct { const 
trusted_dependencies_string = "trustedDependencies"; const dependency_groups = &.{ - .{ "optionalDependencies", Dependency.Behavior.optional }, - .{ "devDependencies", Dependency.Behavior.dev }, - .{ "dependencies", Dependency.Behavior.prod }, - .{ "peerDependencies", Dependency.Behavior.peer }, + .{ "optionalDependencies", .{ .optional = true } }, + .{ "devDependencies", .{ .dev = true } }, + .{ "dependencies", .{ .prod = true } }, + .{ "peerDependencies", .{ .peer = true } }, }; pub const EditOptions = struct { @@ -11632,7 +11627,7 @@ pub const PackageManager = struct { var out_dir: if (bun.Environment.isWindows) []const u16 else void = undefined; if (comptime bun.Environment.isWindows) { - const inlen = bun.windows.kernel32.GetFinalPathNameByHandleW(pkg_in_cache_dir.fd, &buf1, buf1.len, 0); + const inlen = bun.windows.GetFinalPathNameByHandleW(pkg_in_cache_dir.fd, &buf1, buf1.len, 0); if (inlen == 0) { const e = bun.windows.Win32Error.get(); const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; @@ -11640,7 +11635,7 @@ pub const PackageManager = struct { Global.crash(); } in_dir = buf1[0..inlen]; - const outlen = bun.windows.kernel32.GetFinalPathNameByHandleW(node_modules_folder.fd, &buf2, buf2.len, 0); + const outlen = bun.windows.GetFinalPathNameByHandleW(node_modules_folder.fd, &buf2, buf2.len, 0); if (outlen == 0) { const e = bun.windows.Win32Error.get(); const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; @@ -12197,7 +12192,7 @@ pub const PackageManager = struct { const ESCAPE_TABLE: [256]EscapeVal = comptime brk: { var table: [256]EscapeVal = [_]EscapeVal{.other} ** 256; const ty = @typeInfo(EscapeVal); - for (ty.Enum.fields) |field| { + for (ty.@"enum".fields) |field| { if (field.name.len == 1) { const c = field.name[0]; table[c] = @enumFromInt(field.value); @@ -12238,7 +12233,7 @@ pub const PackageManager = struct { Output.prettyln("bun add v" ++ 
Global.package_json_version_with_sha ++ "\n", .{}); Output.flush(); } - return try switch (manager.options.log_level) { + return switch (manager.options.log_level) { inline else => |log_level| manager.updatePackageJSONAndInstallWithManager(ctx, original_cwd, log_level), }; } @@ -13372,7 +13367,7 @@ pub const PackageManager = struct { var lazy_package_dir: LazyPackageDestinationDir = .{ .dir = destination_dir }; - const install_result = switch (resolution.tag) { + const install_result: PackageInstall.Result = switch (resolution.tag) { .symlink, .workspace => installer.installFromLink(this.skip_delete, destination_dir), else => result: { if (resolution.tag == .root or (resolution.tag == .folder and !this.lockfile.isWorkspaceTreeId(this.current_tree_id))) { @@ -13381,15 +13376,15 @@ pub const PackageManager = struct { const dirname = std.fs.path.dirname(this.node_modules.path.items) orelse this.node_modules.path.items; installer.cache_dir = this.root_node_modules_folder.openDir(dirname, .{ .iterate = true, .access_sub_paths = true }) catch |err| - break :result PackageInstall.Result.fail(err, .opening_cache_dir, @errorReturnTrace()); + break :result .fail(err, .opening_cache_dir, @errorReturnTrace()); const result = if (resolution.tag == .root) installer.installFromLink(this.skip_delete, destination_dir) else installer.install(this.skip_delete, destination_dir, resolution.tag); - if (result.isFail() and (result.fail.err == error.ENOENT or result.fail.err == error.FileNotFound)) - break :result PackageInstall.Result.success(); + if (result.isFail() and (result.failure.err == error.ENOENT or result.failure.err == error.FileNotFound)) + break :result .success; break :result result; } @@ -13472,7 +13467,7 @@ pub const PackageManager = struct { if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, &lazy_package_dir, !is_pending_package_install, log_level); }, - .fail => |cause| { + .failure => |cause| { if (comptime Environment.allow_assert) { 
bun.assert(!cause.isPackageMissingFromCache() or (resolution.tag != .symlink and resolution.tag != .workspace)); } @@ -13541,7 +13536,7 @@ pub const PackageManager = struct { cause.err, "failed {s} for package {s}", .{ - install_result.fail.step.name(), + install_result.failure.step.name(), this.names[package_id].slice(this.lockfile.buffers.string_bytes.items), }, ); @@ -15021,7 +15016,7 @@ pub const PackageManager = struct { // added/removed/updated direct dependencies. Output.pretty( \\ - \\Saved {s} ({d} package{s}) + \\Saved {s} ({d} package{s}) , .{ switch (save_format) { .text => "bun.lock", diff --git a/src/install/lifecycle_script_runner.zig b/src/install/lifecycle_script_runner.zig index 42c71494eae16f..52f10c548c5209 100644 --- a/src/install/lifecycle_script_runner.zig +++ b/src/install/lifecycle_script_runner.zig @@ -27,7 +27,7 @@ pub const LifecycleScriptSubprocess = struct { stderr: OutputReader = OutputReader.init(@This()), has_called_process_exit: bool = false, manager: *PackageManager, - envp: [:null]?[*:0]u8, + envp: [:null]?[*:0]const u8, timer: ?Timer = null, @@ -484,7 +484,7 @@ pub const LifecycleScriptSubprocess = struct { pub fn spawnPackageScripts( manager: *PackageManager, list: Lockfile.Package.Scripts.List, - envp: [:null]?[*:0]u8, + envp: [:null]?[*:0]const u8, optional: bool, comptime log_level: PackageManager.Options.LogLevel, comptime foreground: bool, diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index edf351578f9d5e..71ddd3691b6dbc 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -1704,7 +1704,7 @@ pub const Printer = struct { input_lockfile_path: string, format: Format, ) !void { - @setCold(true); + @branchHint(.cold); // We truncate longer than allowed paths. We should probably throw an error instead. 
const path = input_lockfile_path[0..@min(input_lockfile_path.len, bun.MAX_PATH_BYTES)]; @@ -3763,11 +3763,11 @@ pub const Package = extern struct { field: string, behavior: Behavior, - pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = Behavior.prod }; - pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = Behavior.dev }; - pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = Behavior.optional }; - pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = Behavior.peer }; - pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = Behavior.workspace }; + pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = .{ .prod = true } }; + pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = .{ .dev = true } }; + pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = .{ .optional = true } }; + pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = .{ .peer = true } }; + pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = .{ .workspace = true } }; }; pub inline fn isDisabled(this: *const Lockfile.Package) bool { @@ -6074,7 +6074,7 @@ pub const Package = extern struct { }; }; - const FieldsEnum = @typeInfo(Lockfile.Package.List.Field).Enum; + const FieldsEnum = @typeInfo(Lockfile.Package.List.Field).@"enum"; pub fn byteSize(list: Lockfile.Package.List) usize { const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; @@ -7501,7 +7501,7 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: Depend try w.beginObject(); defer w.endObject() catch {}; - 
const fields = @typeInfo(Behavior).Struct.fields; + const fields = @typeInfo(Behavior).@"struct".fields; inline for (fields[1 .. fields.len - 1]) |field| { if (@field(dep.behavior, field.name)) { try w.objectField(field.name); diff --git a/src/install/migration.zig b/src/install/migration.zig index 71ecc89e8fe7db..da2c8fa30119e9 100644 --- a/src/install/migration.zig +++ b/src/install/migration.zig @@ -675,7 +675,7 @@ pub fn migrateNPMLockfile( .workspace = wksp_path, }, }, - .behavior = Dependency.Behavior.workspace, + .behavior = .{ .workspace = true }, }; resolutions_buf[0] = entry1.new_package_id; diff --git a/src/install/padding_checker.zig b/src/install/padding_checker.zig index fffc6203a90403..597e6da559de4e 100644 --- a/src/install/padding_checker.zig +++ b/src/install/padding_checker.zig @@ -33,17 +33,17 @@ const std = @import("std"); pub fn assertNoUninitializedPadding(comptime T: type) void { const info_ = @typeInfo(T); const info = switch (info_) { - .Struct => info_.Struct, - .Union => info_.Union, - .Array => |a| { + .@"struct" => info_.@"struct", + .@"union" => info_.@"union", + .array => |a| { assertNoUninitializedPadding(a.child); return; }, - .Optional => |a| { + .optional => |a| { assertNoUninitializedPadding(a.child); return; }, - .Pointer => |ptr| { + .pointer => |ptr| { // Pointers aren't allowed, but this just makes the assertion easier to invoke. 
assertNoUninitializedPadding(ptr.child); return; @@ -58,18 +58,18 @@ pub fn assertNoUninitializedPadding(comptime T: type) void { for (info.fields) |field| { const fieldInfo = @typeInfo(field.type); switch (fieldInfo) { - .Struct => assertNoUninitializedPadding(field.type), - .Union => assertNoUninitializedPadding(field.type), - .Array => |a| assertNoUninitializedPadding(a.child), - .Optional => |a| assertNoUninitializedPadding(a.child), - .Pointer => { + .@"struct" => assertNoUninitializedPadding(field.type), + .@"union" => assertNoUninitializedPadding(field.type), + .array => |a| assertNoUninitializedPadding(a.child), + .optional => |a| assertNoUninitializedPadding(a.child), + .pointer => { @compileError("Expected no pointer types in " ++ @typeName(T) ++ ", found field '" ++ field.name ++ "' of type '" ++ @typeName(field.type) ++ "'"); }, else => {}, } } - if (info_ == .Union) { + if (info_ == .@"union") { return; } diff --git a/src/install/patch_install.zig b/src/install/patch_install.zig index 50e7eb49ecdbd5..a09fce928fa8cb 100644 --- a/src/install/patch_install.zig +++ b/src/install/patch_install.zig @@ -313,7 +313,7 @@ pub const PatchTask = struct { var resolution_buf: [512]u8 = undefined; const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{this.callback.apply.resolution.fmt(strbuf, .posix)}) catch unreachable; - const dummy_node_modules = .{ + const dummy_node_modules: PackageManager.NodeModulesFolder = .{ .path = std.ArrayList(u8).init(this.manager.allocator), .tree_id = 0, }; @@ -335,7 +335,7 @@ pub const PatchTask = struct { switch (pkg_install.installImpl(true, system_tmpdir, .copyfile, this.callback.apply.resolution.tag)) { .success => {}, - .fail => |reason| { + .failure => |reason| { return try log.addErrorFmtOpts( this.manager.allocator, "{s} while executing step: {s}", diff --git a/src/install/windows-shim/BinLinkingShim.zig b/src/install/windows-shim/BinLinkingShim.zig index 219e13dc16779e..b4000502bcdb31 100644 --- 
a/src/install/windows-shim/BinLinkingShim.zig +++ b/src/install/windows-shim/BinLinkingShim.zig @@ -77,6 +77,7 @@ pub const Flags = packed struct(u16) { pub const embedded_executable_data = @embedFile("bun_shim_impl.exe"); fn wU8(comptime s: []const u8) []const u8 { + @setEvalBranchQuota(1_000_000); const str = std.unicode.utf8ToUtf16LeStringLiteral(s); return @alignCast(std.mem.sliceAsBytes(str)); } diff --git a/src/install/windows-shim/bun_shim_impl.zig b/src/install/windows-shim/bun_shim_impl.zig index a40d7780d9df54..e3d78724136aa2 100644 --- a/src/install/windows-shim/bun_shim_impl.zig +++ b/src/install/windows-shim/bun_shim_impl.zig @@ -44,7 +44,7 @@ const dbg = builtin.mode == .Debug; const std = @import("std"); const w = std.os.windows; const assert = std.debug.assert; -const fmt16 = std.unicode.fmtUtf16le; +const fmt16 = std.unicode.fmtUtf16Le; const is_standalone = @import("root") == @This(); const bun = if (!is_standalone) @import("root").bun else @compileError("cannot use 'bun' in standalone build of bun_shim_impl"); @@ -260,7 +260,7 @@ var failure_reason_data: [512]u8 = undefined; var failure_reason_argument: ?[]const u8 = null; noinline fn failAndExitWithReason(reason: FailReason) noreturn { - @setCold(true); + @branchHint(.cold); const console_handle = w.teb().ProcessEnvironmentBlock.ProcessParameters.hStdError; var mode: w.DWORD = 0; @@ -310,7 +310,7 @@ pub const LauncherMode = enum { } noinline fn fail(comptime mode: LauncherMode, comptime reason: FailReason) mode.FailRetType() { - @setCold(true); + @branchHint(.cold); return switch (mode) { .launch => failAndExitWithReason(reason), .read_without_launch => ReadWithoutLaunchResult{ .err = reason }, @@ -363,8 +363,8 @@ fn launcher(comptime mode: LauncherMode, bun_ctx: anytype) mode.RetType() { const suffix = comptime (if (is_standalone) wliteral("exe") else wliteral("bunx")); if (dbg) if (!std.mem.endsWith(u16, image_path_u16, suffix)) { std.debug.panic("assert failed: image path expected to end with 
{}, got {}", .{ - std.unicode.fmtUtf16le(suffix), - std.unicode.fmtUtf16le(image_path_u16), + std.unicode.fmtUtf16Le(suffix), + std.unicode.fmtUtf16Le(image_path_u16), }); }; const image_path_to_copy_b_len = image_path_b_len - 2 * suffix.len; @@ -487,7 +487,7 @@ fn launcher(comptime mode: LauncherMode, bun_ctx: anytype) mode.RetType() { assert(ptr[1] == '.'); while (true) { - if (dbg) debug("1 - {}", .{std.unicode.fmtUtf16le(ptr[0..1])}); + if (dbg) debug("1 - {}", .{std.unicode.fmtUtf16Le(ptr[0..1])}); if (ptr[0] == '\\') { left -= 1; // ptr is of type [*]u16, which means -= operates on number of ITEMS, not BYTES @@ -505,7 +505,7 @@ fn launcher(comptime mode: LauncherMode, bun_ctx: anytype) mode.RetType() { // inlined loop to do this again, because the completion case is different // using `inline for` caused comptime issues that made the code much harder to read while (true) { - if (dbg) debug("2 - {}", .{std.unicode.fmtUtf16le(ptr[0..1])}); + if (dbg) debug("2 - {}", .{std.unicode.fmtUtf16Le(ptr[0..1])}); if (ptr[0] == '\\') { // ptr is at the position marked S, so move forward one *character* break :brk ptr + 1; diff --git a/src/io/PipeReader.zig b/src/io/PipeReader.zig index 3d800beae40bb5..04342020bb58d4 100644 --- a/src/io/PipeReader.zig +++ b/src/io/PipeReader.zig @@ -972,7 +972,7 @@ pub const WindowsBufferedReader = struct { parent: *anyopaque = undefined, vtable: WindowsOutputReaderVTable = undefined, ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(@This(), deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); const WindowsOutputReader = @This(); diff --git a/src/io/io.zig b/src/io/io.zig index 7a4f36d3c2505b..279fbed6251d08 100644 --- a/src/io/io.zig +++ b/src/io/io.zig @@ -19,8 +19,8 @@ pub const Loop = struct { epoll_fd: if (Environment.isLinux) bun.FileDescriptor else u0 = if (Environment.isLinux) .zero else 0, cached_now: posix.timespec = .{ - .tv_nsec = 0, - .tv_sec = 0, + .nsec = 0, + .sec = 0, }, active: usize = 0, @@ 
-297,14 +297,14 @@ pub const Loop = struct { const rc = linux.clock_gettime(linux.CLOCK.MONOTONIC, timespec); assert(rc == 0); } else if (comptime Environment.isWindows) { - var tv_sec: i64 = 0; - var tv_nsec: i64 = 0; + var sec: i64 = 0; + var nsec: i64 = 0; - const rc = clock_gettime_monotonic(&tv_sec, &tv_nsec); + const rc = clock_gettime_monotonic(&sec, &nsec); assert(rc == 0); - timespec.tv_sec = @intCast(tv_sec); - timespec.tv_nsec = @intCast(tv_nsec); + timespec.sec = @intCast(sec); + timespec.nsec = @intCast(nsec); } else { std.posix.clock_gettime(std.posix.CLOCK.MONOTONIC, timespec) catch {}; } @@ -397,7 +397,7 @@ pub const Poll = struct { const GenerationNumberInt = if (Environment.isMac and Environment.allow_assert) u64 else u0; - var generation_number: GenerationNumberInt = 0; + var generation_number_monotonic: GenerationNumberInt = 0; pub const Tag = Pollable.Tag; @@ -446,24 +446,24 @@ pub const Poll = struct { pub fn fromKQueueEvent(kqueue_event: std.posix.system.kevent64_s) Flags.Set { var flags = Flags.Set{}; - if (kqueue_event.filter == std.posix.system.EVFILT_READ) { + if (kqueue_event.filter == std.posix.system.EVFILT.READ) { flags.insert(Flags.readable); log("readable", .{}); if (kqueue_event.flags & std.posix.system.EV_EOF != 0) { flags.insert(Flags.hup); log("hup", .{}); } - } else if (kqueue_event.filter == std.posix.system.EVFILT_WRITE) { + } else if (kqueue_event.filter == std.posix.system.EVFILT.WRITE) { flags.insert(Flags.writable); log("writable", .{}); if (kqueue_event.flags & std.posix.system.EV_EOF != 0) { flags.insert(Flags.hup); log("hup", .{}); } - } else if (kqueue_event.filter == std.posix.system.EVFILT_PROC) { + } else if (kqueue_event.filter == std.posix.system.EVFILT.PROC) { log("proc", .{}); flags.insert(Flags.process); - } else if (kqueue_event.filter == std.posix.system.EVFILT_MACHPORT) { + } else if (kqueue_event.filter == std.posix.system.EVFILT.MACHPORT) { log("machport", .{}); flags.insert(Flags.machport); } @@ -492,7 
+492,7 @@ pub const Poll = struct { } pub fn applyKQueue( - comptime action: @Type(.EnumLiteral), + comptime action: @Type(.enum_literal), tag: Pollable.Tag, poll: *Poll, fd: bun.FileDescriptor, @@ -516,47 +516,47 @@ pub const Poll = struct { } if (comptime Environment.allow_assert and action != .cancel) { - generation_number += 1; - poll.generation_number = generation_number; + generation_number_monotonic += 1; + poll.generation_number = generation_number_monotonic; } } - const one_shot_flag = std.posix.system.EV_ONESHOT; + const one_shot_flag = std.posix.system.EV.ONESHOT; kqueue_event.* = switch (comptime action) { .readable => .{ .ident = @as(u64, @intCast(fd.int())), - .filter = std.posix.system.EVFILT_READ, + .filter = std.posix.system.EVFILT.READ, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(tag, poll).ptr()), - .flags = std.c.EV_ADD | one_shot_flag, - .ext = .{ generation_number, 0 }, + .flags = std.c.EV.ADD | one_shot_flag, + .ext = .{ generation_number_monotonic, 0 }, }, .writable => .{ .ident = @as(u64, @intCast(fd.int())), - .filter = std.posix.system.EVFILT_WRITE, + .filter = std.posix.system.EVFILT.WRITE, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(tag, poll).ptr()), - .flags = std.c.EV_ADD | one_shot_flag, - .ext = .{ generation_number, 0 }, + .flags = std.c.EV.ADD | one_shot_flag, + .ext = .{ generation_number_monotonic, 0 }, }, .cancel => if (poll.flags.contains(.poll_readable)) .{ .ident = @as(u64, @intCast(fd.int())), - .filter = std.posix.system.EVFILT_READ, + .filter = std.posix.system.EVFILT.READ, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(tag, poll).ptr()), - .flags = std.c.EV_DELETE, + .flags = std.c.EV.DELETE, .ext = .{ poll.generation_number, 0 }, } else if (poll.flags.contains(.poll_writable)) .{ .ident = @as(u64, @intCast(fd.int())), - .filter = std.posix.system.EVFILT_WRITE, + .filter = std.posix.system.EVFILT.WRITE, .data = 0, .fflags = 0, .udata = @intFromPtr(Pollable.init(tag, 
poll).ptr()), - .flags = std.c.EV_DELETE, + .flags = std.c.EV.DELETE, .ext = .{ poll.generation_number, 0 }, } else unreachable, @@ -578,7 +578,7 @@ pub const Poll = struct { pub fn onUpdateKQueue( event: std.posix.system.kevent64_s, ) void { - if (event.filter == std.c.EVFILT_MACHPORT) + if (event.filter == std.c.EVFILT.MACHPORT) return; const pollable = Pollable.from(event.udata); @@ -590,7 +590,7 @@ pub const Poll = struct { inline else => |t| { var this: *Pollable.Tag.Type(t) = @alignCast(@fieldParentPtr("io_poll", poll)); - if (event.flags == std.c.EV_ERROR) { + if (event.flags == std.c.EV.ERROR) { log("error({d}) = {d}", .{ event.ident, event.data }); this.onIOError(bun.sys.Error.fromCode(@enumFromInt(event.data), .kevent)); } else { diff --git a/src/js_ast.zig b/src/js_ast.zig index 3736cbc11c45b4..caf47d46d17e35 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -3202,9 +3202,9 @@ pub const Stmt = struct { }; pub fn StoredData(tag: Tag) type { - const T = std.meta.FieldType(Data, tag); + const T = @FieldType(Data, tag); return switch (@typeInfo(T)) { - .Pointer => |ptr| ptr.child, + .pointer => |ptr| ptr.child, else => T, }; } @@ -5306,7 +5306,7 @@ pub const Expr = struct { bun.assert_eql(@sizeOf(Data), 24); // Do not increase the size of Expr } - pub fn as(data: Data, comptime tag: Tag) ?std.meta.FieldType(Data, tag) { + pub fn as(data: Data, comptime tag: Tag) ?@FieldType(Data, @tagName(tag)) { return if (data == tag) @field(data, @tagName(tag)) else null; } @@ -6043,7 +6043,7 @@ pub const Expr = struct { p: anytype, comptime kind: enum { loose, strict }, ) Equality { - comptime bun.assert(@typeInfo(@TypeOf(p)).Pointer.size == .One); // pass *Parser + comptime bun.assert(@typeInfo(@TypeOf(p)).pointer.size == .one); // pass *Parser // https://dorey.github.io/JavaScript-Equality-Table/ switch (left) { @@ -6329,9 +6329,9 @@ pub const Expr = struct { }; pub fn StoredData(tag: Tag) type { - const T = std.meta.FieldType(Data, tag); + const T = @FieldType(Data, 
tag); return switch (@typeInfo(T)) { - .Pointer => |ptr| ptr.child, + .pointer => |ptr| ptr.child, else => T, }; } @@ -8175,7 +8175,7 @@ pub const Macro = struct { this: *Run, value: JSC.JSValue, ) MacroError!Expr { - return try switch (JSC.ConsoleObject.Formatter.Tag.get(value, this.global).tag) { + return switch (JSC.ConsoleObject.Formatter.Tag.get(value, this.global).tag) { .Error => this.coerce(value, .Error), .Undefined => this.coerce(value, .Undefined), .Null => this.coerce(value, .Null), diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 251cd7c08a5807..7c9dd41e91f5f7 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -187,7 +187,7 @@ fn NewLexer_( } pub fn syntaxError(self: *LexerType) !void { - @setCold(true); + @branchHint(.cold); // Only add this if there is not already an error. // It is possible that there is a more descriptive error already emitted. @@ -198,20 +198,20 @@ fn NewLexer_( } pub fn addDefaultError(self: *LexerType, msg: []const u8) !void { - @setCold(true); + @branchHint(.cold); self.addError(self.start, "{s}", .{msg}, true); return Error.SyntaxError; } pub fn addSyntaxError(self: *LexerType, _loc: usize, comptime fmt: []const u8, args: anytype) !void { - @setCold(true); + @branchHint(.cold); self.addError(_loc, fmt, args, false); return Error.SyntaxError; } pub fn addError(self: *LexerType, _loc: usize, comptime format: []const u8, args: anytype, _: bool) void { - @setCold(true); + @branchHint(.cold); if (self.is_log_disabled) return; var __loc = logger.usize2Loc(_loc); @@ -224,7 +224,7 @@ fn NewLexer_( } pub fn addRangeError(self: *LexerType, r: logger.Range, comptime format: []const u8, args: anytype, _: bool) !void { - @setCold(true); + @branchHint(.cold); if (self.is_log_disabled) return; if (self.prev_error_loc.eql(r.loc)) { @@ -241,7 +241,7 @@ fn NewLexer_( } pub fn addRangeErrorWithNotes(self: *LexerType, r: logger.Range, comptime format: []const u8, args: anytype, notes: []const logger.Data) !void { - @setCold(true); + 
@branchHint(.cold); if (self.is_log_disabled) return; if (self.prev_error_loc.eql(r.loc)) { diff --git a/src/js_parser.zig b/src/js_parser.zig index 69a7b91c021c9f..7d9a1d1389ae21 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -2600,7 +2600,7 @@ const InvalidLoc = struct { }; pub fn addError(loc: InvalidLoc, log: *logger.Log, source: *const logger.Source) void { - @setCold(true); + @branchHint(.cold); const text = switch (loc.kind) { .spread => "Unexpected trailing comma after rest element", .parentheses => "Unexpected parentheses in binding pattern", @@ -5617,7 +5617,7 @@ fn NewParser_( } // Output.print("\nStmt: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start }); - if (@typeInfo(Type) == .Pointer) { + if (@typeInfo(Type) == .pointer) { // ExportFrom normally becomes import records during the visiting pass // However, we skip the visiting pass in this mode // So we must generate a minimum version of it here. @@ -5707,7 +5707,7 @@ fn NewParser_( } // Output.print("\nExpr: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start }); - if (@typeInfo(Type) == .Pointer) { + if (@typeInfo(Type) == .pointer) { if (comptime only_scan_imports_and_do_not_visit) { if (Type == *E.Call) { const call: *E.Call = t; @@ -5743,7 +5743,7 @@ fn NewParser_( } pub fn b(p: *P, t: anytype, loc: logger.Loc) Binding { - if (@typeInfo(@TypeOf(t)) == .Pointer) { + if (@typeInfo(@TypeOf(t)) == .pointer) { return Binding.init(t, loc); } else { return Binding.alloc(p.allocator, t, loc); @@ -9213,7 +9213,7 @@ fn NewParser_( } fn validateImportType(p: *P, import_tag: ImportRecord.Tag, stmt: *S.Import) !void { - @setCold(true); + @branchHint(.cold); if (import_tag.loader() != null) { p.import_records.items[stmt.import_record_index].tag = import_tag; @@ -14760,8 +14760,8 @@ fn NewParser_( } pub fn panic(p: *P, comptime fmt: string, args: anytype) noreturn { + @branchHint(.cold); p.panicLoc(fmt, args, null); - @setCold(true); } pub fn panicLoc(p: *P, comptime fmt: string, args: anytype, loc: 
?logger.Loc) noreturn { diff --git a/src/js_printer.zig b/src/js_printer.zig index 2f791393556cb7..42bfb573a6a9fd 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -1909,7 +1909,7 @@ fn NewPrinter( return printClauseItemAs(p, item, .@"export"); } - fn printClauseItemAs(p: *Printer, item: js_ast.ClauseItem, comptime as: @Type(.EnumLiteral)) void { + fn printClauseItemAs(p: *Printer, item: js_ast.ClauseItem, comptime as: @Type(.enum_literal)) void { const name = p.renamer.nameForSymbol(item.name.ref.?); if (comptime as == .import) { @@ -5978,7 +5978,7 @@ pub fn printWithWriterAndPlatform( printer.printFunc(func); } else { // Special-case lazy-export AST - // @branchHint(.unlikely) + @branchHint(.unlikely); printer.printFnArgs(func.open_parens_loc, func.args, func.flags.contains(.has_rest_arg), false); printer.printSpace(); printer.print("{\n"); diff --git a/src/json_parser.zig b/src/json_parser.zig index c7a95ead6e7a36..63cd0bfecd1b25 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -87,7 +87,7 @@ const HashMapPool = struct { fn newExpr(t: anytype, loc: logger.Loc) Expr { const Type = @TypeOf(t); - if (comptime @typeInfo(Type) == .Pointer) { + if (comptime @typeInfo(Type) == .pointer) { @compileError("Unexpected pointer"); } @@ -533,7 +533,7 @@ pub fn toAST( const type_info: std.builtin.Type = @typeInfo(Type); switch (type_info) { - .Bool => { + .bool => { return Expr{ .data = .{ .e_boolean = .{ .value = value, @@ -541,7 +541,7 @@ pub fn toAST( .loc = logger.Loc{}, }; }, - .Int => { + .int => { return Expr{ .data = .{ .e_number = .{ @@ -551,7 +551,7 @@ pub fn toAST( .loc = logger.Loc{}, }; }, - .Float => { + .float => { return Expr{ .data = .{ .e_number = .{ @@ -561,9 +561,9 @@ pub fn toAST( .loc = logger.Loc{}, }; }, - .Pointer => |ptr_info| switch (ptr_info.size) { - .One => switch (@typeInfo(ptr_info.child)) { - .Array => { + .pointer => |ptr_info| switch (ptr_info.size) { + .one => switch (@typeInfo(ptr_info.child)) { + .array => { const 
Slice = []const std.meta.Elem(ptr_info.child); return try toAST(allocator, Slice, value.*); }, @@ -571,7 +571,7 @@ pub fn toAST( return try toAST(allocator, @TypeOf(value.*), value.*); }, }, - .Slice => { + .slice => { if (ptr_info.child == u8) { return Expr.init(js_ast.E.String, js_ast.E.String.init(value), logger.Loc.Empty); } @@ -583,7 +583,7 @@ pub fn toAST( }, else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"), }, - .Array => |Array| { + .array => |Array| { if (Array.child == u8) { return Expr.init(js_ast.E.String, js_ast.E.String.init(value), logger.Loc.Empty); } @@ -593,7 +593,7 @@ pub fn toAST( return Expr.init(js_ast.E.Array, js_ast.E.Array{ .items = exprs }, logger.Loc.Empty); }, - .Struct => |Struct| { + .@"struct" => |Struct| { const fields: []const std.builtin.Type.StructField = Struct.fields; var properties = try allocator.alloc(js_ast.G.Property, fields.len); var property_i: usize = 0; @@ -614,25 +614,25 @@ pub fn toAST( logger.Loc.Empty, ); }, - .Null => { + .null => { return Expr{ .data = .{ .e_null = .{} }, .loc = logger.Loc{} }; }, - .Optional => { + .optional => { if (value) |_value| { return try toAST(allocator, @TypeOf(_value), _value); } else { return Expr{ .data = .{ .e_null = .{} }, .loc = logger.Loc{} }; } }, - .Enum => { + .@"enum" => { _ = std.meta.intToEnum(Type, @intFromEnum(value)) catch { return Expr{ .data = .{ .e_null = .{} }, .loc = logger.Loc{} }; }; return toAST(allocator, string, @as(string, @tagName(value))); }, - .ErrorSet => return try toAST(allocator, []const u8, bun.asByteSlice(@errorName(value))), - .Union => |Union| { + .error_set => return try toAST(allocator, []const u8, bun.asByteSlice(@errorName(value))), + .@"union" => |Union| { const info = Union; if (info.tag_type) |UnionTagType| { inline for (info.fields) |u_field| { @@ -650,7 +650,7 @@ pub fn toAST( @field(value, u_field.name), ), .is_comptime = false, - .default_value = undefined, + .default_value_ptr = undefined, .alignment = @alignOf( 
@TypeOf( @field(value, u_field.name), diff --git a/src/libarchive/libarchive-bindings.zig b/src/libarchive/libarchive-bindings.zig index d9bd69630a80bf..f8ec3f8f797389 100644 --- a/src/libarchive/libarchive-bindings.zig +++ b/src/libarchive/libarchive-bindings.zig @@ -874,11 +874,11 @@ pub const Archive = opaque { }, result: T, - pub fn err(arch: *Archive, msg: []const u8) @This() { + pub fn initErr(arch: *Archive, msg: []const u8) @This() { return .{ .err = .{ .message = msg, .archive = arch } }; } - pub fn res(value: T) @This() { + pub fn initRes(value: T) @This() { return .{ .result = value }; } }; @@ -891,38 +891,38 @@ pub const Archive = opaque { switch (archive.readSupportFormatTar()) { .failed, .fatal, .warn => { - return Return.err(archive, "failed to enable tar format support"); + return Return.initErr(archive, "failed to enable tar format support"); }, else => {}, } switch (archive.readSupportFormatGnutar()) { .failed, .fatal, .warn => { - return Return.err(archive, "failed to enable gnutar format support"); + return Return.initErr(archive, "failed to enable gnutar format support"); }, else => {}, } switch (archive.readSupportFilterGzip()) { .failed, .fatal, .warn => { - return Return.err(archive, "failed to enable support for gzip compression"); + return Return.initErr(archive, "failed to enable support for gzip compression"); }, else => {}, } switch (archive.readSetOptions("read_concatenated_archives")) { .failed, .fatal, .warn => { - return Return.err(archive, "failed to set option `read_concatenated_archives`"); + return Return.initErr(archive, "failed to set option `read_concatenated_archives`"); }, else => {}, } switch (archive.readOpenMemory(tarball_bytes)) { .failed, .fatal, .warn => { - return Return.err(archive, "failed to read tarball"); + return Return.initErr(archive, "failed to read tarball"); }, else => {}, } - return Return.res(.{ + return Return.initRes(.{ .archive = archive, .filter = std.EnumSet(std.fs.File.Kind).initEmpty(), }); @@ 
-935,15 +935,15 @@ pub const Archive = opaque { pub fn readEntryData(this: *const @This(), allocator: std.mem.Allocator, archive: *Archive) OOM!Iterator.Result([]const u8) { const Return = Iterator.Result([]const u8); const size = this.entry.size(); - if (size < 0) return Return.err(archive, "invalid archive entry size"); + if (size < 0) return Return.initErr(archive, "invalid archive entry size"); const buf = try allocator.alloc(u8, @intCast(size)); const read = archive.readData(buf); if (read < 0) { - return Return.err(archive, "failed to read archive data"); + return Return.initErr(archive, "failed to read archive data"); } - return Return.res(buf[0..@intCast(read)]); + return Return.initRes(buf[0..@intCast(read)]); } }; @@ -954,18 +954,18 @@ pub const Archive = opaque { while (true) { return switch (this.archive.readNextHeader(&entry)) { .retry => continue, - .eof => Return.res(null), + .eof => Return.initRes(null), .ok => { const kind = bun.C.kindFromMode(entry.filetype()); if (this.filter.contains(kind)) continue; - return Return.res(.{ + return Return.initRes(.{ .entry = entry, .kind = kind, }); }, - else => Return.err(this.archive, "failed to read archive header"), + else => Return.initErr(this.archive, "failed to read archive header"), }; } } @@ -975,18 +975,18 @@ pub const Archive = opaque { switch (this.archive.readClose()) { .failed, .fatal, .warn => { - return Return.err(this.archive, "failed to close archive read"); + return Return.initErr(this.archive, "failed to close archive read"); }, else => {}, } switch (this.archive.readFree()) { .failed, .fatal, .warn => { - return Return.err(this.archive, "failed to free archive read"); + return Return.initErr(this.archive, "failed to free archive read"); }, else => {}, } - return Return.res({}); + return Return.initRes({}); } }; }; diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig index 29c56875b515b0..f19bcd570598ee 100644 --- a/src/libarchive/libarchive.zig +++ 
b/src/libarchive/libarchive.zig @@ -262,7 +262,7 @@ pub const Archiver = struct { // it will require us to pull in libiconv // though we should probably validate the utf8 here nonetheless var pathname = entry.pathname(); - var tokenizer = std.mem.tokenize(u8, bun.asByteSlice(pathname), std.fs.path.sep_str); + var tokenizer = std.mem.tokenizeScalar(u8, bun.asByteSlice(pathname), std.fs.path.sep); comptime var depth_i: usize = 0; inline while (depth_i < depth_to_skip) : (depth_i += 1) { if (tokenizer.next() == null) continue :loop; diff --git a/src/linux_c.zig b/src/linux_c.zig index da337bd791973a..7187f120b9afac 100644 --- a/src/linux_c.zig +++ b/src/linux_c.zig @@ -702,10 +702,10 @@ comptime { _ = fstat64; _ = fstatat; _ = statx; - @export(stat, .{ .name = "stat64" }); - @export(lstat, .{ .name = "lstat64" }); - @export(fstat, .{ .name = "fstat64" }); - @export(fstatat, .{ .name = "fstatat64" }); + @export(&stat, .{ .name = "stat64" }); + @export(&lstat, .{ .name = "lstat64" }); + @export(&fstat, .{ .name = "fstat64" }); + @export(&fstatat, .{ .name = "fstatat64" }); } // ********************************************************************************* diff --git a/src/logger.zig b/src/logger.zig index cddd6d75e9d43c..20457169915de6 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -729,23 +729,23 @@ pub const Log = struct { } pub fn addDebugFmt(log: *Log, source: ?*const Source, l: Loc, allocator: std.mem.Allocator, comptime text: string, args: anytype) OOM!void { - if (!Kind.shouldPrint(.debug, log.level)) return; - - @setCold(true); - try log.addMsg(.{ - .kind = .debug, - .data = try rangeData(source, Range{ .loc = l }, try allocPrint(allocator, text, args)).cloneLineText(log.clone_line_text, log.msgs.allocator), - }); + if (Kind.shouldPrint(.debug, log.level)) { + @branchHint(.cold); + try log.addMsg(.{ + .kind = .debug, + .data = try rangeData(source, Range{ .loc = l }, try allocPrint(allocator, text, args)).cloneLineText(log.clone_line_text, 
log.msgs.allocator), + }); + } } pub fn addVerbose(log: *Log, source: ?*const Source, loc: Loc, text: string) OOM!void { - if (!Kind.shouldPrint(.verbose, log.level)) return; - - @setCold(true); - try log.addMsg(.{ - .kind = .verbose, - .data = rangeData(source, Range{ .loc = loc }, text), - }); + if (Kind.shouldPrint(.verbose, log.level)) { + @branchHint(.cold); + try log.addMsg(.{ + .kind = .verbose, + .data = rangeData(source, Range{ .loc = loc }, text), + }); + } } pub fn toJS(this: Log, global: *JSC.JSGlobalObject, allocator: std.mem.Allocator, message: string) JSC.JSValue { @@ -871,7 +871,7 @@ pub const Log = struct { pub const clearAndFree = deinit; pub fn addVerboseWithNotes(log: *Log, source: ?*const Source, loc: Loc, text: string, notes: []Data) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.verbose, log.level)) return; try log.addMsg(.{ @@ -882,7 +882,7 @@ pub const Log = struct { } inline fn allocPrint(allocator: std.mem.Allocator, comptime fmt: string, args: anytype) OOM!string { - return try switch (Output.enable_ansi_colors) { + return switch (Output.enable_ansi_colors) { inline else => |enable_ansi_colors| std.fmt.allocPrint(allocator, Output.prettyFmt(fmt, enable_ansi_colors), args), }; } @@ -950,7 +950,7 @@ pub const Log = struct { import_kind: ImportKind, err: anyerror, ) OOM!void { - @setCold(true); + @branchHint(.cold); return try addResolveErrorWithLevel(log, source, r, allocator, fmt, args, import_kind, false, .err, err); } @@ -963,12 +963,12 @@ pub const Log = struct { args: anytype, import_kind: ImportKind, ) OOM!void { - @setCold(true); + @branchHint(.cold); return try addResolveErrorWithLevel(log, source, r, allocator, fmt, args, import_kind, true, .err, error.ModuleNotFound); } pub fn addRangeError(log: *Log, source: ?*const Source, r: Range, text: string) OOM!void { - @setCold(true); + @branchHint(.cold); log.errors += 1; try log.addMsg(.{ .kind = .err, @@ -977,7 +977,7 @@ pub const Log = struct { } pub fn 
addRangeErrorFmt(log: *Log, source: ?*const Source, r: Range, allocator: std.mem.Allocator, comptime text: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); log.errors += 1; try log.addMsg(.{ .kind = .err, @@ -986,7 +986,7 @@ pub const Log = struct { } pub fn addRangeErrorFmtWithNotes(log: *Log, source: ?*const Source, r: Range, allocator: std.mem.Allocator, notes: []Data, comptime fmt: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); log.errors += 1; try log.addMsg(.{ .kind = .err, @@ -996,7 +996,7 @@ pub const Log = struct { } pub fn addErrorFmt(log: *Log, source: ?*const Source, l: Loc, allocator: std.mem.Allocator, comptime text: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); log.errors += 1; try log.addMsg(.{ .kind = .err, @@ -1006,7 +1006,7 @@ pub const Log = struct { // TODO(dylan-conway): rename and replace `addErrorFmt` pub fn addErrorFmtOpts(log: *Log, allocator: std.mem.Allocator, comptime fmt: string, args: anytype, opts: AddErrorOptions) OOM!void { - @setCold(true); + @branchHint(.cold); log.errors += 1; try log.addMsg(.{ .kind = .err, @@ -1035,7 +1035,7 @@ pub const Log = struct { } pub fn addZigErrorWithNote(log: *Log, allocator: std.mem.Allocator, err: anyerror, comptime noteFmt: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); log.errors += 1; var notes = try allocator.alloc(Data, 1); @@ -1049,7 +1049,7 @@ pub const Log = struct { } pub fn addRangeWarning(log: *Log, source: ?*const Source, r: Range, text: string) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; try log.addMsg(.{ @@ -1059,7 +1059,7 @@ pub const Log = struct { } pub fn addWarningFmt(log: *Log, source: ?*const Source, l: Loc, allocator: std.mem.Allocator, comptime text: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; try 
log.addMsg(.{ @@ -1069,7 +1069,7 @@ pub const Log = struct { } pub fn addWarningFmtLineCol(log: *Log, filepath: []const u8, line: u32, col: u32, allocator: std.mem.Allocator, comptime text: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; @@ -1089,7 +1089,7 @@ pub const Log = struct { } pub fn addRangeWarningFmt(log: *Log, source: ?*const Source, r: Range, allocator: std.mem.Allocator, comptime text: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; try log.addMsg(.{ @@ -1109,7 +1109,7 @@ pub const Log = struct { note_args: anytype, note_range: Range, ) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; @@ -1124,7 +1124,7 @@ pub const Log = struct { } pub fn addRangeWarningFmtWithNotes(log: *Log, source: ?*const Source, r: Range, allocator: std.mem.Allocator, notes: []Data, comptime fmt: string, args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); log.warnings += 1; try log.addMsg(.{ .kind = .warn, @@ -1144,7 +1144,7 @@ pub const Log = struct { note_args: anytype, note_range: Range, ) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.err, log.level)) return; log.errors += 1; @@ -1159,7 +1159,7 @@ pub const Log = struct { } pub fn addWarning(log: *Log, source: ?*const Source, l: Loc, text: string) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; try log.addMsg(.{ @@ -1169,7 +1169,7 @@ pub const Log = struct { } pub fn addWarningWithNote(log: *Log, source: ?*const Source, l: Loc, allocator: std.mem.Allocator, warn: string, comptime note_fmt: string, note_args: anytype) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; @@ -1184,7 +1184,7 @@ pub 
const Log = struct { } pub fn addRangeDebug(log: *Log, source: ?*const Source, r: Range, text: string) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.debug, log.level)) return; try log.addMsg(.{ .kind = .debug, @@ -1193,7 +1193,7 @@ pub const Log = struct { } pub fn addRangeDebugWithNotes(log: *Log, source: ?*const Source, r: Range, text: string, notes: []Data) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.debug, log.level)) return; // log.de += 1; try log.addMsg(.{ @@ -1204,7 +1204,7 @@ pub const Log = struct { } pub fn addRangeErrorWithNotes(log: *Log, source: ?*const Source, r: Range, text: string, notes: []Data) OOM!void { - @setCold(true); + @branchHint(.cold); log.errors += 1; try log.addMsg(.{ .kind = Kind.err, @@ -1214,7 +1214,7 @@ pub const Log = struct { } pub fn addRangeWarningWithNotes(log: *Log, source: ?*const Source, r: Range, text: string, notes: []Data) OOM!void { - @setCold(true); + @branchHint(.cold); if (!Kind.shouldPrint(.warn, log.level)) return; log.warnings += 1; try log.addMsg(.{ @@ -1229,7 +1229,7 @@ pub const Log = struct { } pub fn addError(self: *Log, _source: ?*const Source, loc: Loc, text: string) OOM!void { - @setCold(true); + @branchHint(.cold); self.errors += 1; try self.addMsg(.{ .kind = .err, .data = rangeData(_source, Range{ .loc = loc }, text) }); } @@ -1243,7 +1243,7 @@ pub const Log = struct { // TODO(dylan-conway): rename and replace `addError` pub fn addErrorOpts(self: *Log, text: string, opts: AddErrorOptions) OOM!void { - @setCold(true); + @branchHint(.cold); self.errors += 1; try self.addMsg(.{ .kind = .err, diff --git a/src/main.zig b/src/main.zig index 1df928f583cbe6..338fc2e2432ea4 100644 --- a/src/main.zig +++ b/src/main.zig @@ -27,8 +27,8 @@ pub fn main() void { .mask = std.posix.empty_sigset, .flags = 0, }; - std.posix.sigaction(std.posix.SIG.PIPE, &act, null) catch {}; - std.posix.sigaction(std.posix.SIG.XFSZ, &act, null) catch {}; + 
std.posix.sigaction(std.posix.SIG.PIPE, &act, null); + std.posix.sigaction(std.posix.SIG.XFSZ, &act, null); } // This should appear before we make any calls at all to libuv. @@ -59,28 +59,6 @@ pub fn main() void { bun.Global.exit(0); } -pub const overrides = struct { - pub const mem = struct { - extern "C" fn wcslen(s: [*:0]const u16) usize; - - pub fn indexOfSentinel(comptime T: type, comptime sentinel: T, p: [*:sentinel]const T) usize { - if (comptime T == u16 and sentinel == 0 and Environment.isWindows) { - return wcslen(p); - } - - if (comptime T == u8 and sentinel == 0) { - return bun.C.strlen(p); - } - - var i: usize = 0; - while (p[i] != sentinel) { - i += 1; - } - return i; - } - }; -}; - pub export fn Bun__panic(msg: [*]const u8, len: usize) noreturn { Output.panic("{s}", .{msg[0..len]}); } diff --git a/src/meta.zig b/src/meta.zig index 9385ea8066a19f..3b29f33bb080ec 100644 --- a/src/meta.zig +++ b/src/meta.zig @@ -5,8 +5,8 @@ pub usingnamespace std.meta; pub fn OptionalChild(comptime T: type) type { const tyinfo = @typeInfo(T); - if (tyinfo != .Pointer) @compileError("OptionalChild(T) requires that T be a pointer to an optional type."); - const child = @typeInfo(tyinfo.Pointer.child); + if (tyinfo != .pointer) @compileError("OptionalChild(T) requires that T be a pointer to an optional type."); + const child = @typeInfo(tyinfo.pointer.child); if (child != .Optional) @compileError("OptionalChild(T) requires that T be a pointer to an optional type."); return child.Optional.child; } @@ -14,8 +14,8 @@ pub fn OptionalChild(comptime T: type) type { pub fn EnumFields(comptime T: type) []const std.builtin.Type.EnumField { const tyinfo = @typeInfo(T); return switch (tyinfo) { - .Union => std.meta.fields(tyinfo.Union.tag_type.?), - .Enum => tyinfo.Enum.fields, + .@"union" => std.meta.fields(tyinfo.@"union".tag_type.?), + .@"enum" => tyinfo.@"enum".fields, else => { @compileError("Used `EnumFields(T)` on a type that is not an `enum` or a `union(enum)`"); }, @@ -24,7 
+24,7 @@ pub fn EnumFields(comptime T: type) []const std.builtin.Type.EnumField { pub fn ReturnOfMaybe(comptime function: anytype) type { const Func = @TypeOf(function); - const typeinfo: std.builtin.Type.Fn = @typeInfo(Func).Fn; + const typeinfo: std.builtin.Type.Fn = @typeInfo(Func).@"fn"; const MaybeType = typeinfo.return_type orelse @compileError("Expected the function to have a return type"); return MaybeResult(MaybeType); } @@ -32,7 +32,7 @@ pub fn ReturnOfMaybe(comptime function: anytype) type { pub fn MaybeResult(comptime MaybeType: type) type { const maybe_ty_info = @typeInfo(MaybeType); - const maybe = maybe_ty_info.Union; + const maybe = maybe_ty_info.@"union"; if (maybe.fields.len != 2) @compileError("Expected the Maybe type to be a union(enum) with two variants"); if (!std.mem.eql(u8, maybe.fields[0].name, "err")) { @@ -51,7 +51,7 @@ pub fn ReturnOf(comptime function: anytype) type { } pub fn ReturnOfType(comptime Type: type) type { - const typeinfo: std.builtin.Type.Fn = @typeInfo(Type).Fn; + const typeinfo: std.builtin.Type.Fn = @typeInfo(Type).@"fn"; return typeinfo.return_type orelse void; } @@ -62,15 +62,16 @@ pub fn typeName(comptime Type: type) []const u8 { /// partially emulates behaviour of @typeName in previous Zig versions, /// converting "some.namespace.MyType" to "MyType" -pub fn typeBaseName(comptime fullname: [:0]const u8) [:0]const u8 { +pub inline fn typeBaseName(comptime fullname: [:0]const u8) [:0]const u8 { + @setEvalBranchQuota(1_000_000); // leave type name like "namespace.WrapperType(namespace.MyType)" as it is const baseidx = comptime std.mem.indexOf(u8, fullname, "("); - if (baseidx != null) return fullname; + if (baseidx != null) return comptime fullname; const idx = comptime std.mem.lastIndexOf(u8, fullname, "."); const name = if (idx == null) fullname else fullname[(idx.? 
+ 1)..]; - return comptime std.fmt.comptimePrint("{s}", .{name}); + return comptime name; } pub fn enumFieldNames(comptime Type: type) []const []const u8 { @@ -103,10 +104,10 @@ pub fn banFieldType(comptime Container: type, comptime T: type) void { // *[n]T -> T pub fn Item(comptime T: type) type { switch (@typeInfo(T)) { - .Pointer => |ptr| { - if (ptr.size == .One) { + .pointer => |ptr| { + if (ptr.size == .one) { switch (@typeInfo(ptr.child)) { - .Array => |array| { + .array => |array| { return array.child; }, else => {}, @@ -183,14 +184,14 @@ fn CreateUniqueTuple(comptime N: comptime_int, comptime types: [N]type) type { tuple_fields[i] = .{ .name = std.fmt.bufPrintZ(&num_buf, "{d}", .{i}) catch unreachable, .type = T, - .default_value = null, + .default_value_ptr = null, .is_comptime = false, .alignment = if (@sizeOf(T) > 0) @alignOf(T) else 0, }; } return @Type(.{ - .Struct = .{ + .@"struct" = .{ .is_tuple = true, .layout = .auto, .decls = &.{}, @@ -207,14 +208,14 @@ pub fn hasStableMemoryLayout(comptime T: type) bool { .Bool => true, .Int => true, .Float => true, - .Enum => { + .@"enum" => { // not supporting this rn - if (tyinfo.Enum.is_exhaustive) return false; - return hasStableMemoryLayout(tyinfo.Enum.tag_type); + if (tyinfo.@"enum".is_exhaustive) return false; + return hasStableMemoryLayout(tyinfo.@"enum".tag_type); }, - .Struct => switch (tyinfo.Struct.layout) { + .@"struct" => switch (tyinfo.@"struct".layout) { .auto => { - inline for (tyinfo.Struct.fields) |field| { + inline for (tyinfo.@"struct".fields) |field| { if (!hasStableMemoryLayout(field.field_type)) return false; } return true; @@ -222,11 +223,11 @@ pub fn hasStableMemoryLayout(comptime T: type) bool { .@"extern" => true, .@"packed" => false, }, - .Union => switch (tyinfo.Union.layout) { + .@"union" => switch (tyinfo.@"union".layout) { .auto => { - if (tyinfo.Union.tag_type == null or !hasStableMemoryLayout(tyinfo.Union.tag_type.?)) return false; + if (tyinfo.@"union".tag_type == null or 
!hasStableMemoryLayout(tyinfo.@"union".tag_type.?)) return false; - inline for (tyinfo.Union.fields) |field| { + inline for (tyinfo.@"union".fields) |field| { if (!hasStableMemoryLayout(field.type)) return false; } @@ -240,26 +241,27 @@ pub fn hasStableMemoryLayout(comptime T: type) bool { } pub fn isSimpleCopyType(comptime T: type) bool { + @setEvalBranchQuota(1_000_000); const tyinfo = @typeInfo(T); return switch (tyinfo) { - .Void => true, - .Bool => true, - .Int => true, - .Float => true, - .Enum => true, - .Struct => { - inline for (tyinfo.Struct.fields) |field| { + .void => true, + .bool => true, + .int => true, + .float => true, + .@"enum" => true, + .@"struct" => { + inline for (tyinfo.@"struct".fields) |field| { if (!isSimpleCopyType(field.type)) return false; } return true; }, - .Union => { - inline for (tyinfo.Union.fields) |field| { + .@"union" => { + inline for (tyinfo.@"union".fields) |field| { if (!isSimpleCopyType(field.type)) return false; } return true; }, - .Optional => return isSimpleCopyType(tyinfo.Optional.child), + .optional => return isSimpleCopyType(tyinfo.optional.child), else => false, }; } @@ -269,7 +271,7 @@ pub fn isScalar(comptime T: type) bool { i32, u32, i64, u64, f32, f64, bool => true, else => { const tyinfo = @typeInfo(T); - if (tyinfo == .Enum) return true; + if (tyinfo == .@"enum") return true; return false; }, }; @@ -278,12 +280,12 @@ pub fn isScalar(comptime T: type) bool { pub fn isSimpleEqlType(comptime T: type) bool { const tyinfo = @typeInfo(T); return switch (tyinfo) { - .Type => true, - .Void => true, - .Bool => true, - .Int => true, - .Float => true, - .Enum => true, + .type => true, + .void => true, + .bool => true, + .int => true, + .float => true, + .@"enum" => true, else => false, }; } @@ -295,27 +297,27 @@ pub const ListContainerType = enum { }; pub fn looksLikeListContainerType(comptime T: type) ?struct { list: ListContainerType, child: type } { const tyinfo = @typeInfo(T); - if (tyinfo == .Struct) { + if (tyinfo 
== .@"struct") { // Looks like array list - if (tyinfo.Struct.fields.len == 2 and - std.mem.eql(u8, tyinfo.Struct.fields[0].name, "items") and - std.mem.eql(u8, tyinfo.Struct.fields[1].name, "capacity")) - return .{ .list = .array_list, .child = std.meta.Child(tyinfo.Struct.fields[0].type) }; + if (tyinfo.@"struct".fields.len == 2 and + std.mem.eql(u8, tyinfo.@"struct".fields[0].name, "items") and + std.mem.eql(u8, tyinfo.@"struct".fields[1].name, "capacity")) + return .{ .list = .array_list, .child = std.meta.Child(tyinfo.@"struct".fields[0].type) }; // Looks like babylist - if (tyinfo.Struct.fields.len == 3 and - std.mem.eql(u8, tyinfo.Struct.fields[0].name, "ptr") and - std.mem.eql(u8, tyinfo.Struct.fields[1].name, "len") and - std.mem.eql(u8, tyinfo.Struct.fields[2].name, "cap")) - return .{ .list = .baby_list, .child = std.meta.Child(tyinfo.Struct.fields[0].type) }; + if (tyinfo.@"struct".fields.len == 3 and + std.mem.eql(u8, tyinfo.@"struct".fields[0].name, "ptr") and + std.mem.eql(u8, tyinfo.@"struct".fields[1].name, "len") and + std.mem.eql(u8, tyinfo.@"struct".fields[2].name, "cap")) + return .{ .list = .baby_list, .child = std.meta.Child(tyinfo.@"struct".fields[0].type) }; // Looks like SmallList - if (tyinfo.Struct.fields.len == 2 and - std.mem.eql(u8, tyinfo.Struct.fields[0].name, "capacity") and - std.mem.eql(u8, tyinfo.Struct.fields[1].name, "data")) return .{ + if (tyinfo.@"struct".fields.len == 2 and + std.mem.eql(u8, tyinfo.@"struct".fields[0].name, "capacity") and + std.mem.eql(u8, tyinfo.@"struct".fields[1].name, "data")) return .{ .list = .small_list, .child = std.meta.Child( - @typeInfo(tyinfo.Struct.fields[1].type).Union.fields[0].type, + @typeInfo(tyinfo.@"struct".fields[1].type).@"union".fields[0].type, ), }; } @@ -324,8 +326,8 @@ pub fn looksLikeListContainerType(comptime T: type) ?struct { list: ListContaine } pub fn Tagged(comptime U: type, comptime T: type) type { - var info: std.builtin.Type.Union = @typeInfo(U).Union; + var info: 
std.builtin.Type.Union = @typeInfo(U).@"union"; info.tag_type = T; info.decls = &.{}; - return @Type(.{ .Union = info }); + return @Type(.{ .@"union" = info }); } diff --git a/src/multi_array_list.zig b/src/multi_array_list.zig index 46fbe6a936b0b0..087b8a42741a15 100644 --- a/src/multi_array_list.zig +++ b/src/multi_array_list.zig @@ -24,11 +24,16 @@ pub fn MultiArrayList(comptime T: type) type { len: usize = 0, capacity: usize = 0, - pub const Elem = switch (@typeInfo(T)) { - .Struct => T, - .Union => |u| struct { - pub const Bare = - @Type(.{ .Union = .{ + pub const empty: Self = .{ + .bytes = undefined, + .len = 0, + .capacity = 0, + }; + + const Elem = switch (@typeInfo(T)) { + .@"struct" => T, + .@"union" => |u| struct { + pub const Bare = @Type(.{ .@"union" = .{ .layout = u.layout, .tag_type = null, .fields = u.fields, @@ -70,6 +75,12 @@ pub fn MultiArrayList(comptime T: type) type { len: usize, capacity: usize, + pub const empty: Slice = .{ + .ptrs = undefined, + .len = 0, + .capacity = 0, + }; + pub fn items(self: Slice, comptime field: Field) []FieldType(field) { const F = FieldType(field); if (self.capacity == 0) { @@ -85,9 +96,9 @@ pub fn MultiArrayList(comptime T: type) type { pub fn set(self: *Slice, index: usize, elem: T) void { const e = switch (@typeInfo(T)) { - .Struct => elem, - .Union => Elem.fromT(elem), - else => @compileError("unreachable"), + .@"struct" => elem, + .@"union" => Elem.fromT(elem), + else => unreachable, }; inline for (fields, 0..) 
|field_info, i| { self.items(@as(Field, @enumFromInt(i)))[index] = @field(e, field_info.name); @@ -100,14 +111,14 @@ pub fn MultiArrayList(comptime T: type) type { @field(result, field_info.name) = self.items(@as(Field, @enumFromInt(i)))[index]; } return switch (@typeInfo(T)) { - .Struct => result, - .Union => Elem.toT(result.tags, result.data), - else => @compileError("unreachable"), + .@"struct" => result, + .@"union" => Elem.toT(result.tags, result.data), + else => unreachable, }; } pub fn toMultiArrayList(self: Slice) Self { - if (self.ptrs.len == 0) { + if (self.ptrs.len == 0 or self.capacity == 0) { return .{}; } const unaligned_ptr = self.ptrs[sizes.fields[0]]; @@ -279,6 +290,7 @@ pub fn MultiArrayList(comptime T: type) type { self.insertAssumeCapacity(index, elem); } + /// Invalidates all element pointers. pub fn clearRetainingCapacity(this: *Self) void { this.len = 0; } @@ -292,9 +304,9 @@ pub fn MultiArrayList(comptime T: type) type { assert(index <= self.len); self.len += 1; const entry = switch (@typeInfo(T)) { - .Struct => elem, - .Union => Elem.fromT(elem), - else => @compileError("unreachable"), + .@"struct" => elem, + .@"union" => Elem.fromT(elem), + else => unreachable, }; const slices = self.slice(); inline for (fields, 0..) |field_info, field_index| { @@ -359,11 +371,8 @@ pub fn MultiArrayList(comptime T: type) type { /// If `new_len` is greater than zero, this may fail to reduce the capacity, /// but the data remains intact and the length is updated to new_len. 
pub fn shrinkAndFree(self: *Self, gpa: Allocator, new_len: usize) void { - if (new_len == 0) { - gpa.free(self.allocatedBytes()); - self.* = .{}; - return; - } + if (new_len == 0) return clearAndFree(self, gpa); + assert(new_len <= self.capacity); assert(new_len <= self.len); @@ -404,6 +413,11 @@ pub fn MultiArrayList(comptime T: type) type { self.* = other; } + pub fn clearAndFree(self: *Self, gpa: Allocator) void { + gpa.free(self.allocatedBytes()); + self.* = .{}; + } + /// Reduce length to `new_len`. /// Invalidates pointers to elements `items[new_len..]`. /// Keeps capacity the same. @@ -485,7 +499,7 @@ pub fn MultiArrayList(comptime T: type) type { /// `ctx` has the following method: /// `fn lessThan(ctx: @TypeOf(ctx), a_index: usize, b_index: usize) bool` - fn sortInternal(self: Self, a: usize, b: usize, ctx: anytype, comptime mode: enum { stable, unstable }) void { + fn sortInternal(self: Self, a: usize, b: usize, ctx: anytype, comptime mode: std.sort.Mode) void { const sort_context: struct { sub_ctx: @TypeOf(ctx), slice: Slice, @@ -493,7 +507,7 @@ pub fn MultiArrayList(comptime T: type) type { pub fn swap(sc: @This(), a_index: usize, b_index: usize) void { inline for (fields, 0..) 
|field_info, i| { if (@sizeOf(field_info.type) != 0) { - const field = @as(Field, @enumFromInt(i)); + const field: Field = @enumFromInt(i); const ptr = sc.slice.items(field); mem.swap(field_info.type, &ptr[a_index], &ptr[b_index]); } @@ -571,7 +585,7 @@ pub fn MultiArrayList(comptime T: type) type { } fn FieldType(comptime field: Field) type { - return meta.fieldInfo(Elem, field).type; + return @FieldType(Elem, @tagName(field)); } const Entry = entry: { @@ -579,11 +593,11 @@ pub fn MultiArrayList(comptime T: type) type { for (&entry_fields, sizes.fields) |*entry_field, i| entry_field.* = .{ .name = fields[i].name ++ "_ptr", .type = *fields[i].type, - .default_value = null, + .default_value_ptr = null, .is_comptime = fields[i].is_comptime, .alignment = fields[i].alignment, }; - break :entry @Type(.{ .Struct = .{ + break :entry @Type(.{ .@"struct" = .{ .layout = .@"extern", .fields = &entry_fields, .decls = &.{}, @@ -600,329 +614,10 @@ pub fn MultiArrayList(comptime T: type) type { } comptime { - if (builtin.mode == .Debug) { + if (builtin.zig_backend == .stage2_llvm and !builtin.strip_debug_info) { _ = &dbHelper; _ = &Slice.dbHelper; } } }; } - -test "basic usage" { - const ally = testing.allocator; - - const Foo = struct { - a: u32, - b: []const u8, - c: u8, - }; - - var list = MultiArrayList(Foo){}; - defer list.deinit(ally); - - try testing.expectEqual(@as(usize, 0), list.items(.a).len); - - try list.ensureTotalCapacity(ally, 2); - - list.appendAssumeCapacity(.{ - .a = 1, - .b = "foobar", - .c = 'a', - }); - - list.appendAssumeCapacity(.{ - .a = 2, - .b = "zigzag", - .c = 'b', - }); - - try testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2 }); - try testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b' }); - - try testing.expectEqual(@as(usize, 2), list.items(.b).len); - try testing.expectEqualStrings("foobar", list.items(.b)[0]); - try testing.expectEqualStrings("zigzag", list.items(.b)[1]); - - try list.append(ally, .{ - .a = 3, - .b = 
"fizzbuzz", - .c = 'c', - }); - - try testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2, 3 }); - try testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b', 'c' }); - - try testing.expectEqual(@as(usize, 3), list.items(.b).len); - try testing.expectEqualStrings("foobar", list.items(.b)[0]); - try testing.expectEqualStrings("zigzag", list.items(.b)[1]); - try testing.expectEqualStrings("fizzbuzz", list.items(.b)[2]); - - // Add 6 more things to force a capacity increase. - for (0..6) |i| { - try list.append(ally, .{ - .a = @as(u32, @intCast(4 + i)), - .b = "whatever", - .c = @as(u8, @intCast('d' + i)), - }); - } - - try testing.expectEqualSlices( - u32, - &[_]u32{ 1, 2, 3, 4, 5, 6, 7, 8, 9 }, - list.items(.a), - ); - try testing.expectEqualSlices( - u8, - &[_]u8{ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i' }, - list.items(.c), - ); - - list.shrinkAndFree(ally, 3); - - try testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2, 3 }); - try testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b', 'c' }); - - try testing.expectEqual(@as(usize, 3), list.items(.b).len); - try testing.expectEqualStrings("foobar", list.items(.b)[0]); - try testing.expectEqualStrings("zigzag", list.items(.b)[1]); - try testing.expectEqualStrings("fizzbuzz", list.items(.b)[2]); - - list.set(try list.addOne(ally), .{ - .a = 4, - .b = "xnopyt", - .c = 'd', - }); - try testing.expectEqualStrings("xnopyt", list.pop().b); - try testing.expectEqual(@as(?u8, 'c'), if (list.popOrNull()) |elem| elem.c else null); - try testing.expectEqual(@as(u32, 2), list.pop().a); - try testing.expectEqual(@as(u8, 'a'), list.pop().c); - try testing.expectEqual(@as(?Foo, null), list.popOrNull()); -} - -// This was observed to fail on aarch64 with LLVM 11, when the capacityInBytes -// function used the @reduce code path. 
-test "regression test for @reduce bug" { - const ally = testing.allocator; - var list = MultiArrayList(struct { - tag: std.zig.Token.Tag, - start: u32, - }){}; - defer list.deinit(ally); - - try list.ensureTotalCapacity(ally, 20); - - try list.append(ally, .{ .tag = .keyword_const, .start = 0 }); - try list.append(ally, .{ .tag = .identifier, .start = 6 }); - try list.append(ally, .{ .tag = .equal, .start = 10 }); - try list.append(ally, .{ .tag = .builtin, .start = 12 }); - try list.append(ally, .{ .tag = .l_paren, .start = 19 }); - try list.append(ally, .{ .tag = .string_literal, .start = 20 }); - try list.append(ally, .{ .tag = .r_paren, .start = 25 }); - try list.append(ally, .{ .tag = .semicolon, .start = 26 }); - try list.append(ally, .{ .tag = .keyword_pub, .start = 29 }); - try list.append(ally, .{ .tag = .keyword_fn, .start = 33 }); - try list.append(ally, .{ .tag = .identifier, .start = 36 }); - try list.append(ally, .{ .tag = .l_paren, .start = 40 }); - try list.append(ally, .{ .tag = .r_paren, .start = 41 }); - try list.append(ally, .{ .tag = .identifier, .start = 43 }); - try list.append(ally, .{ .tag = .bang, .start = 51 }); - try list.append(ally, .{ .tag = .identifier, .start = 52 }); - try list.append(ally, .{ .tag = .l_brace, .start = 57 }); - try list.append(ally, .{ .tag = .identifier, .start = 63 }); - try list.append(ally, .{ .tag = .period, .start = 66 }); - try list.append(ally, .{ .tag = .identifier, .start = 67 }); - try list.append(ally, .{ .tag = .period, .start = 70 }); - try list.append(ally, .{ .tag = .identifier, .start = 71 }); - try list.append(ally, .{ .tag = .l_paren, .start = 75 }); - try list.append(ally, .{ .tag = .string_literal, .start = 76 }); - try list.append(ally, .{ .tag = .comma, .start = 113 }); - try list.append(ally, .{ .tag = .period, .start = 115 }); - try list.append(ally, .{ .tag = .l_brace, .start = 116 }); - try list.append(ally, .{ .tag = .r_brace, .start = 117 }); - try list.append(ally, .{ .tag = .r_paren, 
.start = 118 }); - try list.append(ally, .{ .tag = .semicolon, .start = 119 }); - try list.append(ally, .{ .tag = .r_brace, .start = 121 }); - try list.append(ally, .{ .tag = .eof, .start = 123 }); - - const tags = list.items(.tag); - try testing.expectEqual(tags[1], .identifier); - try testing.expectEqual(tags[2], .equal); - try testing.expectEqual(tags[3], .builtin); - try testing.expectEqual(tags[4], .l_paren); - try testing.expectEqual(tags[5], .string_literal); - try testing.expectEqual(tags[6], .r_paren); - try testing.expectEqual(tags[7], .semicolon); - try testing.expectEqual(tags[8], .keyword_pub); - try testing.expectEqual(tags[9], .keyword_fn); - try testing.expectEqual(tags[10], .identifier); - try testing.expectEqual(tags[11], .l_paren); - try testing.expectEqual(tags[12], .r_paren); - try testing.expectEqual(tags[13], .identifier); - try testing.expectEqual(tags[14], .bang); - try testing.expectEqual(tags[15], .identifier); - try testing.expectEqual(tags[16], .l_brace); - try testing.expectEqual(tags[17], .identifier); - try testing.expectEqual(tags[18], .period); - try testing.expectEqual(tags[19], .identifier); - try testing.expectEqual(tags[20], .period); - try testing.expectEqual(tags[21], .identifier); - try testing.expectEqual(tags[22], .l_paren); - try testing.expectEqual(tags[23], .string_literal); - try testing.expectEqual(tags[24], .comma); - try testing.expectEqual(tags[25], .period); - try testing.expectEqual(tags[26], .l_brace); - try testing.expectEqual(tags[27], .r_brace); - try testing.expectEqual(tags[28], .r_paren); - try testing.expectEqual(tags[29], .semicolon); - try testing.expectEqual(tags[30], .r_brace); - try testing.expectEqual(tags[31], .eof); -} - -test "ensure capacity on empty list" { - const ally = testing.allocator; - - const Foo = struct { - a: u32, - b: u8, - }; - - var list = MultiArrayList(Foo){}; - defer list.deinit(ally); - - try list.ensureTotalCapacity(ally, 2); - list.appendAssumeCapacity(.{ .a = 1, .b = 2 }); 
- list.appendAssumeCapacity(.{ .a = 3, .b = 4 }); - - try testing.expectEqualSlices(u32, &[_]u32{ 1, 3 }, list.items(.a)); - try testing.expectEqualSlices(u8, &[_]u8{ 2, 4 }, list.items(.b)); - - list.len = 0; - list.appendAssumeCapacity(.{ .a = 5, .b = 6 }); - list.appendAssumeCapacity(.{ .a = 7, .b = 8 }); - - try testing.expectEqualSlices(u32, &[_]u32{ 5, 7 }, list.items(.a)); - try testing.expectEqualSlices(u8, &[_]u8{ 6, 8 }, list.items(.b)); - - list.len = 0; - try list.ensureTotalCapacity(ally, 16); - - list.appendAssumeCapacity(.{ .a = 9, .b = 10 }); - list.appendAssumeCapacity(.{ .a = 11, .b = 12 }); - - try testing.expectEqualSlices(u32, &[_]u32{ 9, 11 }, list.items(.a)); - try testing.expectEqualSlices(u8, &[_]u8{ 10, 12 }, list.items(.b)); -} - -test "insert elements" { - const ally = testing.allocator; - - const Foo = struct { - a: u8, - b: u32, - }; - - var list = MultiArrayList(Foo){}; - defer list.deinit(ally); - - try list.insert(ally, 0, .{ .a = 1, .b = 2 }); - try list.ensureUnusedCapacity(ally, 1); - list.insertAssumeCapacity(1, .{ .a = 2, .b = 3 }); - - try testing.expectEqualSlices(u8, &[_]u8{ 1, 2 }, list.items(.a)); - try testing.expectEqualSlices(u32, &[_]u32{ 2, 3 }, list.items(.b)); -} - -test "union" { - const ally = testing.allocator; - - const Foo = union(enum) { - a: u32, - b: []const u8, - }; - - var list = MultiArrayList(Foo){}; - defer list.deinit(ally); - - try testing.expectEqual(@as(usize, 0), list.items(.tags).len); - - try list.ensureTotalCapacity(ally, 2); - - list.appendAssumeCapacity(.{ .a = 1 }); - list.appendAssumeCapacity(.{ .b = "zigzag" }); - - try testing.expectEqualSlices(meta.Tag(Foo), list.items(.tags), &.{ .a, .b }); - try testing.expectEqual(@as(usize, 2), list.items(.tags).len); - - list.appendAssumeCapacity(.{ .b = "foobar" }); - try testing.expectEqualStrings("zigzag", list.items(.data)[1].b); - try testing.expectEqualStrings("foobar", list.items(.data)[2].b); - - // Add 6 more things to force a capacity 
increase. - for (0..6) |i| { - try list.append(ally, .{ .a = @as(u32, @intCast(4 + i)) }); - } - - try testing.expectEqualSlices( - meta.Tag(Foo), - &.{ .a, .b, .b, .a, .a, .a, .a, .a, .a }, - list.items(.tags), - ); - try testing.expectEqual(list.get(0), .{ .a = 1 }); - try testing.expectEqual(list.get(1), .{ .b = "zigzag" }); - try testing.expectEqual(list.get(2), .{ .b = "foobar" }); - try testing.expectEqual(list.get(3), .{ .a = 4 }); - try testing.expectEqual(list.get(4), .{ .a = 5 }); - try testing.expectEqual(list.get(5), .{ .a = 6 }); - try testing.expectEqual(list.get(6), .{ .a = 7 }); - try testing.expectEqual(list.get(7), .{ .a = 8 }); - try testing.expectEqual(list.get(8), .{ .a = 9 }); - - list.shrinkAndFree(ally, 3); - - try testing.expectEqual(@as(usize, 3), list.items(.tags).len); - try testing.expectEqualSlices(meta.Tag(Foo), list.items(.tags), &.{ .a, .b, .b }); - - try testing.expectEqual(list.get(0), .{ .a = 1 }); - try testing.expectEqual(list.get(1), .{ .b = "zigzag" }); - try testing.expectEqual(list.get(2), .{ .b = "foobar" }); -} - -test "sorting a span" { - var list: MultiArrayList(struct { score: u32, chr: u8 }) = .{}; - defer list.deinit(testing.allocator); - - try list.ensureTotalCapacity(testing.allocator, 42); - for ( - // zig fmt: off - [42]u8{ 'b', 'a', 'c', 'a', 'b', 'c', 'b', 'c', 'b', 'a', 'b', 'a', 'b', 'c', 'b', 'a', 'a', 'c', 'c', 'a', 'c', 'b', 'a', 'c', 'a', 'b', 'b', 'c', 'c', 'b', 'a', 'b', 'a', 'b', 'c', 'b', 'a', 'a', 'c', 'c', 'a', 'c' }, - [42]u32{ 1, 1, 1, 2, 2, 2, 3, 3, 4, 3, 5, 4, 6, 4, 7, 5, 6, 5, 6, 7, 7, 8, 8, 8, 9, 9, 10, 9, 10, 11, 10, 12, 11, 13, 11, 14, 12, 13, 12, 13, 14, 14 }, - // zig fmt: on - ) |chr, score| { - list.appendAssumeCapacity(.{ .chr = chr, .score = score }); - } - - const sliced = list.slice(); - list.sortSpan(6, 21, struct { - chars: []const u8, - - fn lessThan(ctx: @This(), a: usize, b: usize) bool { - return ctx.chars[a] < ctx.chars[b]; - } - }{ .chars = sliced.items(.chr) }); - - var i: 
u32 = 0; - var j: u32 = 6; - var c: u8 = 'a'; - - while (j < 21) { - i = j; - j += 5; - var n: u32 = 3; - for (sliced.items(.chr)[i..j], sliced.items(.score)[i..j]) |chr, score| { - try testing.expectEqual(score, n); - try testing.expectEqual(chr, c); - n += 1; - } - c += 1; - } -} diff --git a/src/options.zig b/src/options.zig index c73fa65806a087..93a309ddc3a4b9 100644 --- a/src/options.zig +++ b/src/options.zig @@ -1106,7 +1106,7 @@ pub const JSX = struct { // ...unless new is "React.createElement" and original is ["React", "createElement"] // saves an allocation for the majority case pub fn memberListToComponentsIfDifferent(allocator: std.mem.Allocator, original: []const string, new: string) ![]const string { - var splitter = std.mem.split(u8, new, "."); + var splitter = std.mem.splitScalar(u8, new, '.'); const count = strings.countChar(new, '.') + 1; var needs_alloc = false; @@ -1131,7 +1131,7 @@ pub const JSX = struct { var out = try allocator.alloc(string, count); - splitter = std.mem.split(u8, new, "."); + splitter = std.mem.splitScalar(u8, new, '.'); var i: usize = 0; while (splitter.next()) |str| { if (str.len == 0) continue; diff --git a/src/output.zig b/src/output.zig index 3181b2788c1db3..47b9808cb5a8be 100644 --- a/src/output.zig +++ b/src/output.zig @@ -488,7 +488,7 @@ pub fn disableBuffering() void { } pub fn panic(comptime fmt: string, args: anytype) noreturn { - @setCold(true); + @branchHint(.cold); if (isEmojiEnabled()) { std.debug.panic(comptime prettyFmt(fmt, true), args); @@ -714,7 +714,7 @@ pub const LogFunction = fn (comptime fmt: string, args: anytype) callconv(bun.ca pub fn Scoped(comptime tag: anytype, comptime disabled: bool) type { const tagname = comptime brk: { const input = switch (@TypeOf(tag)) { - @Type(.EnumLiteral) => @tagName(tag), + @Type(.enum_literal) => @tagName(tag), else => tag, }; var ascii_slice: [input.len]u8 = undefined; @@ -1064,7 +1064,7 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: 
anytype) const T = @TypeOf(error_name); const info = @typeInfo(T); - if (comptime T == bun.sys.Error or info == .Pointer and info.Pointer.child == bun.sys.Error) { + if (comptime T == bun.sys.Error or info == .pointer and info.pointer.child == bun.sys.Error) { const e: bun.sys.Error = error_name; const tag_name, const sys_errno = e.getErrorCodeTagName() orelse { err("unknown error", fmt, args); @@ -1081,10 +1081,10 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype) const display_name, const is_comptime_name = display_name: { // Zig string literals are of type *const [n:0]u8 // we assume that no one will pass this type from not using a string literal. - if (info == .Pointer and info.Pointer.size == .One and info.Pointer.is_const) { - const child_info = @typeInfo(info.Pointer.child); - if (child_info == .Array and child_info.Array.child == u8) { - if (child_info.Array.len == 0) @compileError("Output.err should not be passed an empty string (use errGeneric)"); + if (info == .pointer and info.pointer.size == .one and info.pointer.is_const) { + const child_info = @typeInfo(info.pointer.child); + if (child_info == .array and child_info.array.child == u8) { + if (child_info.array.len == 0) @compileError("Output.err should not be passed an empty string (use errGeneric)"); break :display_name .{ error_name, true }; } } @@ -1095,8 +1095,8 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype) } // error unions - if (info == .ErrorSet) { - if (info.ErrorSet) |errors| { + if (info == .error_set) { + if (info.error_set) |errors| { if (errors.len == 0) { @compileError("Output.err was given an empty error set"); } @@ -1109,7 +1109,7 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype) } // enum literals - if (info == .EnumLiteral) { + if (info == .enum_literal) { const tag = @tagName(info); comptime bun.assert(tag.len > 0); // how? 
if (tag[0] != 'E') break :display_name .{ "E" ++ tag, true }; @@ -1117,7 +1117,7 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype) } // enums - if (info == .Enum) { + if (info == .@"enum") { const errno: bun.C.SystemErrno = @enumFromInt(@intFromEnum(info)); break :display_name .{ @tagName(errno), false }; } diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 4273c9893173ee..fd87fd9a82d782 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -341,17 +341,17 @@ pub const DebugLogs = struct { } pub fn increaseIndent(d: *DebugLogs) void { - @setCold(true); + @branchHint(.cold); d.indent.append(" ") catch unreachable; } pub fn decreaseIndent(d: *DebugLogs) void { - @setCold(true); + @branchHint(.cold); d.indent.list.shrinkRetainingCapacity(d.indent.list.items.len - 1); } pub fn addNote(d: *DebugLogs, _text: string) void { - @setCold(true); + @branchHint(.cold); var text = _text; const len = d.indent.len(); if (len > 0) { @@ -366,7 +366,7 @@ pub const DebugLogs = struct { } pub fn addNoteFmt(d: *DebugLogs, comptime fmt: string, args: anytype) void { - @setCold(true); + @branchHint(.cold); return d.addNote(std.fmt.allocPrint(d.notes.allocator, fmt, args) catch unreachable); } }; @@ -1843,7 +1843,7 @@ pub const Resolver = struct { // https://nodejs.org/api/modules.html#loading-from-the-global-folders const node_path: []const u8 = if (r.env_loader) |env_loader| env_loader.get("NODE_PATH") orelse "" else ""; if (node_path.len > 0) { - var it = std.mem.tokenize(u8, node_path, if (Environment.isWindows) ";" else ":"); + var it = std.mem.tokenizeScalar(u8, node_path, if (Environment.isWindows) ';' else ':'); while (it.next()) |path| { const abs_path = r.fs.absBuf(&[_]string{ path, import_path }, bufs(.node_modules_check)); if (r.debug_logs) |*debug| { @@ -1866,7 +1866,7 @@ pub const Resolver = struct { // check the global cache directory for a package.json file. 
const manager = r.getPackageManager(); var dependency_version = Dependency.Version{}; - var dependency_behavior = Dependency.Behavior.prod; + var dependency_behavior: Dependency.Behavior = .{ .prod = true }; var string_buf = esm.version; // const initial_pending_tasks = manager.pending_tasks; @@ -3362,7 +3362,7 @@ pub const Resolver = struct { comptime { const Resolver__nodeModulePathsForJS = JSC.toJSHostFunction(Resolver__nodeModulePathsForJS_); - @export(Resolver__nodeModulePathsForJS, .{ .name = "Resolver__nodeModulePathsForJS" }); + @export(&Resolver__nodeModulePathsForJS, .{ .name = "Resolver__nodeModulePathsForJS" }); } pub fn Resolver__nodeModulePathsForJS_(globalThis: *bun.JSC.JSGlobalObject, callframe: *bun.JSC.CallFrame) bun.JSError!JSC.JSValue { bun.JSC.markBinding(@src()); diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig index c4e40a70569ef4..0eb8a1832154e2 100644 --- a/src/resolver/tsconfig_json.zig +++ b/src/resolver/tsconfig_json.zig @@ -424,7 +424,7 @@ pub const TSConfigJSON = struct { return parts.items; } - var iter = std.mem.tokenize(u8, text, "."); + var iter = std.mem.tokenizeScalar(u8, text, '.'); while (iter.next()) |part| { if (!js_lexer.isIdentifier(part)) { diff --git a/src/s3/client.zig b/src/s3/client.zig index 6d527ff9f2afa0..a5f9fdfd424a53 100644 --- a/src/s3/client.zig +++ b/src/s3/client.zig @@ -202,7 +202,7 @@ const S3UploadStreamWrapper = struct { callback_context: *anyopaque, ref_count: u32 = 1, path: []const u8, // this is owned by the task not by the wrapper - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub fn resolve(result: S3UploadResult, self: *@This()) void { const sink = self.sink; defer self.deref(); @@ -282,9 +282,9 @@ pub const Export = shim.exportFunctions(.{ }); comptime { const jsonResolveRequestStream = JSC.toJSHostFunction(onUploadStreamResolveRequestStream); - @export(jsonResolveRequestStream, .{ .name = 
Export[0].symbol_name }); + @export(&jsonResolveRequestStream, .{ .name = Export[0].symbol_name }); const jsonRejectRequestStream = JSC.toJSHostFunction(onUploadStreamRejectRequestStream); - @export(jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); + @export(&jsonRejectRequestStream, .{ .name = Export[1].symbol_name }); } /// consumes the readable stream and upload to s3 diff --git a/src/s3/credentials.zig b/src/s3/credentials.zig index 2c10db4d8ad2a0..a8701f335354b6 100644 --- a/src/s3/credentials.zig +++ b/src/s3/credentials.zig @@ -23,7 +23,7 @@ pub const S3Credentials = struct { insecure_http: bool = false, ref_count: u32 = 1, - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); pub fn estimatedSize(this: *const @This()) usize { return @sizeOf(S3Credentials) + this.accessKeyId.len + this.region.len + this.secretAccessKey.len + this.endpoint.len + this.bucket.len; diff --git a/src/s3/multipart.zig b/src/s3/multipart.zig index 0ff45d5da43b2b..5fa8502cd7f375 100644 --- a/src/s3/multipart.zig +++ b/src/s3/multipart.zig @@ -145,7 +145,7 @@ pub const MultiPartUpload = struct { callback: *const fn (S3SimpleRequest.S3UploadResult, *anyopaque) void, callback_context: *anyopaque, - pub usingnamespace bun.NewRefCounted(@This(), @This().deinit); + pub usingnamespace bun.NewRefCounted(@This(), deinit, null); const log = bun.Output.scoped(.S3MultiPartUpload, true); diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index f515e175c67036..d2a08a3f4d869a 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -1792,18 +1792,15 @@ pub const Interpreter = struct { } pub fn hasPendingActivity(this: *ThisInterpreter) bool { - @fence(.seq_cst); return this.has_pending_activity.load(.seq_cst) > 0; } fn incrPendingActivityFlag(has_pending_activity: *std.atomic.Value(u32)) void { - @fence(.seq_cst); _ = has_pending_activity.fetchAdd(1, .seq_cst); log("Interpreter 
incr pending activity {d}", .{has_pending_activity.load(.seq_cst)}); } fn decrPendingActivityFlag(has_pending_activity: *std.atomic.Value(u32)) void { - @fence(.seq_cst); _ = has_pending_activity.fetchSub(1, .seq_cst); log("Interpreter decr pending activity {d}", .{has_pending_activity.load(.seq_cst)}); } @@ -5338,9 +5335,9 @@ pub const Interpreter = struct { const Blob = struct { ref_count: usize = 1, blob: bun.JSC.WebCore.Blob, - pub usingnamespace bun.NewRefCounted(Blob, Blob.deinit); + pub usingnamespace bun.NewRefCounted(Blob, _deinit, null); - pub fn deinit(this: *Blob) void { + fn _deinit(this: *Blob) void { this.blob.deinit(); bun.destroy(this); } @@ -5680,7 +5677,7 @@ pub const Interpreter = struct { } /// If the stdout/stderr is supposed to be captured then get the bytelist associated with that - pub fn stdBufferedBytelist(this: *Builtin, comptime io_kind: @Type(.EnumLiteral)) ?*bun.ByteList { + pub fn stdBufferedBytelist(this: *Builtin, comptime io_kind: @Type(.enum_literal)) ?*bun.ByteList { if (comptime io_kind != .stdout and io_kind != .stderr) { @compileError("Bad IO" ++ @tagName(io_kind)); } @@ -5702,7 +5699,7 @@ pub const Interpreter = struct { } /// **WARNING** You should make sure that stdout/stderr does not need IO (e.g. 
`.needsIO(.stderr)` is false before caling `.writeNoIO(.stderr, buf)`) - pub fn writeNoIO(this: *Builtin, comptime io_kind: @Type(.EnumLiteral), buf: []const u8) Maybe(usize) { + pub fn writeNoIO(this: *Builtin, comptime io_kind: @Type(.enum_literal), buf: []const u8) Maybe(usize) { if (comptime io_kind != .stdout and io_kind != .stderr) { @compileError("Bad IO" ++ @tagName(io_kind)); } @@ -6368,8 +6365,8 @@ pub const Interpreter = struct { var node_fs = JSC.Node.NodeFS{}; const milliseconds: f64 = @floatFromInt(std.time.milliTimestamp()); const atime: JSC.Node.TimeLike = if (bun.Environment.isWindows) milliseconds / 1000.0 else JSC.Node.TimeLike{ - .tv_sec = @intFromFloat(@divFloor(milliseconds, std.time.ms_per_s)), - .tv_nsec = @intFromFloat(@mod(milliseconds, std.time.ms_per_s) * std.time.ns_per_ms), + .sec = @intFromFloat(@divFloor(milliseconds, std.time.ms_per_s)), + .nsec = @intFromFloat(@mod(milliseconds, std.time.ms_per_s) * std.time.ns_per_ms), }; const mtime = atime; const args = JSC.Node.Arguments.Utimes{ @@ -6906,7 +6903,7 @@ pub const Interpreter = struct { } }; - pub fn writeOutput(this: *Export, comptime io_kind: @Type(.EnumLiteral), comptime fmt: []const u8, args: anytype) Maybe(void) { + pub fn writeOutput(this: *Export, comptime io_kind: @Type(.enum_literal), comptime fmt: []const u8, args: anytype) Maybe(void) { if (this.bltn.stdout.needsIO()) |safeguard| { var output: *BuiltinIO.Output = &@field(this.bltn, @tagName(io_kind)); this.printing = true; @@ -8733,16 +8730,14 @@ pub const Interpreter = struct { } }, - fn incrementOutputCount(this: *@This(), comptime thevar: @Type(.EnumLiteral)) void { - @fence(.seq_cst); + fn incrementOutputCount(this: *@This(), comptime thevar: @Type(.enum_literal)) void { var atomicvar = &@field(this, @tagName(thevar)); const result = atomicvar.fetchAdd(1, .seq_cst); log("[rm] {s}: {d} + 1", .{ @tagName(thevar), result }); return; } - fn getOutputCount(this: *@This(), comptime thevar: @Type(.EnumLiteral)) usize { - 
@fence(.seq_cst); + fn getOutputCount(this: *@This(), comptime thevar: @Type(.enum_literal)) usize { var atomicvar = &@field(this, @tagName(thevar)); return atomicvar.load(.seq_cst); } @@ -10109,8 +10104,8 @@ pub const Interpreter = struct { bltn: *Builtin, state: enum { idle, waiting_io, err, done } = .idle, buf: std.ArrayListUnmanaged(u8) = .{}, - start: f32 = 1, - end: f32 = 1, + _start: f32 = 1, + _end: f32 = 1, increment: f32 = 1, separator: string = "\n", terminator: string = "", @@ -10155,27 +10150,27 @@ pub const Interpreter = struct { const maybe1 = iter.next().?; const int1 = bun.fmt.parseFloat(f32, bun.sliceTo(maybe1, 0)) catch return this.fail("seq: invalid argument\n"); - this.end = int1; - if (this.start > this.end) this.increment = -1; + this._end = int1; + if (this._start > this._end) this.increment = -1; const maybe2 = iter.next(); if (maybe2 == null) return this.do(); const int2 = bun.fmt.parseFloat(f32, bun.sliceTo(maybe2.?, 0)) catch return this.fail("seq: invalid argument\n"); - this.start = int1; - this.end = int2; - if (this.start < this.end) this.increment = 1; - if (this.start > this.end) this.increment = -1; + this._start = int1; + this._end = int2; + if (this._start < this._end) this.increment = 1; + if (this._start > this._end) this.increment = -1; const maybe3 = iter.next(); if (maybe3 == null) return this.do(); const int3 = bun.fmt.parseFloat(f32, bun.sliceTo(maybe3.?, 0)) catch return this.fail("seq: invalid argument\n"); - this.start = int1; + this._start = int1; this.increment = int2; - this.end = int3; + this._end = int3; if (this.increment == 0) return this.fail("seq: zero increment\n"); - if (this.start > this.end and this.increment > 0) return this.fail("seq: needs negative decrement\n"); - if (this.start < this.end and this.increment < 0) return this.fail("seq: needs positive increment\n"); + if (this._start > this._end and this.increment > 0) return this.fail("seq: needs negative decrement\n"); + if (this._start < this._end 
and this.increment < 0) return this.fail("seq: needs positive increment\n"); return this.do(); } @@ -10192,11 +10187,11 @@ pub const Interpreter = struct { } fn do(this: *@This()) Maybe(void) { - var current = this.start; + var current = this._start; var arena = std.heap.ArenaAllocator.init(bun.default_allocator); defer arena.deinit(); - while (if (this.increment > 0) current <= this.end else current >= this.end) : (current += this.increment) { + while (if (this.increment > 0) current <= this._end else current >= this._end) : (current += this.increment) { const str = std.fmt.allocPrint(arena.allocator(), "{d}", .{current}) catch bun.outOfMemory(); defer _ = arena.reset(.retain_capacity); _ = this.print(str); @@ -11136,8 +11131,7 @@ pub const Interpreter = struct { pub const ChildPtr = IOReaderChildPtr; pub const ReaderImpl = bun.io.BufferedReader; - pub const DEBUG_REFCOUNT_NAME: []const u8 = "IOReaderRefCount"; - pub usingnamespace bun.NewRefCounted(@This(), IOReader.asyncDeinit); + pub usingnamespace bun.NewRefCounted(@This(), asyncDeinit, "IOReaderRefCount"); const InitFlags = packed struct(u8) { pollable: bool = false, @@ -11403,8 +11397,6 @@ pub const Interpreter = struct { started: bool = false, flags: InitFlags = .{}, - pub const DEBUG_REFCOUNT_NAME: []const u8 = "IOWriterRefCount"; - const debug = bun.Output.scoped(.IOWriter, true); const ChildPtr = IOWriterChildPtr; @@ -11416,7 +11408,7 @@ pub const Interpreter = struct { pub const auto_poll = false; - pub usingnamespace bun.NewRefCounted(@This(), asyncDeinit); + pub usingnamespace bun.NewRefCounted(@This(), asyncDeinit, "IOWriterRefCount"); const This = @This(); pub const WriterImpl = bun.io.BufferedWriter( This, @@ -11940,7 +11932,7 @@ pub fn StatePtrUnion(comptime TypesValue: anytype) type { pub fn init(_ptr: anytype) @This() { const tyinfo = @typeInfo(@TypeOf(_ptr)); - if (tyinfo != .Pointer) @compileError("Only pass pointers to StatePtrUnion.init(), you gave us a: " ++ @typeName(@TypeOf(_ptr))); + if 
(tyinfo != .pointer) @compileError("Only pass pointers to StatePtrUnion.init(), you gave us a: " ++ @typeName(@TypeOf(_ptr))); const Type = std.meta.Child(@TypeOf(_ptr)); Ptr.assert_type(Type); @@ -11957,7 +11949,7 @@ pub fn MaybeChild(comptime T: type) type { return switch (@typeInfo(T)) { .Array => |info| info.child, .Vector => |info| info.child, - .Pointer => |info| info.child, + .pointer => |info| info.child, .Optional => |info| info.child, else => T, }; @@ -12089,8 +12081,8 @@ inline fn errnocast(errno: anytype) u16 { inline fn fastMod(val: anytype, comptime rhs: comptime_int) @TypeOf(val) { const Value = @typeInfo(@TypeOf(val)); - if (Value != .Int) @compileError("LHS of fastMod should be an int"); - if (Value.Int.signedness != .unsigned) @compileError("LHS of fastMod should be unsigned"); + if (Value != .int) @compileError("LHS of fastMod should be an int"); + if (Value.int.signedness != .unsigned) @compileError("LHS of fastMod should be unsigned"); if (!comptime std.math.isPowerOfTwo(rhs)) @compileError("RHS of fastMod should be power of 2"); return val & (rhs - 1); @@ -12242,7 +12234,7 @@ const ShellSyscall = struct { return Syscall.fstatat(dir, path_); } - fn openat(dir: bun.FileDescriptor, path: [:0]const u8, flags: bun.Mode, perm: bun.Mode) Maybe(bun.FileDescriptor) { + fn openat(dir: bun.FileDescriptor, path: [:0]const u8, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { if (bun.Environment.isWindows) { if (flags & bun.O.DIRECTORY != 0) { if (ResolvePath.Platform.posix.isAbsolute(path[0..path.len])) { diff --git a/src/shell/shell.zig b/src/shell/shell.zig index 6133dc62c17935..352ca854908a4d 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -226,7 +226,7 @@ pub const GlobalJS = struct { }; } - pub inline fn createNullDelimitedEnvMap(this: @This(), alloc: Allocator) ![:null]?[*:0]u8 { + pub inline fn createNullDelimitedEnvMap(this: @This(), alloc: Allocator) ![:null]?[*:0]const u8 { return 
this.globalThis.bunVM().transpiler.env.map.createNullDelimitedEnvMap(alloc); } @@ -298,7 +298,7 @@ pub const GlobalMini = struct { }; } - pub inline fn createNullDelimitedEnvMap(this: @This(), alloc: Allocator) ![:null]?[*:0]u8 { + pub inline fn createNullDelimitedEnvMap(this: @This(), alloc: Allocator) ![:null]?[*:0]const u8 { return this.mini.env.?.map.createNullDelimitedEnvMap(alloc); } @@ -770,10 +770,10 @@ pub const AST = struct { return .{ .stdout = true, .duplicate = true }; } - pub fn toFlags(this: RedirectFlags) bun.Mode { - const read_write_flags: bun.Mode = if (this.stdin) bun.O.RDONLY else bun.O.WRONLY | bun.O.CREAT; - const extra: bun.Mode = if (this.append) bun.O.APPEND else bun.O.TRUNC; - const final_flags: bun.Mode = if (this.stdin) read_write_flags else extra | read_write_flags; + pub fn toFlags(this: RedirectFlags) i32 { + const read_write_flags: i32 = if (this.stdin) bun.O.RDONLY else bun.O.WRONLY | bun.O.CREAT; + const extra: i32 = if (this.append) bun.O.APPEND else bun.O.TRUNC; + const final_flags: i32 = if (this.stdin) read_write_flags else extra | read_write_flags; return final_flags; } @@ -986,7 +986,7 @@ pub const Parser = struct { /// If you make a subparser and call some fallible functions on it, you need to catch the errors and call `.continue_from_subparser()`, otherwise errors /// will not propagate upwards to the parent. 
pub fn make_subparser(this: *Parser, kind: SubshellKind) Parser { - const subparser = .{ + const subparser: Parser = .{ .strpool = this.strpool, .tokens = this.tokens, .alloc = this.alloc, @@ -1146,7 +1146,7 @@ pub const Parser = struct { return expr; } - fn extractIfClauseTextToken(comptime if_clause_token: @TypeOf(.EnumLiteral)) []const u8 { + fn extractIfClauseTextToken(comptime if_clause_token: @TypeOf(.enum_literal)) []const u8 { const tagname = comptime switch (if_clause_token) { .@"if" => "if", .@"else" => "else", @@ -1158,7 +1158,7 @@ pub const Parser = struct { return tagname; } - fn expectIfClauseTextToken(self: *Parser, comptime if_clause_token: @TypeOf(.EnumLiteral)) Token { + fn expectIfClauseTextToken(self: *Parser, comptime if_clause_token: @TypeOf(.enum_literal)) Token { const tagname = comptime extractIfClauseTextToken(if_clause_token); if (bun.Environment.allow_assert) assert(@as(TokenTag, self.peek()) == .Text); if (self.peek() == .Text and @@ -1172,14 +1172,14 @@ pub const Parser = struct { @panic("Expected: " ++ @tagName(if_clause_token)); } - fn isIfClauseTextToken(self: *Parser, comptime if_clause_token: @TypeOf(.EnumLiteral)) bool { + fn isIfClauseTextToken(self: *Parser, comptime if_clause_token: @TypeOf(.enum_literal)) bool { return switch (self.peek()) { .Text => |range| self.isIfClauseTextTokenImpl(range, if_clause_token), else => false, }; } - fn isIfClauseTextTokenImpl(self: *Parser, range: Token.TextRange, comptime if_clause_token: @TypeOf(.EnumLiteral)) bool { + fn isIfClauseTextTokenImpl(self: *Parser, range: Token.TextRange, comptime if_clause_token: @TypeOf(.enum_literal)) bool { const tagname = comptime extractIfClauseTextToken(if_clause_token); return bun.strings.eqlComptime(self.text(range), tagname); } @@ -2248,7 +2248,7 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { fn make_sublexer(self: *@This(), kind: SubShellKind) @This() { log("[lex] make sublexer", .{}); - var sublexer = .{ + var sublexer: @This() = .{ 
.chars = self.chars, .strpool = self.strpool, .tokens = self.tokens, @@ -2727,7 +2727,7 @@ pub fn NewLexer(comptime encoding: StringEncoding) type { } fn appendUnicodeCharToStrPool(self: *@This(), char: Chars.CodepointType) !void { - @setCold(true); + @branchHint(.cold); const ichar: i32 = @intCast(char); var bytes: [4]u8 = undefined; diff --git a/src/shell/subproc.zig b/src/shell/subproc.zig index 940e117562ebe4..9ced2d1fa7f91c 100644 --- a/src/shell/subproc.zig +++ b/src/shell/subproc.zig @@ -511,7 +511,7 @@ pub const ShellSubprocess = struct { return this.process.kill(@intCast(sig)); } - // fn hasCalledGetter(this: *Subprocess, comptime getter: @Type(.EnumLiteral)) bool { + // fn hasCalledGetter(this: *Subprocess, comptime getter: @Type(.enum_literal)) bool { // return this.observable_getters.contains(getter); // } @@ -528,7 +528,7 @@ pub const ShellSubprocess = struct { // this.ipc_mode = .none; } - pub fn closeIO(this: *@This(), comptime io: @Type(.EnumLiteral)) void { + pub fn closeIO(this: *@This(), comptime io: @Type(.enum_literal)) void { if (this.closed.contains(io)) return; log("close IO {s}", .{@tagName(io)}); this.closed.insert(io); @@ -1020,7 +1020,7 @@ pub const PipeReader = struct { } }; - pub usingnamespace bun.NewRefCounted(PipeReader, deinit); + pub usingnamespace bun.NewRefCounted(PipeReader, deinit, null); pub const CapturedWriter = struct { dead: bool = true, diff --git a/src/sourcemap/CodeCoverage.zig b/src/sourcemap/CodeCoverage.zig index 4385ec626b67d2..2075f1d4c94571 100644 --- a/src/sourcemap/CodeCoverage.zig +++ b/src/sourcemap/CodeCoverage.zig @@ -706,10 +706,10 @@ pub const ByteRangeMapping = struct { comptime { if (bun.Environment.isNative) { - @export(ByteRangeMapping.generate, .{ .name = "ByteRangeMapping__generate" }); - @export(ByteRangeMapping.findExecutedLines, .{ .name = "ByteRangeMapping__findExecutedLines" }); - @export(ByteRangeMapping.find, .{ .name = "ByteRangeMapping__find" }); - @export(ByteRangeMapping.getSourceID, .{ 
.name = "ByteRangeMapping__getSourceID" }); + @export(&ByteRangeMapping.generate, .{ .name = "ByteRangeMapping__generate" }); + @export(&ByteRangeMapping.findExecutedLines, .{ .name = "ByteRangeMapping__findExecutedLines" }); + @export(&ByteRangeMapping.find, .{ .name = "ByteRangeMapping__find" }); + @export(&ByteRangeMapping.getSourceID, .{ .name = "ByteRangeMapping__getSourceID" }); } } diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 41ddfd67f13d67..50c37980c19953 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -640,7 +640,7 @@ pub const ParsedSourceMap = struct { is_standalone_module_graph: bool = false, - pub usingnamespace bun.NewThreadSafeRefCounted(ParsedSourceMap, deinitFn); + pub usingnamespace bun.NewThreadSafeRefCounted(ParsedSourceMap, deinitFn, null); const SourceContentPtr = packed struct(u64) { load_hint: SourceMapLoadHint = .none, diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig index 4ca76484e4ae9d..cacf84965719b9 100644 --- a/src/sql/postgres.zig +++ b/src/sql/postgres.zig @@ -244,7 +244,7 @@ pub const PostgresSQLContext = struct { comptime { const js_init = JSC.toJSHostFunction(init); - @export(js_init, .{ .name = "PostgresSQLContext__init" }); + @export(&js_init, .{ .name = "PostgresSQLContext__init" }); } }; pub const PostgresSQLQueryResultMode = enum(u8) { @@ -800,7 +800,7 @@ pub const PostgresSQLQuery = struct { comptime { const jscall = JSC.toJSHostFunction(call); - @export(jscall, .{ .name = "PostgresSQLQuery__createInstance" }); + @export(&jscall, .{ .name = "PostgresSQLQuery__createInstance" }); } }; @@ -879,7 +879,7 @@ pub const PostgresRequest = struct { continue; } if (comptime bun.Environment.enable_logs) { - debug(" -> {s}", .{tag.name() orelse "(unknown)"}); + debug(" -> {s}", .{tag.tagName() orelse "(unknown)"}); } switch ( @@ -1419,12 +1419,10 @@ pub const PostgresSQLConnection = struct { } pub fn hasPendingActivity(this: *PostgresSQLConnection) bool { - 
@fence(.acquire); return this.pending_activity_count.load(.acquire) > 0; } fn updateHasPendingActivity(this: *PostgresSQLConnection) void { - @fence(.release); const a: u32 = if (this.requests.readableLength() > 0) 1 else 0; const b: u32 = if (this.status != .disconnected) 1 else 0; this.pending_activity_count.store(a + b, .release); @@ -1728,7 +1726,7 @@ pub const PostgresSQLConnection = struct { comptime { const jscall = JSC.toJSHostFunction(call); - @export(jscall, .{ .name = "PostgresSQLConnection__createInstance" }); + @export(&jscall, .{ .name = "PostgresSQLConnection__createInstance" }); } pub fn call(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { @@ -2412,7 +2410,7 @@ pub const PostgresSQLConnection = struct { fn pg_ntoT(comptime IntSize: usize, i: anytype) std.meta.Int(.unsigned, IntSize) { @setRuntimeSafety(false); const T = @TypeOf(i); - if (@typeInfo(T) == .Array) { + if (@typeInfo(T) == .array) { return pg_ntoT(IntSize, @as(std.meta.Int(.unsigned, IntSize), @bitCast(i))); } @@ -2668,7 +2666,7 @@ pub const PostgresSQLConnection = struct { return PostgresSQLConnection.queriesGetCached(this.js_value) orelse .zero; } - pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.EnumLiteral), comptime Context: type, reader: protocol.NewReader(Context)) AnyPostgresError!void { + pub fn on(this: *PostgresSQLConnection, comptime MessageType: @Type(.enum_literal), comptime Context: type, reader: protocol.NewReader(Context)) AnyPostgresError!void { debug("on({s})", .{@tagName(MessageType)}); if (comptime MessageType != .ReadyForQuery) { this.is_ready_for_query = false; diff --git a/src/sql/postgres/postgres_types.zig b/src/sql/postgres/postgres_types.zig index a6fa23ff73f906..83a1e8488138cc 100644 --- a/src/sql/postgres/postgres_types.zig +++ b/src/sql/postgres/postgres_types.zig @@ -177,7 +177,7 @@ pub const Tag = enum(short) { jsonpath_array = 4073, _, - pub fn name(this: Tag) ?[]const u8 { + pub fn 
tagName(this: Tag) ?[]const u8 { return std.enums.tagName(Tag, this); } diff --git a/src/string.zig b/src/string.zig index f0dda92cbb6d73..411be37d49fe35 100644 --- a/src/string.zig +++ b/src/string.zig @@ -592,10 +592,10 @@ pub const String = extern struct { const info = @typeInfo(Type); // Zig string literals - if (info == .Pointer and info.Pointer.size == .One and info.Pointer.is_const) { - const child_info = @typeInfo(info.Pointer.child); - if (child_info == .Array and child_info.Array.child == u8) { - if (child_info.Array.len == 0) return String.empty; + if (info == .pointer and info.pointer.size == .one and info.pointer.is_const) { + const child_info = @typeInfo(info.pointer.child); + if (child_info == .array and child_info.array.child == u8) { + if (child_info.array.len == 0) return String.empty; return static(value); } } diff --git a/src/string_immutable.zig b/src/string_immutable.zig index f3446289df69a0..365d1acda0bc56 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -1091,7 +1091,7 @@ fn eqlComptimeCheckLenWithKnownType(comptime Type: type, a: []const Type, compti /// strings.eqlComptime(input, "hello world"); /// strings.eqlComptime(input, "hai"); pub fn eqlComptimeCheckLenWithType(comptime Type: type, a: []const Type, comptime b: anytype, comptime check_len: bool) bool { - return eqlComptimeCheckLenWithKnownType(comptime Type, a, if (@typeInfo(@TypeOf(b)) != .Pointer) &b else b, comptime check_len); + return eqlComptimeCheckLenWithKnownType(comptime Type, a, if (@typeInfo(@TypeOf(b)) != .pointer) &b else b, comptime check_len); } pub fn eqlCaseInsensitiveASCIIIgnoreLength( @@ -4762,6 +4762,7 @@ pub fn indexOfLineRanges(text: []const u8, target_line: u32, comptime line_range else => continue, } } + @panic("unreachable"); }; if (ranges.len == line_range_count and current_line <= target_line) { @@ -5553,7 +5554,7 @@ pub fn cloneNormalizingSeparators( ) ![]u8 { // remove duplicate slashes in the file path const base = 
withoutTrailingSlash(input); - var tokenized = std.mem.tokenize(u8, base, std.fs.path.sep_str); + var tokenized = std.mem.tokenizeScalar(u8, base, std.fs.path.sep); var buf = try allocator.alloc(u8, base.len + 2); if (comptime Environment.allow_assert) assert(base.len > 0); if (base[0] == std.fs.path.sep) { diff --git a/src/sync.zig b/src/sync.zig index b71040f6fab026..e147116ec6485c 100644 --- a/src/sync.zig +++ b/src/sync.zig @@ -634,7 +634,7 @@ pub const RwLock = if (@import("builtin").os.tag != .windows and @import("builti writer_count: i32 = 0, waiters: [2]?*anyopaque = [_]?*anyopaque{ null, null }, }, - .kfreebsd, .freebsd, .openbsd => extern struct { + .freebsd, .openbsd => extern struct { ptr: ?*anyopaque = null, }, .hermit => extern struct { @@ -946,7 +946,7 @@ else if (@import("builtin").os.tag == .linux) } fn lockSlow(self: *Mutex, current_state: State) void { - @setCold(true); + @branchHint(.cold); var new_state = current_state; while (true) { @@ -992,7 +992,7 @@ else if (@import("builtin").os.tag == .linux) } fn unlockSlow(self: *Mutex) void { - @setCold(true); + @branchHint(.cold); Futex.wake(@as(*const i32, @ptrCast(&self.state))); } diff --git a/src/sys.zig b/src/sys.zig index 039a9625537833..1d6dc575beb23c 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -65,11 +65,11 @@ pub const O = switch (Environment.os) { pub const NOFOLLOW = 0x0100; pub const SYMLINK = 0x200000; pub const EVTONLY = 0x8000; - pub const CLOEXEC = 0x1000000; + pub const CLOEXEC = 0x01000000; pub const ACCMODE = 3; pub const ALERT = 536870912; pub const ASYNC = 64; - pub const DIRECTORY = 1048576; + pub const DIRECTORY = 0x00100000; pub const DP_GETRAWENCRYPTED = 1; pub const DP_GETRAWUNENCRYPTED = 2; pub const DSYNC = 4194304; @@ -456,6 +456,7 @@ pub const Error = struct { /// Simpler formatting which does not allocate a message pub fn toShellSystemError(this: Error) SystemError { + @setEvalBranchQuota(1_000_000); var err = SystemError{ .errno = @as(c_int, this.errno) * -1, 
.syscall = bun.String.static(@tagName(this.syscall)), @@ -878,7 +879,7 @@ pub fn fstatat(fd: bun.FileDescriptor, path: [:0]const u8) Maybe(bun.Stat) { return Maybe(bun.Stat){ .result = stat_buf }; } -pub fn mkdir(file_path: [:0]const u8, flags: bun.Mode) Maybe(void) { +pub fn mkdir(file_path: [:0]const u8, flags: mode_t) Maybe(void) { return switch (Environment.os) { .mac => Maybe(void).errnoSysP(syscall.mkdir(file_path, flags), .mkdir, file_path) orelse Maybe(void).success, @@ -888,7 +889,7 @@ pub fn mkdir(file_path: [:0]const u8, flags: bun.Mode) Maybe(void) { const wbuf = bun.WPathBufferPool.get(); defer bun.WPathBufferPool.put(wbuf); return Maybe(void).errnoSysP( - kernel32.CreateDirectoryW(bun.strings.toKernel32Path(wbuf, file_path).ptr, null), + bun.windows.CreateDirectoryW(bun.strings.toKernel32Path(wbuf, file_path).ptr, null), .mkdir, file_path, ) orelse Maybe(void).success; @@ -898,7 +899,7 @@ pub fn mkdir(file_path: [:0]const u8, flags: bun.Mode) Maybe(void) { }; } -pub fn mkdirA(file_path: []const u8, flags: bun.Mode) Maybe(void) { +pub fn mkdirA(file_path: []const u8, flags: mode_t) Maybe(void) { if (comptime Environment.isMac) { return Maybe(void).errnoSysP(syscall.mkdir(&(std.posix.toPosixPath(file_path) catch return Maybe(void){ .err = .{ @@ -930,7 +931,7 @@ pub fn mkdirA(file_path: []const u8, flags: bun.Mode) Maybe(void) { } } -pub fn mkdirOSPath(file_path: bun.OSPathSliceZ, flags: bun.Mode) Maybe(void) { +pub fn mkdirOSPath(file_path: bun.OSPathSliceZ, flags: mode_t) Maybe(void) { return switch (Environment.os) { else => mkdir(file_path, flags), .windows => { @@ -1572,11 +1573,11 @@ pub noinline fn openFileAtWindowsA( return openFileAtWindowsT(u8, dirFd, path, opts); } -pub fn openatWindowsT(comptime T: type, dir: bun.FileDescriptor, path: []const T, flags: bun.Mode, perm: bun.Mode) Maybe(bun.FileDescriptor) { +pub fn openatWindowsT(comptime T: type, dir: bun.FileDescriptor, path: []const T, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { 
return openatWindowsTMaybeNormalize(T, dir, path, flags, perm, true); } -fn openatWindowsTMaybeNormalize(comptime T: type, dir: bun.FileDescriptor, path: []const T, flags: bun.Mode, perm: bun.Mode, comptime normalize: bool) Maybe(bun.FileDescriptor) { +fn openatWindowsTMaybeNormalize(comptime T: type, dir: bun.FileDescriptor, path: []const T, flags: i32, perm: bun.Mode, comptime normalize: bool) Maybe(bun.FileDescriptor) { if (flags & O.DIRECTORY != 0) { const windows_options: WindowsOpenDirOptions = .{ .iterable = flags & O.PATH == 0, @@ -1652,7 +1653,7 @@ fn openatWindowsTMaybeNormalize(comptime T: type, dir: bun.FileDescriptor, path: pub fn openatWindows( dir: anytype, path: []const u16, - flags: bun.Mode, + flags: i32, perm: bun.Mode, ) Maybe(bun.FileDescriptor) { return openatWindowsT(u16, bun.toFD(dir), path, flags, perm); @@ -1661,13 +1662,13 @@ pub fn openatWindows( pub fn openatWindowsA( dir: bun.FileDescriptor, path: []const u8, - flags: bun.Mode, + flags: i32, perm: bun.Mode, ) Maybe(bun.FileDescriptor) { return openatWindowsT(u8, dir, path, flags, perm); } -pub fn openatOSPath(dirfd: bun.FileDescriptor, file_path: bun.OSPathSliceZ, flags: bun.Mode, perm: bun.Mode) Maybe(bun.FileDescriptor) { +pub fn openatOSPath(dirfd: bun.FileDescriptor, file_path: bun.OSPathSliceZ, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { if (comptime Environment.isMac) { // https://opensource.apple.com/source/xnu/xnu-7195.81.3/libsyscall/wrappers/open-base.c const rc = syscall.@"openat$NOCANCEL"(dirfd.cast(), file_path.ptr, @as(c_uint, @intCast(flags)), @as(c_int, @intCast(perm))); @@ -1699,7 +1700,7 @@ pub fn openatOSPath(dirfd: bun.FileDescriptor, file_path: bun.OSPathSliceZ, flag } } -pub fn access(path: bun.OSPathSliceZ, mode: bun.Mode) Maybe(void) { +pub fn access(path: bun.OSPathSliceZ, mode: i32) Maybe(void) { if (Environment.isWindows) { const attrs = getFileAttributes(path) orelse { return .{ .err = .{ @@ -1723,7 +1724,7 @@ pub fn access(path: 
bun.OSPathSliceZ, mode: bun.Mode) Maybe(void) { return Maybe(void).errnoSysP(syscall.access(path, mode), .access, path) orelse .{ .result = {} }; } -pub fn openat(dirfd: bun.FileDescriptor, file_path: [:0]const u8, flags: bun.Mode, perm: bun.Mode) Maybe(bun.FileDescriptor) { +pub fn openat(dirfd: bun.FileDescriptor, file_path: [:0]const u8, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { if (comptime Environment.isWindows) { return openatWindowsT(u8, dirfd, file_path, flags, perm); } else { @@ -1745,7 +1746,7 @@ pub fn openatFileWithLibuvFlags(dirfd: bun.FileDescriptor, file_path: [:0]const } } -pub fn openatA(dirfd: bun.FileDescriptor, file_path: []const u8, flags: bun.Mode, perm: bun.Mode) Maybe(bun.FileDescriptor) { +pub fn openatA(dirfd: bun.FileDescriptor, file_path: []const u8, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { if (comptime Environment.isWindows) { return openatWindowsT(u8, dirfd, file_path, flags, perm); } @@ -1765,12 +1766,12 @@ pub fn openatA(dirfd: bun.FileDescriptor, file_path: []const u8, flags: bun.Mode ); } -pub fn openA(file_path: []const u8, flags: bun.Mode, perm: bun.Mode) Maybe(bun.FileDescriptor) { +pub fn openA(file_path: []const u8, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { // this is what open() does anyway. return openatA(bun.toFD((std.fs.cwd().fd)), file_path, flags, perm); } -pub fn open(file_path: [:0]const u8, flags: bun.Mode, perm: bun.Mode) Maybe(bun.FileDescriptor) { +pub fn open(file_path: [:0]const u8, flags: i32, perm: bun.Mode) Maybe(bun.FileDescriptor) { // TODO(@paperclover): this should not use libuv; when the libuv path is // removed here, the call sites in node_fs.zig should make sure they parse // the libuv specific file flags using the WindowsOpenFlags structure. 
@@ -2135,7 +2136,7 @@ pub fn read(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { var amount_read: u32 = 0; const rc = kernel32.ReadFile(fd.cast(), buf.ptr, @as(u32, @intCast(adjusted_len)), &amount_read, null); if (rc == windows.FALSE) { - const ret = .{ + const ret: Maybe(usize) = .{ .err = Syscall.Error{ .errno = @intFromEnum(bun.windows.getLastErrno()), .syscall = .read, @@ -2534,7 +2535,7 @@ pub fn symlinkW(dest: [:0]const u16, target: [:0]const u16, options: WindowsSyml while (true) { const flags = options.flags(); - if (windows.kernel32.CreateSymbolicLinkW(dest, target, flags) == 0) { + if (windows.CreateSymbolicLinkW(dest, target, flags) == 0) { const errno = bun.windows.Win32Error.get(); log("CreateSymbolicLinkW({}, {}, {any}) = {s}", .{ bun.fmt.fmtPath(u16, dest, .{}), @@ -2587,7 +2588,7 @@ pub fn clonefile(from: [:0]const u8, to: [:0]const u8) Maybe(void) { } } -pub fn copyfile(from: [:0]const u8, to: [:0]const u8, flags: c_int) Maybe(void) { +pub fn copyfile(from: [:0]const u8, to: [:0]const u8, flags: posix.system.COPYFILE) Maybe(void) { if (comptime !Environment.isMac) @compileError("macOS only"); while (true) { @@ -2599,7 +2600,7 @@ pub fn copyfile(from: [:0]const u8, to: [:0]const u8, flags: c_int) Maybe(void) } } -pub fn fcopyfile(fd_in: bun.FileDescriptor, fd_out: bun.FileDescriptor, flags: u32) Maybe(void) { +pub fn fcopyfile(fd_in: bun.FileDescriptor, fd_out: bun.FileDescriptor, flags: posix.system.COPYFILE) Maybe(void) { if (comptime !Environment.isMac) @compileError("macOS only"); while (true) { @@ -3097,7 +3098,7 @@ pub fn existsOSPath(path: bun.OSPathSliceZ, file_only: bool) bool { null, ); if (rc == w.INVALID_HANDLE_VALUE) return false; - defer _ = std.os.windows.kernel32.CloseHandle(rc); + defer _ = bun.windows.CloseHandle(rc); return true; } return true; @@ -3243,11 +3244,11 @@ pub fn futimens(fd: bun.FileDescriptor, atime: JSC.Node.TimeLike, mtime: JSC.Nod while (true) { const rc = syscall.futimens(fd.cast(), &[2]syscall.timespec{ - .{ 
.tv_sec = @intCast(atime.tv_sec), .tv_nsec = atime.tv_nsec }, - .{ .tv_sec = @intCast(mtime.tv_sec), .tv_nsec = mtime.tv_nsec }, + .{ .sec = @intCast(atime.sec), .nsec = atime.nsec }, + .{ .sec = @intCast(mtime.sec), .nsec = mtime.nsec }, }); - log("futimens({}, accessed=({d}, {d}), modified=({d}, {d})) = {d}", .{ fd, atime.tv_sec, atime.tv_nsec, mtime.tv_sec, mtime.tv_nsec, rc }); + log("futimens({}, accessed=({d}, {d}), modified=({d}, {d})) = {d}", .{ fd, atime.sec, atime.nsec, mtime.sec, mtime.nsec, rc }); if (rc == 0) { return Maybe(void).success; @@ -3267,8 +3268,8 @@ fn utimensWithFlags(path: bun.OSPathSliceZ, atime: JSC.Node.TimeLike, mtime: JSC while (true) { var times: [2]syscall.timespec = .{ - .{ .tv_sec = @intCast(atime.tv_sec), .tv_nsec = atime.tv_nsec }, - .{ .tv_sec = @intCast(mtime.tv_sec), .tv_nsec = mtime.tv_nsec }, + .{ .sec = @intCast(atime.sec), .nsec = atime.nsec }, + .{ .sec = @intCast(mtime.sec), .nsec = mtime.nsec }, }; const rc = syscall.utimensat( std.fs.cwd().fd, @@ -3278,7 +3279,7 @@ fn utimensWithFlags(path: bun.OSPathSliceZ, atime: JSC.Node.TimeLike, mtime: JSC flags, ); - log("utimensat({d}, atime=({d}, {d}), mtime=({d}, {d})) = {d}", .{ std.fs.cwd().fd, atime.tv_sec, atime.tv_nsec, mtime.tv_sec, mtime.tv_nsec, rc }); + log("utimensat({d}, atime=({d}, {d}), mtime=({d}, {d})) = {d}", .{ std.fs.cwd().fd, atime.sec, atime.nsec, mtime.sec, mtime.nsec, rc }); if (rc == 0) { return Maybe(void).success; @@ -3752,18 +3753,18 @@ pub const File = struct { // "handle" matches std.fs.File handle: bun.FileDescriptor, - pub fn openat(other: anytype, path: [:0]const u8, flags: bun.Mode, mode: bun.Mode) Maybe(File) { + pub fn openat(other: anytype, path: [:0]const u8, flags: i32, mode: bun.Mode) Maybe(File) { return switch (This.openat(bun.toFD(other), path, flags, mode)) { .result => |fd| .{ .result = .{ .handle = fd } }, .err => |err| .{ .err = err }, }; } - pub fn open(path: [:0]const u8, flags: bun.Mode, mode: bun.Mode) Maybe(File) { + pub fn 
open(path: [:0]const u8, flags: i32, mode: bun.Mode) Maybe(File) { return File.openat(bun.FD.cwd(), path, flags, mode); } - pub fn openatOSPath(other: anytype, path: bun.OSPathSliceZ, flags: bun.Mode, mode: bun.Mode) Maybe(File) { + pub fn openatOSPath(other: anytype, path: bun.OSPathSliceZ, flags: i32, mode: bun.Mode) Maybe(File) { return switch (This.openatOSPath(bun.toFD(other), path, flags, mode)) { .result => |fd| .{ .result = .{ .handle = fd } }, .err => |err| .{ .err = err }, diff --git a/src/sys_uv.zig b/src/sys_uv.zig index 9fc18d100f3bfa..9a574bd581bcd3 100644 --- a/src/sys_uv.zig +++ b/src/sys_uv.zig @@ -38,7 +38,7 @@ pub const access = bun.sys.access; // Note: `req = undefined; req.deinit()` has a saftey-check in a debug build -pub fn open(file_path: [:0]const u8, c_flags: bun.Mode, _perm: bun.Mode) Maybe(bun.FileDescriptor) { +pub fn open(file_path: [:0]const u8, c_flags: i32, _perm: bun.Mode) Maybe(bun.FileDescriptor) { assertIsValidWindowsPath(u8, file_path); var req: uv.fs_t = uv.fs_t.uninitialized; diff --git a/src/tagged_pointer.zig b/src/tagged_pointer.zig index f81aa656b3a111..cb0d798feb533d 100644 --- a/src/tagged_pointer.zig +++ b/src/tagged_pointer.zig @@ -7,7 +7,6 @@ const Environment = bun.Environment; const strings = bun.strings; const default_allocator = bun.default_allocator; const C = bun.C; -const typeBaseName = @import("./meta.zig").typeBaseName; const TagSize = u15; const AddressableSize = u49; @@ -24,7 +23,7 @@ pub const TaggedPointer = packed struct { return .{ ._ptr = 0, .data = data }; } - if (comptime @typeInfo(Ptr) != .Pointer and Ptr != ?*anyopaque) { + if (comptime @typeInfo(Ptr) != .pointer and Ptr != ?*anyopaque) { @compileError(@typeName(Ptr) ++ " must be a ptr, received: " ++ @tagName(@typeInfo(Ptr))); } @@ -74,7 +73,7 @@ pub fn TagTypeEnumWithTypeMap(comptime Types: anytype) struct { @memset(&typeMap, TypeMapT{ .value = 0, .ty = void, .name = "" }); inline for (Types, 0..) 
|field, i| { - const name = comptime typeBaseName(@typeName(field)); + const name = comptime @typeName(field); enumFields[i] = .{ .name = name, .value = 1024 - i, @@ -84,7 +83,7 @@ pub fn TagTypeEnumWithTypeMap(comptime Types: anytype) struct { return .{ .tag_type = @Type(.{ - .Enum = .{ + .@"enum" = .{ .tag_type = TagSize, .fields = &enumFields, .decls = &.{}, @@ -106,7 +105,7 @@ pub fn TaggedPointerUnion(comptime Types: anytype) type { pub const type_map: TypeMap(Types) = result.ty_map; repr: TaggedPointer, - pub const Null = .{ .repr = .{ ._ptr = 0, .data = 0 } }; + pub const Null: @This() = .{ .repr = .{ ._ptr = 0, .data = 0 } }; pub fn clear(this: *@This()) void { this.* = Null; @@ -132,7 +131,7 @@ pub fn TaggedPointerUnion(comptime Types: anytype) type { const This = @This(); pub fn assert_type(comptime Type: type) void { - const name = comptime typeBaseName(@typeName(Type)); + const name = comptime @typeName(Type); if (!comptime @hasField(Tag, name)) { @compileError("TaggedPointerUnion does not have " ++ name ++ "."); } @@ -163,7 +162,7 @@ pub fn TaggedPointerUnion(comptime Types: anytype) type { pub inline fn is(this: This, comptime Type: type) bool { comptime assert_type(Type); - return this.repr.data == comptime @intFromEnum(@field(Tag, typeBaseName(@typeName(Type)))); + return this.repr.data == comptime @intFromEnum(@field(Tag, @typeName(Type))); } pub fn set(this: *@This(), _ptr: anytype) void { @@ -177,9 +176,9 @@ pub fn TaggedPointerUnion(comptime Types: anytype) type { pub inline fn isValid(this: This) bool { return switch (this.repr.data) { @intFromEnum( - @field(Tag, typeBaseName(@typeName(Types[Types.len - 1]))), + @field(Tag, @typeName(Types[Types.len - 1])), )...@intFromEnum( - @field(Tag, typeBaseName(@typeName(Types[0]))), + @field(Tag, @typeName(Types[0])), ) => true, else => false, }; @@ -200,7 +199,7 @@ pub fn TaggedPointerUnion(comptime Types: anytype) type { pub inline fn init(_ptr: anytype) @This() { const tyinfo = 
@typeInfo(@TypeOf(_ptr)); - if (tyinfo != .Pointer) @compileError("Only pass pointers to TaggedPointerUnion.init(), you gave us a: " ++ @typeName(@TypeOf(_ptr))); + if (tyinfo != .pointer) @compileError("Only pass pointers to TaggedPointerUnion.init(), you gave us a: " ++ @typeName(@TypeOf(_ptr))); const Type = std.meta.Child(@TypeOf(_ptr)); return initWithType(Type, _ptr); @@ -208,8 +207,8 @@ pub fn TaggedPointerUnion(comptime Types: anytype) type { pub inline fn initWithType(comptime Type: type, _ptr: anytype) @This() { const tyinfo = @typeInfo(@TypeOf(_ptr)); - if (tyinfo != .Pointer) @compileError("Only pass pointers to TaggedPointerUnion.init(), you gave us a: " ++ @typeName(@TypeOf(_ptr))); - const name = comptime typeBaseName(@typeName(Type)); + if (tyinfo != .pointer) @compileError("Only pass pointers to TaggedPointerUnion.init(), you gave us a: " ++ @typeName(@TypeOf(_ptr))); + const name = comptime @typeName(Type); // there will be a compiler error if the passed in type doesn't exist in the enum return This{ .repr = TaggedPointer.init(_ptr, @intFromEnum(@field(Tag, name))) }; diff --git a/src/thread_pool.zig b/src/thread_pool.zig index 4186d489c2a08c..2cf8c738e351a0 100644 --- a/src/thread_pool.zig +++ b/src/thread_pool.zig @@ -766,7 +766,6 @@ const Event = struct { // Acquire barrier to ensure operations before the shutdown() are seen after the wait(). // Shutdown is rare so it's better to have an Acquire barrier here instead of on CAS failure + load which are common. if (state == SHUTDOWN) { - @fence(.acquire); return; } diff --git a/src/toml/toml_lexer.zig b/src/toml/toml_lexer.zig index 5984f33d26cfc4..6467efd7c342fe 100644 --- a/src/toml/toml_lexer.zig +++ b/src/toml/toml_lexer.zig @@ -77,7 +77,7 @@ pub const Lexer = struct { } pub fn syntaxError(self: *Lexer) !void { - @setCold(true); + @branchHint(.cold); // Only add this if there is not already an error. // It is possible that there is a more descriptive error already emitted. 
@@ -88,7 +88,7 @@ pub const Lexer = struct { } pub fn addError(self: *Lexer, _loc: usize, comptime format: []const u8, args: anytype) void { - @setCold(true); + @branchHint(.cold); var __loc = logger.usize2Loc(_loc); if (__loc.eql(self.prev_error_loc)) { @@ -109,20 +109,20 @@ pub const Lexer = struct { } pub fn addDefaultError(self: *Lexer, msg: []const u8) !void { - @setCold(true); + @branchHint(.cold); self.addError(self.start, "{s}", .{msg}); return Error.SyntaxError; } pub fn addSyntaxError(self: *Lexer, _loc: usize, comptime fmt: []const u8, args: anytype) !void { - @setCold(true); + @branchHint(.cold); self.addError(_loc, fmt, args); return Error.SyntaxError; } pub fn addRangeError(self: *Lexer, r: logger.Range, comptime format: []const u8, args: anytype) !void { - @setCold(true); + @branchHint(.cold); if (self.prev_error_loc.eql(r.loc)) { return; diff --git a/src/toml/toml_parser.zig b/src/toml/toml_parser.zig index 0d07d49714394f..3d5a69e097748c 100644 --- a/src/toml/toml_parser.zig +++ b/src/toml/toml_parser.zig @@ -95,7 +95,7 @@ pub const TOML = struct { pub fn e(_: *TOML, t: anytype, loc: logger.Loc) Expr { const Type = @TypeOf(t); - if (@typeInfo(Type) == .Pointer) { + if (@typeInfo(Type) == .pointer) { return Expr.init(std.meta.Child(Type), t.*, loc); } else { return Expr.init(Type, t, loc); diff --git a/src/tracy.zig b/src/tracy.zig index b83c6946abd63d..2ca3874a81c20b 100644 --- a/src/tracy.zig +++ b/src/tracy.zig @@ -531,10 +531,7 @@ fn dlsym(comptime Type: type, comptime symbol: [:0]const u8) ?Type { "tracy.dll", } else .{}; - const RLTD = if (bun.Environment.isMac) - -2 - else - 0; + const RLTD: std.c.RTLD = if (bun.Environment.isMac) @bitCast(@as(i32, -2)) else if (bun.Environment.isLinux) .{} else {}; if (bun.getenvZ("BUN_TRACY_PATH")) |path| { const handle = bun.C.dlopen(&(std.posix.toPosixPath(path) catch unreachable), RLTD); diff --git a/src/trait.zig b/src/trait.zig index 5c3db377499b6b..57bac0db03ed09 100644 --- a/src/trait.zig +++ 
b/src/trait.zig @@ -14,14 +14,14 @@ pub inline fn isZigString(comptime T: type) bool { return comptime blk: { // Only pointer types can be strings, no optionals const info = @typeInfo(T); - if (info != .Pointer) break :blk false; + if (info != .pointer) break :blk false; - const ptr = &info.Pointer; + const ptr = &info.pointer; // Check for CV qualifiers that would prevent coerction to []const u8 if (ptr.is_volatile or ptr.is_allowzero) break :blk false; // If it's already a slice, simple check. - if (ptr.size == .Slice) { + if (ptr.size == .slice) { break :blk ptr.child == u8; } @@ -40,50 +40,50 @@ pub inline fn isZigString(comptime T: type) bool { pub inline fn isSlice(comptime T: type) bool { const info = @typeInfo(T); - return info == .Pointer and info.Pointer.size == .Slice; + return info == .pointer and info.pointer.size == .slice; } pub inline fn isNumber(comptime T: type) bool { return switch (@typeInfo(T)) { - .Int, .Float, .ComptimeInt, .ComptimeFloat => true, + .int, .float, .comptime_int, .comptime_float => true, else => false, }; } pub inline fn isContainer(comptime T: type) bool { return switch (@typeInfo(T)) { - .Struct, .Enum, .Opaque, .Union => true, + .@"struct", .@"enum", .@"opaque", .@"union" => true, else => false, }; } pub inline fn isSingleItemPtr(comptime T: type) bool { const info = @typeInfo(T); - return info == .Pointer and .Pointer.size == .One; + return info == .pointer and info.pointer.size == .One; } pub fn isExternContainer(comptime T: type) bool { return switch (@typeInfo(T)) { - .Struct => |s| s.layout == .@"extern", - .Union => |u| u.layout == .@"extern", + .@"struct" => |s| s.layout == .@"extern", + .@"union" => |u| u.layout == .@"extern", else => false, }; } pub fn isConstPtr(comptime T: type) bool { const info = @typeInfo(T); - return info == .Pointer and info.Pointer.is_const; + return info == .pointer and info.pointer.is_const; } pub fn isIndexable(comptime T: type) bool { const info = @typeInfo(T); return switch (info) { - 
.Pointer => |ptr| switch (ptr.size) { - .One => @typeInfo(ptr.child) == .Array, + .pointer => |ptr| switch (ptr.size) { + .One => @typeInfo(ptr.child) == .array, else => true, }, - .Array, .Vector => true, - .Struct => |s| s.is_tuple, + .array, .vector => true, + .@"struct" => |s| s.is_tuple, else => false, }; } diff --git a/src/transpiler.zig b/src/transpiler.zig index a3b41f967f65e1..7ac0299e04de43 100644 --- a/src/transpiler.zig +++ b/src/transpiler.zig @@ -613,7 +613,7 @@ pub const Transpiler = struct { } pub noinline fn dumpEnvironmentVariables(transpiler: *const Transpiler) void { - @setCold(true); + @branchHint(.cold); const opts = std.json.StringifyOptions{ .whitespace = .indent_2, }; diff --git a/src/url.zig b/src/url.zig index 451060cfa12e89..c3a31bc9206d95 100644 --- a/src/url.zig +++ b/src/url.zig @@ -1043,7 +1043,7 @@ pub const FormData = struct { comptime { const jsFunctionFromMultipartData = JSC.toJSHostFunction(fromMultipartData); - @export(jsFunctionFromMultipartData, .{ .name = "FormData__jsFunctionFromMultipartData" }); + @export(&jsFunctionFromMultipartData, .{ .name = "FormData__jsFunctionFromMultipartData" }); } pub fn toJSFromMultipartData( diff --git a/src/util.zig b/src/util.zig index fbf2e5838d5cf3..d4eadfcf991579 100644 --- a/src/util.zig +++ b/src/util.zig @@ -82,7 +82,7 @@ pub fn fromEntries( pub fn fromMapLike( comptime Map: type, allocator: std.mem.Allocator, - entries: anytype, + entries: []const struct { @FieldType(Map.KV, "key"), @FieldType(Map.KV, "value") }, ) !Map { var map: Map = undefined; if (comptime @hasField(Map, "allocator")) { @@ -91,11 +91,10 @@ pub fn fromMapLike( map = Map{}; } - try map.ensureUnusedCapacity(entries.count()); + try map.ensureUnusedCapacity(allocator, entries.len); - var iter = entries.iterator(); - while (iter.next()) |entry| { - map.putAssumeCapacityNoClobber(entry.key_ptr.*, entry.value_ptr.*); + for (entries) |entry| { + map.putAssumeCapacityNoClobber(entry[0], entry[1]); } return map; @@ -156,11 
+155,11 @@ pub inline fn from( return fromEntries(Array, allocator, DefaultType, default); } - if (comptime @typeInfo(DefaultType) == .Struct) { + if (comptime @typeInfo(DefaultType) == .@"struct") { return fromSlice(Array, allocator, DefaultType, default); } - if (comptime @typeInfo(DefaultType) == .Array) { + if (comptime @typeInfo(DefaultType) == .array) { return fromSlice(Array, allocator, []const Of(Array), @as([]const Of(Array), &default)); } diff --git a/src/watcher.zig b/src/watcher.zig index 7639bc11bed75f..3f710c64ceb354 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -333,11 +333,11 @@ fn appendFileAssumeCapacity( // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/kqueue.2.html var event = std.mem.zeroes(KEvent); - event.flags = std.c.EV_ADD | std.c.EV_CLEAR | std.c.EV_ENABLE; + event.flags = std.c.EV.ADD | std.c.EV.CLEAR | std.c.EV.ENABLE; // we want to know about the vnode - event.filter = std.c.EVFILT_VNODE; + event.filter = std.c.EVFILT.VNODE; - event.fflags = std.c.NOTE_WRITE | std.c.NOTE_RENAME | std.c.NOTE_DELETE; + event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE; // id event.ident = @intCast(fd.int()); @@ -425,15 +425,15 @@ fn appendDirectoryAssumeCapacity( // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/kqueue.2.html var event = std.mem.zeroes(KEvent); - event.flags = std.c.EV_ADD | std.c.EV_CLEAR | std.c.EV_ENABLE; + event.flags = std.c.EV.ADD | std.c.EV.CLEAR | std.c.EV.ENABLE; // we want to know about the vnode - event.filter = std.c.EVFILT_VNODE; + event.filter = std.c.EVFILT.VNODE; // monitor: // - Write // - Rename // - Delete - event.fflags = std.c.NOTE_WRITE | std.c.NOTE_RENAME | std.c.NOTE_DELETE; + event.fflags = std.c.NOTE.WRITE | std.c.NOTE.RENAME | std.c.NOTE.DELETE; // id event.ident = @intCast(fd.int()); diff --git a/src/watcher/INotifyWatcher.zig b/src/watcher/INotifyWatcher.zig index 
25ba6248e74112..d86694d6338bf3 100644 --- a/src/watcher/INotifyWatcher.zig +++ b/src/watcher/INotifyWatcher.zig @@ -142,7 +142,7 @@ pub fn read(this: *INotifyWatcher) bun.JSC.Maybe([]const *align(1) Event) { .events = std.posix.POLL.IN | std.posix.POLL.ERR, .revents = 0, }}; - var timespec = std.posix.timespec{ .tv_sec = 0, .tv_nsec = this.coalesce_interval }; + var timespec = std.posix.timespec{ .sec = 0, .nsec = this.coalesce_interval }; if ((std.posix.ppoll(&fds, &timespec, null) catch 0) > 0) { inner: while (true) { const rest = this.eventlist_bytes[read_eventlist_bytes.len..]; diff --git a/src/watcher/KEventWatcher.zig b/src/watcher/KEventWatcher.zig index b5c4436d13da9e..e1037db01a3506 100644 --- a/src/watcher/KEventWatcher.zig +++ b/src/watcher/KEventWatcher.zig @@ -28,10 +28,10 @@ pub fn stop(this: *KEventWatcher) void { pub fn watchEventFromKEvent(kevent: KEvent) Watcher.Event { return .{ .op = .{ - .delete = (kevent.fflags & std.c.NOTE_DELETE) > 0, - .metadata = (kevent.fflags & std.c.NOTE_ATTRIB) > 0, - .rename = (kevent.fflags & (std.c.NOTE_RENAME | std.c.NOTE_LINK)) > 0, - .write = (kevent.fflags & std.c.NOTE_WRITE) > 0, + .delete = (kevent.fflags & std.c.NOTE.DELETE) > 0, + .metadata = (kevent.fflags & std.c.NOTE.ATTRIB) > 0, + .rename = (kevent.fflags & (std.c.NOTE.RENAME | std.c.NOTE.LINK)) > 0, + .write = (kevent.fflags & std.c.NOTE.WRITE) > 0, }, .index = @truncate(kevent.udata), }; @@ -59,7 +59,7 @@ pub fn watchLoopCycle(this: *Watcher) bun.JSC.Maybe(void) { // Give the events more time to coalesce if (count < 128 / 2) { const remain = 128 - count; - var timespec = std.posix.timespec{ .tv_sec = 0, .tv_nsec = 100_000 }; + var timespec = std.posix.timespec{ .sec = 0, .nsec = 100_000 }; const extra = std.posix.system.kevent( this.platform.fd.cast(), @as([*]KEvent, changelist[@as(usize, @intCast(count))..].ptr), diff --git a/src/watcher/WindowsWatcher.zig b/src/watcher/WindowsWatcher.zig index 294e9275df4697..9e62d53381e6f5 100644 --- 
a/src/watcher/WindowsWatcher.zig +++ b/src/watcher/WindowsWatcher.zig @@ -38,7 +38,7 @@ const DirWatcher = struct { // invalidates any EventIterators fn prepare(this: *DirWatcher) bun.JSC.Maybe(void) { - const filter = w.FILE_NOTIFY_CHANGE_FILE_NAME | w.FILE_NOTIFY_CHANGE_DIR_NAME | w.FILE_NOTIFY_CHANGE_LAST_WRITE | w.FILE_NOTIFY_CHANGE_CREATION; + const filter: w.FileNotifyChangeFilter = .{ .file_name = true, .dir_name = true, .last_write = true, .creation = true }; if (w.kernel32.ReadDirectoryChangesW(this.dirHandle, &this.buf, this.buf.len, 1, filter, null, &this.overlapped, null) == 0) { const err = w.kernel32.GetLastError(); log("failed to start watching directory: {s}", .{@tagName(err)}); @@ -117,10 +117,10 @@ pub fn init(this: *WindowsWatcher, root: []const u8) !void { log("failed to open directory for watching: {s}", .{@tagName(err)}); return Error.CreateFileFailed; } - errdefer _ = w.kernel32.CloseHandle(handle); + errdefer _ = bun.windows.CloseHandle(handle); this.iocp = try w.CreateIoCompletionPort(handle, null, 0, 1); - errdefer _ = w.kernel32.CloseHandle(this.iocp); + errdefer _ = bun.windows.CloseHandle(this.iocp); this.watcher = .{ .dirHandle = handle }; diff --git a/src/windows.zig b/src/windows.zig index 8f6cbc5f5e326b..ecf2de533494f8 100644 --- a/src/windows.zig +++ b/src/windows.zig @@ -7,6 +7,7 @@ pub const WORD = windows.WORD; pub const DWORD = windows.DWORD; pub const CHAR = windows.CHAR; pub const BOOL = windows.BOOL; +pub const BOOLEAN = windows.BOOLEAN; pub const LPVOID = windows.LPVOID; pub const LPCVOID = windows.LPCVOID; pub const LPWSTR = windows.LPWSTR; @@ -33,6 +34,7 @@ pub const STATUS_SUCCESS = windows.STATUS_SUCCESS; pub const MOVEFILE_COPY_ALLOWED = 0x2; pub const MOVEFILE_REPLACE_EXISTING = 0x1; pub const MOVEFILE_WRITE_THROUGH = 0x8; +pub const FILETIME = windows.FILETIME; pub const DUPLICATE_SAME_ACCESS = windows.DUPLICATE_SAME_ACCESS; pub const OBJECT_ATTRIBUTES = windows.OBJECT_ATTRIBUTES; @@ -3259,8 +3261,8 @@ fn 
Bun__UVSignalHandle__close(signal: *libuv.uv_signal_t) callconv(.C) void { comptime { if (Environment.isWindows) { - @export(Bun__UVSignalHandle__init, .{ .name = "Bun__UVSignalHandle__init" }); - @export(Bun__UVSignalHandle__close, .{ .name = "Bun__UVSignalHandle__close" }); + @export(&Bun__UVSignalHandle__init, .{ .name = "Bun__UVSignalHandle__init" }); + @export(&Bun__UVSignalHandle__close, .{ .name = "Bun__UVSignalHandle__close" }); } } @@ -3658,3 +3660,13 @@ pub const rescle = struct { }; } }; + +pub extern "kernel32" fn CloseHandle(hObject: HANDLE) callconv(.winapi) BOOL; +pub extern "kernel32" fn GetFinalPathNameByHandleW(hFile: HANDLE, lpszFilePath: [*]u16, cchFilePath: DWORD, dwFlags: DWORD) callconv(.winapi) DWORD; +pub extern "kernel32" fn DeleteFileW(lpFileName: [*:0]const u16) callconv(.winapi) BOOL; +pub extern "kernel32" fn CreateSymbolicLinkW(lpSymlinkFileName: [*:0]const u16, lpTargetFileName: [*:0]const u16, dwFlags: DWORD) callconv(.winapi) BOOLEAN; +pub extern "kernel32" fn GetCurrentThread() callconv(.winapi) HANDLE; +pub extern "kernel32" fn GetCommandLineW() callconv(.winapi) LPWSTR; +pub extern "kernel32" fn CreateDirectoryW(lpPathName: [*:0]const u16, lpSecurityAttributes: ?*windows.SECURITY_ATTRIBUTES) callconv(.winapi) BOOL; +pub extern "kernel32" fn SetEndOfFile(hFile: HANDLE) callconv(.winapi) BOOL; +pub extern "kernel32" fn GetProcessTimes(in_hProcess: HANDLE, out_lpCreationTime: *FILETIME, out_lpExitTime: *FILETIME, out_lpKernelTime: *FILETIME, out_lpUserTime: *FILETIME) callconv(.winapi) BOOL; diff --git a/src/windows_c.zig b/src/windows_c.zig index f4f6e08c3d6857..8a0dd420014fe3 100644 --- a/src/windows_c.zig +++ b/src/windows_c.zig @@ -27,7 +27,7 @@ pub export fn memmem(haystack: ?[*]const u8, haystacklen: usize, needle: ?[*]con } comptime { - @export(memmem, .{ .name = "zig_memmem" }); + @export(&memmem, .{ .name = "zig_memmem" }); } pub const lstat = blk: { @@ -57,7 +57,7 @@ pub fn getSystemLoadavg() [3]f32 { return .{ 0, 0, 0 }; 
} -pub const Mode = i32; +pub const Mode = u16; const Win32Error = bun.windows.Win32Error; // The way we do errors in Bun needs to get cleaned up. diff --git a/src/work_pool.zig b/src/work_pool.zig index 380dfacfd8a715..53b04a10027118 100644 --- a/src/work_pool.zig +++ b/src/work_pool.zig @@ -11,7 +11,7 @@ pub fn NewWorkPool(comptime max_threads: ?usize) type { var loaded: bool = false; fn create() *ThreadPool { - @setCold(true); + @branchHint(.cold); pool = ThreadPool.init(.{ .max_threads = max_threads orelse bun.getThreadCount(), diff --git a/test/js/bun/util/password.test.ts b/test/js/bun/util/password.test.ts index f838ad3a485406..0d8fd5cc25f9cd 100644 --- a/test/js/bun/util/password.test.ts +++ b/test/js/bun/util/password.test.ts @@ -217,7 +217,7 @@ for (let algorithmValue of algorithms) { expect(verifySync(input + "\0", hashed)).toBeFalse(); }); - test("password", async () => { + describe("password", async () => { async function runSlowTest(algorithm = algorithmValue as any) { const hashed = await password.hash(input, algorithm); const prefix = "$" + algorithm; @@ -228,12 +228,13 @@ for (let algorithmValue of algorithms) { } async function runSlowTestWithOptions(algorithmLabel: any) { - const algorithm = { algorithm: algorithmLabel, timeCost: 5, memoryCost: 4 }; + const algorithm = { algorithm: algorithmLabel, timeCost: 5, memoryCost: 8 }; const hashed = await password.hash(input, algorithm); const prefix = "$" + algorithmLabel; expect(hashed).toStartWith(prefix); expect(hashed).toContain("t=5"); - expect(hashed).toContain("m=4"); + expect(hashed).toContain("m=8"); + expect(hashed).toContain("p=1"); expect(await password.verify(input, hashed, algorithmLabel)).toBeTrue(); expect(() => password.verify(hashed, input, algorithmLabel)).toThrow(); expect(await password.verify(input + "\0", hashed, algorithmLabel)).toBeFalse(); @@ -252,7 +253,12 @@ for (let algorithmValue of algorithms) { if (algorithmValue === defaultAlgorithm) { // these tests are very slow // run 
the hashing tests in parallel - await Promise.all([...argons.map(runSlowTest), ...argons.map(runSlowTestWithOptions)]); + for (const a of argons) { + test(`${a}`, async () => { + await runSlowTest(a); + await runSlowTestWithOptions(a); + }) + } return; } @@ -265,9 +271,14 @@ for (let algorithmValue of algorithms) { } if (algorithmValue === "bcrypt") { - await Promise.all([defaultTest(), runSlowBCryptTest()]); + test("bcrypt", async () => { + await defaultTest(); + await runSlowBCryptTest(); + }); } else { - await defaultTest(); + test("default", async () => { + await defaultTest(); + }); } }); });