From d01ebeac80f8dc7816016f40c8e032f16d4cd6a2 Mon Sep 17 00:00:00 2001 From: dave caruso Date: Thu, 25 Jul 2024 16:25:47 -0700 Subject: [PATCH 01/46] fix 12805 --- src/deps/WebKit | 1 + 1 file changed, 1 insertion(+) create mode 160000 src/deps/WebKit diff --git a/src/deps/WebKit b/src/deps/WebKit new file mode 160000 index 00000000000000..c737b24765cddf --- /dev/null +++ b/src/deps/WebKit @@ -0,0 +1 @@ +Subproject commit c737b24765cddf5294c425b2e23dd381f1e0b33e From 4b8e988208885bbca0970ec0bd20e68580df57f6 Mon Sep 17 00:00:00 2001 From: dave caruso Date: Thu, 25 Jul 2024 19:12:46 -0700 Subject: [PATCH 02/46] hi --- src/bundler/bundle_v2.zig | 7 ++ src/cli.zig | 4 +- src/cli/build_command.zig | 3 + src/js_ast.zig | 67 +++++++++++---- src/js_parser.zig | 119 ++++++++++++++++++++------- src/js_printer.zig | 41 ++++++++- src/options.zig | 1 + test/bundler/bundler_compile.test.ts | 16 ++++ test/bundler/bundler_minify.test.ts | 54 ++++++++++++ 9 files changed, 265 insertions(+), 47 deletions(-) diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 7ec527443354e8..1025a0c67c7575 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -2871,6 +2871,13 @@ pub const ParseTask = struct { opts.features.minify_identifiers = bundler.options.minify_identifiers; opts.features.emit_decorator_metadata = bundler.options.emit_decorator_metadata; + if (bundler.options.inline_entrypoint_import_meta_main) { + // TODO: how can i determine if this file is ANY of the + // specified entrypoints. the following will only work for + // --compile or other single file entrypoints + opts.import_meta_main_value = source.index.get() == 1; + } + opts.tree_shaking = if (source.index.isRuntime()) true else bundler.options.tree_shaking; opts.module_type = task.module_type; opts.features.unwrap_commonjs_packages = bundler.options.unwrap_commonjs_packages; diff --git a/src/cli.zig b/src/cli.zig index d7135e36122380..36dccb9646db66 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -81,7 +81,7 @@ pub const debug_flags = if (Environment.isDebug) struct { } return false; } -} else @compileError("Do not access this namespace []const u8; in a release build"); +} else @compileError("Do not access this namespace in a release build"); const LoaderMatcher = strings.ExactSizeMatcher(4); const ColonListType = @import("./cli/colon_list_type.zig").ColonListType; @@ -767,6 +767,7 @@ pub const Arguments = struct { if (args.flag("--compile")) { ctx.bundler_options.compile = true; + ctx.bundler_options.inline_entrypoint_import_meta_main = true; } if (args.option("--outdir")) |outdir| { @@ -1272,6 +1273,7 @@ pub const Command = struct { react_server_components: bool = false, code_splitting: bool = false, transform_only: bool = false, + inline_entrypoint_import_meta_main: bool = false, minify_syntax: bool = false, minify_whitespace: bool = false, minify_identifiers: bool = false, diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index 631278298854f7..14c4259f711b16 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -102,6 +102,9 @@ pub const BuildCommand = struct { this_bundler.options.react_server_components = ctx.bundler_options.react_server_components; this_bundler.resolver.opts.react_server_components = ctx.bundler_options.react_server_components; + this_bundler.options.inline_entrypoint_import_meta_main = ctx.bundler_options.inline_entrypoint_import_meta_main; + this_bundler.resolver.opts.inline_entrypoint_import_meta_main = 
ctx.bundler_options.inline_entrypoint_import_meta_main; + this_bundler.options.code_splitting = ctx.bundler_options.code_splitting; this_bundler.resolver.opts.code_splitting = ctx.bundler_options.code_splitting; diff --git a/src/js_ast.zig b/src/js_ast.zig index 323ccbda76582b..34d6c652927637 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1550,6 +1550,12 @@ pub const E = struct { range: logger.Range, }; pub const ImportMeta = struct {}; + pub const ImportMetaMain = struct { + /// If we want to print `!import.meta.main`, set this flag to true + /// instead of wrapping in a unary not. This way, the printer can easily + /// print `require.main != module` instead of `!(require.main == module)` + inverted: bool = false, + }; pub const Call = struct { // Node: @@ -4468,14 +4474,13 @@ pub const Expr = struct { e_undefined, e_new_target, e_import_meta, + e_import_meta_main, + e_require_main, e_inlined_enum, /// A string that is UTF-8 encoded without escaping for use in JavaScript. e_utf8_string, - // This should never make it to the printer - inline_identifier, - // object, regex and array may have had side effects pub fn isPrimitiveLiteral(tag: Tag) bool { return switch (tag) { @@ -5156,8 +5161,8 @@ pub const Expr = struct { e_require_string: E.RequireString, e_require_resolve_string: E.RequireResolveString, - e_require_call_target: void, - e_require_resolve_call_target: void, + e_require_call_target, + e_require_resolve_call_target, e_missing: E.Missing, e_this: E.This, @@ -5166,14 +5171,12 @@ pub const Expr = struct { e_undefined: E.Undefined, e_new_target: E.NewTarget, e_import_meta: E.ImportMeta, - e_inlined_enum: *E.InlinedEnum, + e_import_meta_main: E.ImportMetaMain, + e_require_main, + e_inlined_enum: *E.InlinedEnum, e_utf8_string: *E.UTF8String, - // This type should not exist outside of MacroContext - // If it ends up in JSParser or JSPrinter, it is a bug. 
- inline_identifier: i32, - pub fn as(data: Data, comptime tag: Tag) ?std.meta.FieldType(Data, tag) { return if (data == tag) @field(data, @tagName(tag)) else null; } @@ -5754,6 +5757,14 @@ pub const Expr = struct { equal: bool = false, ok: bool = false, + /// This extra flag is unfortunately required for the case of visiting the expression + /// `require.main === module` (and any combination of !==, ==, !=, either ordering) + /// + /// We want to replace this with the dedicated import_meta_main node, which: + /// - Stops this module from having p.require_ref, allowing conversion to ESM + /// - Allows us to inline `import.meta.main`'s value, if it is known (bun build --compile) + is_require_main_and_module: bool = false, + pub const @"true" = Equality{ .ok = true, .equal = true }; pub const @"false" = Equality{ .ok = true, .equal = false }; pub const unknown = Equality{ .ok = false }; @@ -5765,12 +5776,14 @@ pub const Expr = struct { pub fn eql( left: Expr.Data, right: Expr.Data, - allocator: std.mem.Allocator, + p: anytype, comptime kind: enum { loose, strict }, ) Equality { + comptime bun.assert(@typeInfo(@TypeOf(p)).Pointer.size == .One); // pass *Parser + // https://dorey.github.io/JavaScript-Equality-Table/ switch (left) { - .e_inlined_enum => |inlined| return inlined.value.data.eql(right, allocator, kind), + .e_inlined_enum => |inlined| return inlined.value.data.eql(right, p, kind), .e_null, .e_undefined => { const ok = switch (@as(Expr.Tag, right)) { @@ -5881,8 +5894,8 @@ pub const Expr = struct { .e_string => |l| { switch (right) { .e_string => |r| { - r.resolveRopeIfNeeded(allocator); - l.resolveRopeIfNeeded(allocator); + r.resolveRopeIfNeeded(p.allocator); + l.resolveRopeIfNeeded(p.allocator); return .{ .ok = true, .equal = r.eql(E.String, l), @@ -5892,8 +5905,8 @@ pub const Expr = struct { if (inlined.value.data == .e_string) { const r = inlined.value.data.e_string; - r.resolveRopeIfNeeded(allocator); - l.resolveRopeIfNeeded(allocator); + r.resolveRopeIfNeeded(p.allocator); + l.resolveRopeIfNeeded(p.allocator); return .{ .ok = true, @@ -5924,6 +5937,27 @@ pub const Expr = struct { else => {}, } }, + + .e_require_main => { + if (right.as(.e_identifier)) |id| { + if (id.ref.eql(p.module_ref)) return .{ + .ok = true, + .equal = true, + .is_require_main_and_module = true, + }; + } + }, + + .e_identifier => |id| { + if (right == .e_require_main) { + if (id.ref.eql(p.module_ref)) return .{ + .ok = true, + .equal = true, + .is_require_main_and_module = true, + }; + } + }, + else => {}, } @@ -5949,7 +5983,6 @@ pub const Expr = struct { .e_identifier, .e_import_identifier, - .inline_identifier, .e_private_identifier, .e_commonjs_export_identifier, => error.@"Cannot convert identifier to JS. 
Try a statically-known value", diff --git a/src/js_parser.zig b/src/js_parser.zig index 01a73111b53a50..5bc62827ee46db 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -3099,6 +3099,9 @@ pub const Parser = struct { transform_only: bool = false, + /// Used for inlining the state of import.meta.main during visiting + import_meta_main_value: ?bool = null, + pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void { bun.assert(!this.bundle); @@ -7527,8 +7530,20 @@ fn NewParser_( } }, .bin_loose_eq => { - const equality = e_.left.data.eql(e_.right.data, p.allocator, .loose); + const equality = e_.left.data.eql(e_.right.data, p, .loose); if (equality.ok) { + if (equality.is_require_main_and_module) { + p.ignoreUsageOfRuntimeRequire(); + p.ignoreUsage(p.module_ref); + return .{ + .loc = v.loc, + .data = if (p.options.import_meta_main_value) |known| + .{ .e_boolean = .{ .value = known } } + else + .{ .e_import_meta_main = .{} }, + }; + } + return p.newExpr( E.Boolean{ .value = equality.equal }, v.loc, @@ -7550,8 +7565,20 @@ fn NewParser_( }, .bin_strict_eq => { - const equality = e_.left.data.eql(e_.right.data, p.allocator, .strict); + const equality = e_.left.data.eql(e_.right.data, p, .strict); if (equality.ok) { + if (equality.is_require_main_and_module) { + p.ignoreUsage(p.module_ref); + p.ignoreUsageOfRuntimeRequire(); + return .{ + .loc = v.loc, + .data = if (p.options.import_meta_main_value) |known| + .{ .e_boolean = .{ .value = known } } + else + .{ .e_import_meta_main = .{} }, + }; + } + return p.newExpr(E.Boolean{ .value = equality.equal }, v.loc); } @@ -7560,8 +7587,20 @@ fn NewParser_( // TODO: warn about typeof string }, .bin_loose_ne => { - const equality = e_.left.data.eql(e_.right.data, p.allocator, .loose); + const equality = e_.left.data.eql(e_.right.data, p, .loose); if (equality.ok) { + if (equality.is_require_main_and_module) { + p.ignoreUsage(p.module_ref); + p.ignoreUsageOfRuntimeRequire(); + return .{ + .loc = v.loc, + .data = if (p.options.import_meta_main_value) |known| + .{ .e_boolean = .{ .value = !known } } + else + .{ .e_import_meta_main = .{ .inverted = true } }, + }; + } + return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc); } // const after_op_loc = locAfterOp(e_.); @@ -7574,8 +7613,20 @@ fn NewParser_( } }, .bin_strict_ne => { - const equality = e_.left.data.eql(e_.right.data, p.allocator, .strict); + const equality = e_.left.data.eql(e_.right.data, p, .strict); if (equality.ok) { + if (equality.is_require_main_and_module) { + p.ignoreUsage(p.module_ref); + p.ignoreUsageOfRuntimeRequire(); + return .{ + .loc = v.loc, + .data = if (p.options.import_meta_main_value) |known| + .{ .e_boolean = .{ .value = !known } } + else + .{ .e_import_meta_main = .{ .inverted = true } }, + }; + } + return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc); } }, @@ -16894,24 +16945,6 @@ fn NewParser_( } }, - .inline_identifier => |id| { - const ref = p.macro.imports.get(id) orelse { - p.panic("Internal error: missing identifier from macro: {d}", .{id}); - }; - - if (!p.is_control_flow_dead) { - p.recordUsage(ref); - } - - return p.newExpr( - E.ImportIdentifier{ - .was_originally_identifier = false, - .ref = ref, - }, - expr.loc, - ); - }, - .e_binary => |e_| { // The handling of binary expressions is convoluted because we're using @@ -17182,6 +17215,11 @@ fn NewParser_( ); } + if (e_.value.data == .e_require_call_target) { + p.ignoreUsageOfRuntimeRequire(); + return p.newExpr(E.String{ .data = "function" }, 
expr.loc); + } + if (SideEffects.typeof(e_.value.data)) |typeof| { return p.newExpr(E.String{ .data = typeof }, expr.loc); } @@ -17207,6 +17245,9 @@ fn NewParser_( if (e_.value.maybeSimplifyNot(p.allocator)) |exp| { return exp; } + if (e_.value.data == .e_import_meta_main) { + e_.value.data.e_import_meta_main.inverted = !e_.value.data.e_import_meta_main.inverted; + } } }, .un_cpl => { @@ -17882,6 +17923,15 @@ fn NewParser_( } } + fn ignoreUsageOfRuntimeRequire(p: *P) void { + // target bun does not have __require + if (!p.options.features.use_import_meta_require) { + bun.assert(p.options.features.allow_runtime); + bun.assert(p.runtime_imports.__require != null); + p.ignoreUsage(p.runtimeIdentifierRef(logger.Loc.Empty, "__require")); + } + } + inline fn valueForRequire(p: *P, loc: logger.Loc) Expr { bun.assert(!p.isSourceRuntime()); return Expr{ @@ -18961,6 +19011,21 @@ fn NewParser_( target.loc, ); } + + if (strings.eqlComptime(name, "main")) { + return .{ + .loc = target.loc, + .data = if (p.options.import_meta_main_value) |known| + .{ .e_boolean = .{ .value = known } } + else + .{ .e_import_meta_main = .{} }, + }; + } + }, + .e_require_call_target => { + if (strings.eqlComptime(name, "main")) { + return .{ .loc = loc, .data = .e_require_main }; + } }, .e_import_identifier => |id| { // Symbol uses due to a property access off of an imported symbol are tracked @@ -23898,12 +23963,10 @@ fn NewParser_( p.require_ref, .force_cjs_to_esm = p.unwrap_all_requires or exports_kind == .esm_with_dynamic_fallback_from_cjs, - .uses_module_ref = (p.symbols.items[p.module_ref.innerIndex()].use_count_estimate > 0), - .uses_exports_ref = (p.symbols.items[p.exports_ref.innerIndex()].use_count_estimate > 0), - .uses_require_ref = if (p.runtime_imports.__require != null) - (p.symbols.items[p.runtime_imports.__require.?.ref.innerIndex()].use_count_estimate > 0) - else - false, + .uses_module_ref = p.symbols.items[p.module_ref.inner_index].use_count_estimate > 0, + .uses_exports_ref = p.symbols.items[p.exports_ref.inner_index].use_count_estimate > 0, + .uses_require_ref = p.runtime_imports.__require != null and + p.symbols.items[p.runtime_imports.__require.?.ref.inner_index].use_count_estimate > 0, // .top_Level_await_keyword = p.top_level_await_keyword, .commonjs_named_exports = p.commonjs_named_exports, .commonjs_export_names = p.commonjs_export_names.keys(), diff --git a/src/js_printer.zig b/src/js_printer.zig index 63438f717e3a07..9732daf5ddc7f8 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -532,6 +532,7 @@ pub const Options = struct { commonjs_named_exports: js_ast.Ast.CommonJSNamedExports = .{}, commonjs_named_exports_deoptimized: bool = false, commonjs_named_exports_ref: Ref = Ref.None, + commonjs_module_ref: Ref = Ref.None, minify_whitespace: bool = false, minify_identifiers: bool = false, @@ -2329,6 +2330,27 @@ fn NewPrinter( p.printSymbol(p.options.import_meta_ref); } }, + .e_import_meta_main => |data| { + p.addSourceMapping(expr.loc); + if (p.options.module_type == .esm) { + // Most of the time, leave it in there + if (data.inverted) { + p.print("!"); + } else { + p.printSpaceBeforeIdentifier(); + } + p.print("import.meta.main"); + } else { + p.printSpaceBeforeIdentifier(); + p.printSymbol(p.options.require_ref orelse Ref.None); + if (data.inverted) { + p.printWhitespacer(ws(".main != ")); + } else { + p.printWhitespacer(ws(".main == ")); + } + p.printSymbol(p.options.commonjs_module_ref); + } + }, .e_commonjs_export_identifier => |id| { p.printSpaceBeforeIdentifier(); 
p.addSourceMapping(expr.loc); @@ -2457,6 +2479,19 @@ fn NewPrinter( p.print(")"); } }, + .e_require_main => { + p.printSpaceBeforeIdentifier(); + p.addSourceMapping(expr.loc); + + if (p.options.module_type == .esm and is_bun_platform) { + p.print("import.meta.require.main"); + } else if (p.options.require_ref) |require_ref| { + p.printSymbol(require_ref); + p.print(".main"); + } else { + p.print("require.main"); + } + }, .e_require_call_target => { p.printSpaceBeforeIdentifier(); p.addSourceMapping(expr.loc); @@ -3221,7 +3256,11 @@ fn NewPrinter( p.print(" */"); } }, - else => { + + .e_jsx_element, + .e_private_identifier, + .e_template_part, + => { if (Environment.isDebug) Output.panic("Unexpected expression of type .{s}", .{@tagName(expr.data)}); }, diff --git a/src/options.zig b/src/options.zig index b2b60949cb701a..69d884f4c03b03 100644 --- a/src/options.zig +++ b/src/options.zig @@ -1510,6 +1510,7 @@ pub const BundleOptions = struct { install: ?*Api.BunInstall = null, inlining: bool = false, + inline_entrypoint_import_meta_main: bool = false, minify_whitespace: bool = false, minify_syntax: bool = false, minify_identifiers: bool = false, diff --git a/test/bundler/bundler_compile.test.ts b/test/bundler/bundler_compile.test.ts index 934b8e95713697..b07c72552c60ec 100644 --- a/test/bundler/bundler_compile.test.ts +++ b/test/bundler/bundler_compile.test.ts @@ -296,4 +296,20 @@ describe("bundler", () => { }, run: { stdout: '{"\u{6211}":"\u{6211}"}' }, }); + itBundled("compile/ImportMetaMain", { + compile: true, + files: { + "/entry.ts": /* js */ ` + // test toString on function to observe what the inlined value was + console.log((() => import.meta.main).toString().includes('true')); + console.log((() => !import.meta.main).toString().includes('false')); + console.log((() => !!import.meta.main).toString().includes('true')); + console.log((() => require.main == module).toString().includes('true')); + console.log((() => require.main === module).toString().includes('true')); + console.log((() => require.main !== module).toString().includes('false')); + console.log((() => require.main !== module).toString().includes('false')); + `, + }, + run: { stdout: new Array(7).fill("true").join("\n") }, + }); }); diff --git a/test/bundler/bundler_minify.test.ts b/test/bundler/bundler_minify.test.ts index da3902179130c0..03593233adf9d0 100644 --- a/test/bundler/bundler_minify.test.ts +++ b/test/bundler/bundler_minify.test.ts @@ -397,4 +397,58 @@ describe("bundler", () => { stdout: "PASS", }, }); + itBundled("minify/TypeOfRequire", { + files: { + "/entry.ts": /* js */ ` + capture(typeof require); + `, + }, + outfile: "/out.js", + capture: ['"function"'], + minifySyntax: true, + onAfterBundle(api) { + // This should not be marked as a CommonJS module + api.expectFile("/out.js").not.toContain("require"); + api.expectFile("/out.js").not.toContain("module"); + }, + }); + itBundled("minify/RequireMainToImportMetaMain", { + files: { + "/entry.ts": /* js */ ` + capture(require.main === module); + capture(require.main !== module); + capture(require.main == module); + capture(require.main != module); + capture(!(require.main === module)); + capture(!(require.main !== module)); + capture(!(require.main == module)); + capture(!(require.main != module)); + capture(!!(require.main === module)); + capture(!!(require.main !== module)); + capture(!!(require.main == module)); + capture(!!(require.main != module)); + `, + }, + outfile: "/out.js", + capture: [ + "import.meta.main", + "!import.meta.main", + "import.meta.main", + 
"!import.meta.main", + "!import.meta.main", + "import.meta.main", + "!import.meta.main", + "import.meta.main", + "import.meta.main", + "!import.meta.main", + "import.meta.main", + "!import.meta.main", + ], + minifySyntax: true, + onAfterBundle(api) { + // This should not be marked as a CommonJS module + api.expectFile("/out.js").not.toContain("require"); + api.expectFile("/out.js").not.toContain("module"); + }, + }); }); From 032cf51285e0a27a8f1c1f66fa1ab5f6a2c4ec7d Mon Sep 17 00:00:00 2001 From: dave caruso Date: Fri, 26 Jul 2024 23:01:44 -0700 Subject: [PATCH 03/46] fix this --- src/bun.js/RuntimeTranspilerCache.zig | 3 ++- src/bundler/bundle_v2.zig | 17 ++++++++++------- src/crash_handler.zig | 2 +- src/js_parser.zig | 19 +++++++------------ src/js_printer.zig | 23 +++++++++++++++-------- test/bundler/bundler_edgecase.test.ts | 14 ++++++++++++++ test/bundler/bundler_minify.test.ts | 16 ++++++++++++++++ 7 files changed, 65 insertions(+), 29 deletions(-) diff --git a/src/bun.js/RuntimeTranspilerCache.zig b/src/bun.js/RuntimeTranspilerCache.zig index 551659c6e08962..380679cc04f11c 100644 --- a/src/bun.js/RuntimeTranspilerCache.zig +++ b/src/bun.js/RuntimeTranspilerCache.zig @@ -1,7 +1,8 @@ /// ** Update the version number when any breaking changes are made to the cache format or to the JS parser ** /// Version 3: "Infinity" becomes "1/0". /// Version 4: TypeScript enums are properly handled + more constant folding -const expected_version = 4; +/// Version 5: `require.main === module` no longer marks a module as CJS +const expected_version = 5; const bun = @import("root").bun; const std = @import("std"); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 1025a0c67c7575..8387da17605d2d 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -655,6 +655,7 @@ pub const BundleV2 = struct { hash: ?u64, batch: *ThreadPoolLib.Batch, resolve: _resolver.Result, + is_entry_point: bool, ) !?Index.Int { var result = resolve; var path = result.path() orelse return null; @@ -692,6 +693,7 @@ pub const BundleV2 = struct { task.loader = loader; task.task.node.next = null; task.tree_shaking = this.linker.options.tree_shaking; + task.is_entry_point = is_entry_point; // Handle onLoad plugins as entry points if (!this.enqueueOnLoadPluginIfNeeded(task)) { @@ -819,7 +821,7 @@ pub const BundleV2 = struct { for (entry_points) |entry_point| { const resolved = this.bundler.resolveEntryPoint(entry_point) catch continue; - if (try this.enqueueItem(null, &batch, resolved)) |source_index| { + if (try this.enqueueItem(null, &batch, resolved, true)) |source_index| { this.graph.entry_points.append(this.graph.allocator, Index.source(source_index)) catch unreachable; } else {} } @@ -833,7 +835,7 @@ pub const BundleV2 = struct { for (user_entry_points) |entry_point| { const resolved = this.bundler.resolveEntryPoint(entry_point) catch continue; - if (try this.enqueueItem(null, &batch, resolved)) |source_index| { + if (try this.enqueueItem(null, &batch, resolved, true)) |source_index| { this.graph.entry_points.append(this.graph.allocator, Index.source(source_index)) catch unreachable; } else {} } @@ -2312,6 +2314,7 @@ pub const ParseTask = struct { emit_decorator_metadata: bool = false, ctx: *BundleV2, package_version: string = "", + is_entry_point: bool = false, /// Used by generated client components presolved_source_indices: []const Index.Int = &.{}, @@ -2871,11 +2874,11 @@ pub const ParseTask = struct { opts.features.minify_identifiers = bundler.options.minify_identifiers; 
opts.features.emit_decorator_metadata = bundler.options.emit_decorator_metadata; - if (bundler.options.inline_entrypoint_import_meta_main) { - // TODO: how can i determine if this file is ANY of the - // specified entrypoints. the following will only work for - // --compile or other single file entrypoints - opts.import_meta_main_value = source.index.get() == 1; + // For files that are not user-specified entrypoints, set `import.meta.main` to `false`. + // Entrypoints will have `import.meta.main` set as "unknown", unless we use `--compile`, + // in which we inline `true`. + if (bundler.options.inline_entrypoint_import_meta_main or !task.is_entry_point) { + opts.import_meta_main_value = task.is_entry_point; } opts.tree_shaking = if (source.index.isRuntime()) true else bundler.options.tree_shaking; diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 3603b1de02be71..74813acd5e23d5 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -1485,7 +1485,7 @@ pub fn dumpStackTrace(trace: std.builtin.StackTrace) void { }, .linux => { // Linux doesnt seem to be able to decode it's own debug info. - // TODO(@paperdave): see if zig 0.12 fixes this + // TODO(@paperdave): see if zig 0.14 fixes this }, else => { stdDumpStackTrace(trace); diff --git a/src/js_parser.zig b/src/js_parser.zig index 5bc62827ee46db..e2076b4df523d9 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -510,13 +510,6 @@ const TransposeState = struct { import_options: Expr = Expr.empty, }; -var true_args = &[_]Expr{ - .{ - .data = .{ .e_boolean = .{ .value = true } }, - .loc = logger.Loc.Empty, - }, -}; - const JSXTag = struct { pub const TagType = enum { fragment, tag }; pub const Data = union(TagType) { @@ -17201,9 +17194,9 @@ fn NewParser_( .e_unary => |e_| { switch (e_.op) { .un_typeof => { - const id_before = std.meta.activeTag(e_.value.data) == Expr.Tag.e_identifier; + const id_before = e_.value.data == .e_identifier; e_.value = p.visitExprInOut(e_.value, ExprIn{ .assign_target = e_.op.unaryAssignTarget() }); - const id_after = std.meta.activeTag(e_.value.data) == Expr.Tag.e_identifier; + const id_after = e_.value.data == .e_identifier; // The expression "typeof (0, x)" must not become "typeof x" if "x" // is unbound because that could suppress a ReferenceError from "x" @@ -17247,6 +17240,7 @@ fn NewParser_( } if (e_.value.data == .e_import_meta_main) { e_.value.data.e_import_meta_main.inverted = !e_.value.data.e_import_meta_main.inverted; + return e_.value; } } }, @@ -17924,11 +17918,12 @@ fn NewParser_( } fn ignoreUsageOfRuntimeRequire(p: *P) void { - // target bun does not have __require - if (!p.options.features.use_import_meta_require) { - bun.assert(p.options.features.allow_runtime); + if (!p.options.features.use_import_meta_require and + p.options.features.allow_runtime) + { bun.assert(p.runtime_imports.__require != null); p.ignoreUsage(p.runtimeIdentifierRef(logger.Loc.Empty, "__require")); + p.symbols.items[p.require_ref.innerIndex()].use_count_estimate -|= 1; } } diff --git a/src/js_printer.zig b/src/js_printer.zig index 9732daf5ddc7f8..79c1db66b2ac1a 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -2279,8 +2279,8 @@ fn NewPrinter( } } - pub fn printExpr(p: *Printer, expr: Expr, level: Level, _flags: ExprFlag.Set) void { - var flags = _flags; + pub fn printExpr(p: *Printer, expr: Expr, level: Level, in_flags: ExprFlag.Set) void { + var flags = in_flags; switch (expr.data) { .e_missing => {}, @@ -2331,23 +2331,30 @@ fn NewPrinter( } }, .e_import_meta_main => |data| { - 
p.addSourceMapping(expr.loc); if (p.options.module_type == .esm) { // Most of the time, leave it in there if (data.inverted) { + p.addSourceMapping(expr.loc); p.print("!"); } else { p.printSpaceBeforeIdentifier(); + p.addSourceMapping(expr.loc); } p.print("import.meta.main"); } else { p.printSpaceBeforeIdentifier(); - p.printSymbol(p.options.require_ref orelse Ref.None); - if (data.inverted) { - p.printWhitespacer(ws(".main != ")); - } else { + p.addSourceMapping(expr.loc); + + if (p.options.require_ref) |require| + p.printSymbol(require) + else + p.print("require"); + + if (data.inverted) + p.printWhitespacer(ws(".main != ")) + else p.printWhitespacer(ws(".main == ")); - } + p.printSymbol(p.options.commonjs_module_ref); } }, diff --git a/test/bundler/bundler_edgecase.test.ts b/test/bundler/bundler_edgecase.test.ts index 6bc239e3cdb2a5..c0398f9d2d23be 100644 --- a/test/bundler/bundler_edgecase.test.ts +++ b/test/bundler/bundler_edgecase.test.ts @@ -1767,6 +1767,20 @@ describe("bundler", () => { `, }, }); + itBundled("edgecase/ImportMetaMain", { + files: { + "/entry.ts": /* js */ ` + import {other} from './other'; + console.log(capture(import.meta.main), capture(require.main === module), ...other); + `, + "/other.ts": ` + globalThis['ca' + 'pture'] = x => x; + + export const other = [capture(require.main === module), capture(import.meta.main)]; + `, + }, + capture: ["false", "false", "import.meta.main", "import.meta.main"], + }); // TODO(@paperdave): test every case of this. I had already tested it manually, but it may break later const requireTranspilationListESM = [ diff --git a/test/bundler/bundler_minify.test.ts b/test/bundler/bundler_minify.test.ts index 03593233adf9d0..54ad7ab6144016 100644 --- a/test/bundler/bundler_minify.test.ts +++ b/test/bundler/bundler_minify.test.ts @@ -397,6 +397,22 @@ describe("bundler", () => { stdout: "PASS", }, }); + itBundled("minify/RequireInDeadBranch", { + files: { + "/entry.ts": /* js */ ` + if (0 !== 0) { + require; + } + `, + }, + outfile: "/out.js", + minifySyntax: true, + onAfterBundle(api) { + // This should not be marked as a CommonJS module + api.expectFile("/out.js").not.toContain("require"); + api.expectFile("/out.js").not.toContain("module"); + }, + }); itBundled("minify/TypeOfRequire", { files: { "/entry.ts": /* js */ ` From 962c21bbc2f528c8e98bf695ddb5416c64ca2148 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 03:47:41 -0700 Subject: [PATCH 04/46] cpp: missing uses of propertyNames (#12835) --- src/bun.js/bindings/BunObject.cpp | 2 +- src/bun.js/bindings/BunProcess.cpp | 4 ++-- src/bun.js/bindings/CommonJSModuleRecord.cpp | 2 +- src/bun.js/bindings/webcore/EventEmitter.cpp | 2 +- src/bun.js/bindings/webcore/JSErrorEvent.cpp | 4 ++-- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/bun.js/bindings/BunObject.cpp b/src/bun.js/bindings/BunObject.cpp index f29203a99552ca..8f7b40708e9478 100644 --- a/src/bun.js/bindings/BunObject.cpp +++ b/src/bun.js/bindings/BunObject.cpp @@ -311,7 +311,7 @@ static JSValue constructDNSObject(VM& vm, JSObject* bunObject) JSC::JSObject* dnsObject = JSC::constructEmptyObject(globalObject); dnsObject->putDirectNativeFunction(vm, globalObject, JSC::Identifier::fromString(vm, "lookup"_s), 2, Bun__DNSResolver__lookup, ImplementationVisibility::Public, NoIntrinsic, JSC::PropertyAttribute::DontDelete | 0); - dnsObject->putDirectNativeFunction(vm, globalObject, builtinNames(vm).resolvePublicName(), 2, Bun__DNSResolver__resolve, ImplementationVisibility::Public, NoIntrinsic, + 
dnsObject->putDirectNativeFunction(vm, globalObject, vm.propertyNames->resolve, 2, Bun__DNSResolver__resolve, ImplementationVisibility::Public, NoIntrinsic, JSC::PropertyAttribute::DontDelete | 0); dnsObject->putDirectNativeFunction(vm, globalObject, JSC::Identifier::fromString(vm, "resolveSrv"_s), 2, Bun__DNSResolver__resolveSrv, ImplementationVisibility::Public, NoIntrinsic, JSC::PropertyAttribute::DontDelete | 0); diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index c0f52800ed0e17..e03743ba1768df 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -1616,7 +1616,7 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb auto constructJavaScriptStack = [&]() -> JSC::JSValue { JSC::JSObject* javascriptStack = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 3); - javascriptStack->putDirect(vm, JSC::Identifier::fromString(vm, "message"_s), JSC::jsString(vm, String("Error [ERR_SYNTHETIC]: JavaScript Callstack"_s)), 0); + javascriptStack->putDirect(vm, vm.propertyNames->message, JSC::jsString(vm, String("Error [ERR_SYNTHETIC]: JavaScript Callstack"_s)), 0); // TODO: allow errors as an argument { @@ -2817,7 +2817,7 @@ extern "C" void Process__emitMessageEvent(Zig::GlobalObject* global, EncodedJSVa auto* process = static_cast(global->processObject()); auto& vm = global->vm(); - auto ident = Identifier::fromString(vm, "message"_s); + auto ident = vm.propertyNames->message; if (process->wrapped().hasEventListeners(ident)) { JSC::MarkedArgumentBuffer args; args.append(JSValue::decode(value)); diff --git a/src/bun.js/bindings/CommonJSModuleRecord.cpp b/src/bun.js/bindings/CommonJSModuleRecord.cpp index 3ed83392131220..eb7bcb75eb8f5c 100644 --- a/src/bun.js/bindings/CommonJSModuleRecord.cpp +++ b/src/bun.js/bindings/CommonJSModuleRecord.cpp @@ -290,7 +290,7 @@ RequireFunctionPrototype* RequireFunctionPrototype::create( RequireFunctionPrototype* prototype = new (NotNull, JSC::allocateCell(vm)) RequireFunctionPrototype(vm, structure); prototype->finishCreation(vm); - prototype->putDirect(vm, builtinNames(vm).resolvePublicName(), jsCast(globalObject)->requireResolveFunctionUnbound(), 0); + prototype->putDirect(vm, vm.propertyNames->resolve, jsCast(globalObject)->requireResolveFunctionUnbound(), 0); return prototype; } diff --git a/src/bun.js/bindings/webcore/EventEmitter.cpp b/src/bun.js/bindings/webcore/EventEmitter.cpp index f5822895590f6f..021edf1fcaabec 100644 --- a/src/bun.js/bindings/webcore/EventEmitter.cpp +++ b/src/bun.js/bindings/webcore/EventEmitter.cpp @@ -249,7 +249,7 @@ void EventEmitter::innerInvokeEventListeners(const Identifier& eventType, Simple auto* exception = exceptionPtr.get(); if (UNLIKELY(exception)) { - auto errorIdentifier = JSC::Identifier::fromString(vm, eventNames().errorEvent); + auto errorIdentifier = vm.propertyNames->error; auto hasErrorListener = this->hasActiveEventListeners(errorIdentifier); if (!hasErrorListener || eventType == errorIdentifier) { // If the event type is error, report the exception to the console. 
diff --git a/src/bun.js/bindings/webcore/JSErrorEvent.cpp b/src/bun.js/bindings/webcore/JSErrorEvent.cpp index 6e80cddc1c3ef5..60515f2074451d 100644 --- a/src/bun.js/bindings/webcore/JSErrorEvent.cpp +++ b/src/bun.js/bindings/webcore/JSErrorEvent.cpp @@ -114,7 +114,7 @@ template<> ErrorEvent::Init convertDictionary(JSGlobalObject& if (isNullOrUndefined) errorValue = jsUndefined(); else { - errorValue = object->get(&lexicalGlobalObject, Identifier::fromString(vm, "error"_s)); + errorValue = object->get(&lexicalGlobalObject, vm.propertyNames->error); RETURN_IF_EXCEPTION(throwScope, {}); } if (!errorValue.isUndefined()) { @@ -150,7 +150,7 @@ template<> ErrorEvent::Init convertDictionary(JSGlobalObject& if (isNullOrUndefined) messageValue = jsUndefined(); else { - messageValue = object->get(&lexicalGlobalObject, Identifier::fromString(vm, "message"_s)); + messageValue = object->get(&lexicalGlobalObject, vm.propertyNames->message); RETURN_IF_EXCEPTION(throwScope, {}); } if (!messageValue.isUndefined()) { From 6f994a586b80d229ae394381d03d2dd97a0dafb4 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 03:48:30 -0700 Subject: [PATCH 05/46] bindings: fix zig extern def of Bun__JSValue__deserialize (#12844) --- src/bun.js/bindings/bindings.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index a05201dcc07c78..e0cf20903af8d1 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -5636,11 +5636,11 @@ pub const JSValue = enum(JSValueReprInt) { return AsyncContextFrame__withAsyncContextIfNeeded(global, this); } - extern "c" fn Bun__JSValue__deserialize(global: *JSGlobalObject, data: [*]const u8, len: isize) JSValue; + extern "c" fn Bun__JSValue__deserialize(global: *JSGlobalObject, data: [*]const u8, len: usize) JSValue; /// Deserializes a JSValue from a serialized buffer. 
Zig version of `import('bun:jsc').deserialize` pub inline fn deserialize(bytes: []const u8, global: *JSGlobalObject) JSValue { - return Bun__JSValue__deserialize(global, bytes.ptr, @intCast(bytes.len)); + return Bun__JSValue__deserialize(global, bytes.ptr, bytes.len); } extern fn Bun__serializeJSValue(global: *JSC.JSGlobalObject, value: JSValue) SerializedScriptValue.External; From 5f19e4d3328a9b6dabee844c3ba192d22e82e74e Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 03:56:13 -0700 Subject: [PATCH 06/46] ipc: make IPCInstance.context void on windows instead of u0 (#12840) --- src/bun.js/javascript.zig | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 7fd31649319d99..67efc56c30952a 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -3683,7 +3683,7 @@ pub const VirtualMachine = struct { pub const IPCInstance = struct { globalThis: ?*JSGlobalObject, - context: if (Environment.isPosix) *uws.SocketContext else u0, + context: if (Environment.isPosix) *uws.SocketContext else void, data: IPC.IPCData, pub usingnamespace bun.New(@This()); @@ -3692,10 +3692,7 @@ pub const VirtualMachine = struct { return &this.data; } - pub fn handleIPCMessage( - this: *IPCInstance, - message: IPC.DecodedIPCMessage, - ) void { + pub fn handleIPCMessage(this: *IPCInstance, message: IPC.DecodedIPCMessage) void { JSC.markBinding(@src()); switch (message) { // In future versions we can read this in order to detect version mismatches, @@ -3730,9 +3727,7 @@ pub const VirtualMachine = struct { const IPCInfoType = if (Environment.isWindows) []const u8 else bun.FileDescriptor; pub fn initIPCInstance(this: *VirtualMachine, info: IPCInfoType, mode: IPC.Mode) void { IPC.log("initIPCInstance {" ++ (if (Environment.isWindows) "s" else "") ++ "}", .{info}); - this.ipc = .{ - .waiting = .{ .info = info, .mode = mode }, - }; + this.ipc = .{ .waiting = .{ .info = info, .mode = mode } }; } pub fn getIPCInstance(this: *VirtualMachine) ?*IPCInstance { @@ -3770,7 +3765,7 @@ pub const VirtualMachine = struct { .windows => instance: { var instance = IPCInstance.new(.{ .globalThis = this.global, - .context = 0, + .context = {}, .data = .{ .mode = opts.mode }, }); From 0c032d1e881b678d7bfd4ce06c925cfaef3ec340 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 03:58:01 -0700 Subject: [PATCH 07/46] uws: tidy use of ssl intFromBool (#12839) --- src/deps/uws.zig | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/deps/uws.zig b/src/deps/uws.zig index eab7032d31a179..4878d8dfadebf7 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -1086,7 +1086,7 @@ pub const Timer = opaque { pub const SocketContext = opaque { pub fn getNativeHandle(this: *SocketContext, comptime ssl: bool) *anyopaque { - return us_socket_context_get_native_handle(comptime @as(i32, @intFromBool(ssl)), this).?; + return us_socket_context_get_native_handle(@intFromBool(ssl), this).?; } fn _deinit_ssl(this: *SocketContext) void { @@ -1143,10 +1143,7 @@ pub const SocketContext = opaque { } fn getLoop(this: *SocketContext, ssl: bool) ?*Loop { - if (ssl) { - return us_socket_context_loop(@as(i32, 1), this); - } - return us_socket_context_loop(@as(i32, 0), this); + return us_socket_context_loop(@intFromBool(ssl), this); } /// closes and deinit the SocketContexts @@ -1164,7 +1161,7 @@ pub const SocketContext = opaque { pub fn close(this: *SocketContext, ssl: bool) void { debug("us_socket_context_close({d})", 
.{@intFromPtr(this)}); - us_socket_context_close(@as(i32, @intFromBool(ssl)), this); + us_socket_context_close(@intFromBool(ssl), this); } pub fn ext(this: *SocketContext, ssl: bool, comptime ContextType: type) ?*ContextType { From f4321239f2d5832a61b83319c95422bad95251cf Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 03:58:47 -0700 Subject: [PATCH 08/46] node:v8: expose DefaultDeserializer and DefaultSerializer exports (#12838) --- src/js/node/v8.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/js/node/v8.ts b/src/js/node/v8.ts index ff1cf471fe9a12..d70a7bc73c0cd9 100644 --- a/src/js/node/v8.ts +++ b/src/js/node/v8.ts @@ -160,4 +160,6 @@ hideFromStack( DefaultDeserializer, DefaultSerializer, GCProfiler, + DefaultDeserializer, + DefaultSerializer, ); From 56bd90f8096d076f215df7d1e22101e294971da3 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 04:02:31 -0700 Subject: [PATCH 09/46] bindings: better use of jsc api in Path_functionToNamespacedPath (#12836) --- src/bun.js/bindings/Path.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/bun.js/bindings/Path.cpp b/src/bun.js/bindings/Path.cpp index 516b4da73a20bb..74ced1bd768faa 100644 --- a/src/bun.js/bindings/Path.cpp +++ b/src/bun.js/bindings/Path.cpp @@ -119,8 +119,7 @@ JSC_DEFINE_HOST_FUNCTION(Path_functionToNamespacedPath, static JSC::JSObject* createPath(JSGlobalObject* globalThis, bool isWindows) { JSC::VM& vm = globalThis->vm(); - JSC::Structure* plainObjectStructure = JSC::JSFinalObject::createStructure(vm, globalThis, globalThis->objectPrototype(), 0); - JSC::JSObject* path = JSC::JSFinalObject::create(vm, plainObjectStructure); + auto* path = JSC::constructEmptyObject(globalThis); auto clientData = WebCore::clientData(vm); path->putDirect(vm, clientData->builtinNames().isWindowsPrivateName(), From fe60dd7c57118aa56126a29ccca0bb4198407d87 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 04:03:55 -0700 Subject: [PATCH 10/46] use .undefined literal instead of jsUndefined() call (#12834) --- src/bun.js/api/BunObject.zig | 20 +++++----- src/bun.js/api/JSBundler.zig | 6 +-- src/bun.js/api/bun/h2_frame_parser.zig | 22 +++++------ src/bun.js/api/bun/subprocess.zig | 14 +++---- src/bun.js/api/ffi.zig | 4 +- src/bun.js/api/glob.zig | 4 +- src/bun.js/api/html_rewriter.zig | 2 +- src/bun.js/api/server.zig | 10 ++--- src/bun.js/javascript.zig | 4 +- src/bun.js/node/node_fs_stat_watcher.zig | 8 ++-- src/bun.js/node/node_fs_watcher.zig | 8 ++-- src/bun.js/node/node_os.zig | 18 ++++----- src/bun.js/node/path.zig | 20 +++++----- src/bun.js/node/types.zig | 2 +- src/bun.js/webcore.zig | 14 +++---- src/bun.js/webcore/body.zig | 2 +- src/bun.js/webcore/encoding.zig | 2 +- src/bun.js/webcore/response.zig | 6 +-- src/bun.js/webcore/streams.zig | 48 ++++++++++++------------ src/deps/c_ares.zig | 10 ++--- src/logger.zig | 2 +- src/napi/napi.zig | 6 +-- src/shell/shell.zig | 6 +-- 23 files changed, 119 insertions(+), 119 deletions(-) diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index 0fa958dd2c858b..7b0b70def7171c 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -327,7 +327,7 @@ pub fn shell( var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string_args = arguments.nextEat() orelse { globalThis.throw("shell: expected 2 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; const template_args_js = arguments.nextEat() orelse { @@ -459,7 
+459,7 @@ pub fn braces( const brace_str_js = arguments.nextEat() orelse { globalThis.throw("braces: expected at least 1 argument, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; const brace_str = brace_str_js.toBunString(globalThis); defer brace_str.deref(); @@ -566,7 +566,7 @@ pub fn which( defer arguments.deinit(); const path_arg = arguments.nextEat() orelse { globalThis.throw("which: expected 1 argument, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; var path_str: ZigString.Slice = ZigString.Slice.empty; @@ -589,7 +589,7 @@ pub fn which( if (bin_str.len >= bun.MAX_PATH_BYTES) { globalThis.throw("bin path is too long", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; } if (bin_str.len == 0) { @@ -980,7 +980,7 @@ pub fn openInEditor( return .zero; }; - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn getPublicPath(to: string, origin: URL, comptime Writer: type, writer: Writer) void { @@ -3445,7 +3445,7 @@ pub export fn Bun__escapeHTML16(globalObject: *JSC.JSGlobalObject, input_value: const input_slice = ptr[0..len]; const escaped = strings.escapeHTMLForUTF16Input(globalObject.bunVM().allocator, input_slice) catch { globalObject.vm().throwError(globalObject, ZigString.init("Out of memory").toJS(globalObject)); - return JSC.JSValue.jsUndefined(); + return .undefined; }; switch (escaped) { @@ -3491,7 +3491,7 @@ pub export fn Bun__escapeHTML8(globalObject: *JSC.JSGlobalObject, input_value: J const escaped = strings.escapeHTMLForLatin1Input(allocator, input_slice) catch { globalObject.vm().throwError(globalObject, ZigString.init("Out of memory").toJS(globalObject)); - return JSC.JSValue.jsUndefined(); + return .undefined; }; switch (escaped) { @@ -4557,8 +4557,8 @@ pub const FFIObject = struct { fn stringWidth(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSC.JSValue { const arguments = callframe.arguments(2).slice(); - const value = if (arguments.len > 0) arguments[0] else JSC.JSValue.jsUndefined(); - const options_object = if (arguments.len > 1) arguments[1] else JSC.JSValue.jsUndefined(); + const value = if (arguments.len > 0) arguments[0] else .undefined; + const options_object = if (arguments.len > 1) arguments[1] else .undefined; if (!value.isString()) { return JSC.jsNumber(0); @@ -4671,7 +4671,7 @@ pub const JSZlib = struct { // This has to be `inline` due to the callframe. 
inline fn getOptions(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) ?struct { JSC.Node.StringOrBuffer, ?JSValue } { const arguments = callframe.arguments(2).slice(); - const buffer_value = if (arguments.len > 0) arguments[0] else JSC.JSValue.jsUndefined(); + const buffer_value = if (arguments.len > 0) arguments[0] else .undefined; const options_val: ?JSValue = if (arguments.len > 1 and arguments[1].isObject()) arguments[1] diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 9a9ad0c11f6563..1b1a2a9e65f5b8 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -548,12 +548,12 @@ pub const JSBundler = struct { ) JSC.JSValue { if (arguments.len == 0 or !arguments[0].isObject()) { globalThis.throwInvalidArguments("Expected a config object to be passed to Bun.build", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; } var plugins: ?*Plugin = null; const config = Config.fromJS(globalThis, arguments[0], &plugins, globalThis.allocator()) catch { - return JSC.JSValue.jsUndefined(); + return .undefined; }; return bun.BundleV2.generateFromJavaScript( @@ -563,7 +563,7 @@ pub const JSBundler = struct { globalThis.bunVM().eventLoop(), bun.default_allocator, ) catch { - return JSC.JSValue.jsUndefined(); + return .undefined; }; } diff --git a/src/bun.js/api/bun/h2_frame_parser.zig b/src/bun.js/api/bun/h2_frame_parser.zig index a98142aa92179e..7aff2065548425 100644 --- a/src/bun.js/api/bun/h2_frame_parser.zig +++ b/src/bun.js/api/bun/h2_frame_parser.zig @@ -716,7 +716,7 @@ pub const H2FrameParser = struct { stream.state = .CLOSED; if (rstCode == .NO_ERROR) { - this.dispatchWithExtra(.onStreamEnd, JSC.JSValue.jsNumber(stream.id), JSC.JSValue.jsUndefined()); + this.dispatchWithExtra(.onStreamEnd, JSC.JSValue.jsNumber(stream.id), .undefined); } else { this.dispatchWithExtra(.onStreamError, JSC.JSValue.jsNumber(stream.id), JSC.JSValue.jsNumber(@intFromEnum(rstCode))); } @@ -1428,7 +1428,7 @@ pub const H2FrameParser = struct { return .zero; }; - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn loadSettingsFromJSValue(this: *H2FrameParser, globalObject: *JSC.JSGlobalObject, options: JSC.JSValue) bool { @@ -1544,7 +1544,7 @@ pub const H2FrameParser = struct { if (this.loadSettingsFromJSValue(globalObject, options)) { this.setSettings(this.localSettings); - return JSC.JSValue.jsUndefined(); + return .undefined; } return .zero; @@ -1608,14 +1608,14 @@ pub const H2FrameParser = struct { if (opaque_data_arg.asArrayBuffer(globalObject)) |array_buffer| { const slice = array_buffer.byteSlice(); this.sendGoAway(0, @enumFromInt(errorCode), slice, lastStreamID); - return JSC.JSValue.jsUndefined(); + return .undefined; } } } } this.sendGoAway(0, @enumFromInt(errorCode), "", lastStreamID); - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn ping(this: *H2FrameParser, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSValue { @@ -1629,7 +1629,7 @@ pub const H2FrameParser = struct { if (args_list.ptr[0].asArrayBuffer(globalObject)) |array_buffer| { const slice = array_buffer.slice(); this.sendPing(false, slice); - return JSC.JSValue.jsUndefined(); + return .undefined; } globalObject.throw("Expected payload to be a Buffer", .{}); @@ -2038,7 +2038,7 @@ pub const H2FrameParser = struct { stream.state = .CLOSED; stream.rstCode = @intFromEnum(ErrorCode.COMPRESSION_ERROR); this.dispatchWithExtra(.onStreamError, JSC.JSValue.jsNumber(stream_id), JSC.JSValue.jsNumber(stream.rstCode)); - return JSC.JSValue.jsUndefined(); + 
return .undefined; }; } } else { @@ -2058,7 +2058,7 @@ pub const H2FrameParser = struct { stream.state = .CLOSED; stream.rstCode = @intFromEnum(ErrorCode.COMPRESSION_ERROR); this.dispatchWithExtra(.onStreamError, JSC.JSValue.jsNumber(stream_id), JSC.JSValue.jsNumber(stream.rstCode)); - return JSC.JSValue.jsUndefined(); + return .undefined; }; } } @@ -2075,7 +2075,7 @@ pub const H2FrameParser = struct { frame.write(@TypeOf(writer), writer); _ = writer.write(buffer[0..encoded_size]) catch 0; - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn writeStream(this: *H2FrameParser, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSValue { JSC.markBinding(@src()); @@ -2250,7 +2250,7 @@ pub const H2FrameParser = struct { stream.state = .CLOSED; stream.rstCode = @intFromEnum(ErrorCode.COMPRESSION_ERROR); this.dispatchWithExtra(.onStreamError, JSC.JSValue.jsNumber(stream_id), JSC.JSValue.jsNumber(stream.rstCode)); - return JSC.JSValue.jsUndefined(); + return .undefined; }; } } else { @@ -2439,7 +2439,7 @@ pub const H2FrameParser = struct { const result = this.readBytes(bytes); bytes = bytes[result..]; } - return JSC.JSValue.jsUndefined(); + return .undefined; } globalObject.throw("Expected data to be a Buffer or ArrayBuffer", .{}); return .zero; diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index 6d8822f27e18e2..c2e80ec629d336 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -696,12 +696,12 @@ pub const Subprocess = struct { pub fn doRef(this: *Subprocess, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue { this.ref(); - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn doUnref(this: *Subprocess, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue { this.unref(); - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn onStdinDestroyed(this: *Subprocess) void { @@ -737,7 +737,7 @@ pub const Subprocess = struct { const success = ipc_data.serializeAndSend(global, value); if (!success) return .zero; - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn disconnect(this: *Subprocess) void { @@ -1471,9 +1471,9 @@ pub const Subprocess = struct { if (status == .err) status.err.toJSC(globalThis) else - JSC.JSValue.jsUndefined(); + .undefined; - const this_value = if (this_jsvalue.isEmptyOrUndefinedOrNull()) JSC.JSValue.jsUndefined() else this_jsvalue; + const this_value = if (this_jsvalue.isEmptyOrUndefinedOrNull()) .undefined else this_jsvalue; this_value.ensureStillAlive(); const args = [_]JSValue{ @@ -1856,7 +1856,7 @@ pub const Subprocess = struct { var i: u32 = 0; while (stdio_iter.next()) |value| : (i += 1) { if (!stdio[i].extract(globalThis, i, value)) - return JSC.JSValue.jsUndefined(); + return .undefined; if (i == 2) break; } @@ -1865,7 +1865,7 @@ pub const Subprocess = struct { while (stdio_iter.next()) |value| : (i += 1) { var new_item: Stdio = undefined; if (!new_item.extract(globalThis, i, value)) { - return JSC.JSValue.jsUndefined(); + return .undefined; } const opt = switch (new_item.asSpawnOption(i)) { diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index 7b64c504c021e2..963ea23db85a9d 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -145,7 +145,7 @@ pub const FFI = struct { ) callconv(.C) JSValue { JSC.markBinding(@src()); if (this.closed) { - return JSC.JSValue.jsUndefined(); + return .undefined; } this.closed = true; if (this.dylib) |*dylib| { @@ -160,7 +160,7 @@ pub const FFI = struct { } 
this.functions.deinit(allocator); - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn printCallback(global: *JSGlobalObject, object: JSC.JSValue) JSValue { diff --git a/src/bun.js/api/glob.zig b/src/bun.js/api/glob.zig index 2aa6fd17ad4a3c..69b4295e874810 100644 --- a/src/bun.js/api/glob.zig +++ b/src/bun.js/api/glob.zig @@ -448,12 +448,12 @@ pub fn match(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame defer arguments.deinit(); const str_arg = arguments.nextEat() orelse { globalThis.throw("Glob.matchString: expected 1 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; if (!str_arg.isString()) { globalThis.throw("Glob.matchString: first argument is not a string", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; } var str = str_arg.toSlice(globalThis, arena.allocator()); diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index e7adc6675d8968..de315f69c4037e 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -1040,7 +1040,7 @@ pub const TextChunk = struct { fn contentHandler(this: *TextChunk, comptime Callback: (fn (*LOLHTML.TextChunk, []const u8, bool) LOLHTML.Error!void), thisObject: JSValue, globalObject: *JSGlobalObject, content: ZigString, contentOptions: ?ContentOptions) JSValue { if (this.text_chunk == null) - return JSC.JSValue.jsUndefined(); + return .undefined; var content_slice = content.toSlice(bun.default_allocator); defer content_slice.deinit(); diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 9dd0d651cd11a7..29df596df68be3 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -1616,7 +1616,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp defer ctx.deref(); - handleReject(ctx, if (!err.isEmptyOrUndefinedOrNull()) err else JSC.JSValue.jsUndefined()); + handleReject(ctx, if (!err.isEmptyOrUndefinedOrNull()) err else .undefined); return JSValue.jsUndefined(); } @@ -5723,21 +5723,21 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp }; JSC.C.JSValueUnprotect(this.globalThis, this.thisObject.asObjectRef()); - this.thisObject = JSC.JSValue.jsUndefined(); + this.thisObject = .undefined; this.stop(abrupt); } - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn disposeFromJS(this: *ThisServer) JSC.JSValue { if (this.listener != null) { JSC.C.JSValueUnprotect(this.globalThis, this.thisObject.asObjectRef()); - this.thisObject = JSC.JSValue.jsUndefined(); + this.thisObject = .undefined; this.stop(true); } - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn getPort( diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 67efc56c30952a..814cd4e8043d74 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -487,7 +487,7 @@ pub export fn Bun__reportUnhandledError(globalObject: *JSGlobalObject, value: JS // See the crash in https://github.com/oven-sh/bun/issues/9778 const jsc_vm = JSC.VirtualMachine.get(); _ = jsc_vm.uncaughtException(globalObject, value, false); - return JSC.JSValue.jsUndefined(); + return .undefined; } /// This function is called on another thread @@ -2928,7 +2928,7 @@ pub const VirtualMachine = struct { pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *JSC.Exception) JSValue { var jsc_vm = globalObject.bunVM(); _ = jsc_vm.uncaughtException(globalObject, exception.value(), false); - return JSC.JSValue.jsUndefined(); + return 
.undefined; } pub fn printStackTrace(comptime Writer: type, writer: Writer, trace: ZigStackTrace, comptime allow_ansi_colors: bool) !void { diff --git a/src/bun.js/node/node_fs_stat_watcher.zig b/src/bun.js/node/node_fs_stat_watcher.zig index 59bb78f6cc3275..a4f26346a527bb 100644 --- a/src/bun.js/node/node_fs_stat_watcher.zig +++ b/src/bun.js/node/node_fs_stat_watcher.zig @@ -305,7 +305,7 @@ pub const StatWatcher = struct { if (obj.js_this != .zero) { return obj.js_this; } - return JSC.JSValue.jsUndefined(); + return .undefined; } }; @@ -314,7 +314,7 @@ pub const StatWatcher = struct { this.persistent = true; this.poll_ref.ref(this.ctx); } - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn doUnref(this: *StatWatcher, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { @@ -322,7 +322,7 @@ pub const StatWatcher = struct { this.persistent = false; this.poll_ref.unref(this.ctx); } - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn hasPendingActivity(this: *StatWatcher) bool { @@ -345,7 +345,7 @@ pub const StatWatcher = struct { pub fn doClose(this: *StatWatcher, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { this.close(); - return JSC.JSValue.jsUndefined(); + return .undefined; } /// If the scheduler is not using this, free instantly, otherwise mark for being freed. diff --git a/src/bun.js/node/node_fs_watcher.zig b/src/bun.js/node/node_fs_watcher.zig index 54e9fdacedd8e1..9c5a296beee778 100644 --- a/src/bun.js/node/node_fs_watcher.zig +++ b/src/bun.js/node/node_fs_watcher.zig @@ -579,7 +579,7 @@ pub const FSWatcher = struct { if (js_this == .zero) return; const listener = FSWatcher.listenerGetCached(js_this) orelse return; const globalObject = this.globalThis; - var filename: JSC.JSValue = JSC.JSValue.jsUndefined(); + var filename: JSC.JSValue = .undefined; if (file_name.len > 0) { if (this.encoding == .buffer) filename = JSC.ArrayBuffer.createBuffer(globalObject, file_name) @@ -615,7 +615,7 @@ pub const FSWatcher = struct { this.persistent = true; this.poll_ref.ref(this.ctx); } - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn doUnref(this: *FSWatcher, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { @@ -623,7 +623,7 @@ pub const FSWatcher = struct { this.persistent = false; this.poll_ref.unref(this.ctx); } - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn hasRef(this: *FSWatcher, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { @@ -698,7 +698,7 @@ pub const FSWatcher = struct { pub fn doClose(this: *FSWatcher, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { this.close(); - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn finalize(this: *FSWatcher) void { diff --git a/src/bun.js/node/node_os.zig b/src/bun.js/node/node_os.zig index 9a153b93b1c55a..b55d061ec40206 100644 --- a/src/bun.js/node/node_os.zig +++ b/src/bun.js/node/node_os.zig @@ -66,7 +66,7 @@ pub const OS = struct { }; globalThis.vm().throwError(globalThis, err.toErrorInstance(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; }; } @@ -320,7 +320,7 @@ pub const OS = struct { globalThis, ); globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } const pid = if (arguments.len > 0) arguments[0].asInt32() else 0; @@ -342,7 +342,7 @@ pub const OS = struct { }; globalThis.vm().throwError(globalThis, err.toErrorInstance(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; } return JSC.JSValue.jsNumberFromInt32(priority); @@ -425,7 +425,7 
@@ pub const OS = struct { }; globalThis.vm().throwError(globalThis, err.toErrorInstance(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; } defer C.freeifaddrs(interface_start); @@ -736,7 +736,7 @@ pub const OS = struct { globalThis, ); globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } const pid = if (arguments.len == 2) arguments[0].coerce(i32, globalThis) else 0; @@ -750,7 +750,7 @@ pub const OS = struct { globalThis, ); globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } const errcode = C.setProcessPriority(pid, priority); @@ -765,7 +765,7 @@ pub const OS = struct { }; globalThis.vm().throwError(globalThis, err.toErrorInstance(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; }, .ACCES => { const err = JSC.SystemError{ @@ -777,12 +777,12 @@ pub const OS = struct { }; globalThis.vm().throwError(globalThis, err.toErrorInstance(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; }, else => {}, } - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn totalmem(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { diff --git a/src/bun.js/node/path.zig b/src/bun.js/node/path.zig index faa3d1df1c2f7e..e4d1365d019d15 100644 --- a/src/bun.js/node/path.zig +++ b/src/bun.js/node/path.zig @@ -463,7 +463,7 @@ pub fn basename(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [* }; } - const path_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const path_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, path_ptr, "path", .{}) catch { return .zero; @@ -658,7 +658,7 @@ pub inline fn dirnameJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, i } pub fn dirname(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - const path_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const path_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, path_ptr, "path", .{}) catch { // Returning .zero translates to a nullprt JSC.JSValue. @@ -857,7 +857,7 @@ pub inline fn extnameJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, i } pub fn extname(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - const path_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const path_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, path_ptr, "path", .{}) catch { // Returning .zero translates to a nullprt JSC.JSValue. @@ -971,7 +971,7 @@ pub fn formatJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, allocator } pub fn format(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - const pathObject_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const pathObject_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. 
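The switch from JSC.JSValue.jsUndefined() to the bare .undefined literal in these path.zig hunks (and throughout this patch) works because Bun's bindings model JSValue as an enum whose well-known encodings are named tags, so an enum literal coerces wherever the result type is already JSValue. A minimal sketch of the pattern, with made-up tag values rather than JSC's real encodings:

    const JSValue = enum(i64) {
        zero = 0,
        undefined = 0xa,
        _, // every other bit pattern is still a valid JSValue

        fn jsUndefined() JSValue {
            return .undefined;
        }
    };

    fn getPort(listening: bool) JSValue {
        // `.undefined` coerces because the return type pins the enum type;
        // it is the same value the longer JSValue.jsUndefined() call produced.
        if (!listening) return .undefined;
        return @enumFromInt(3000);
    }
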
validateObject(globalObject, pathObject_ptr, "pathObject", .{}, .{}) catch { // Returning .zero translates to a nullprt JSC.JSValue. @@ -1045,7 +1045,7 @@ pub fn isAbsoluteWindowsZigString(pathZStr: JSC.ZigString) bool { } pub fn isAbsolute(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - const path_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const path_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, path_ptr, "path", .{}) catch { // Returning .zero translates to a nullprt JSC.JSValue. @@ -1658,7 +1658,7 @@ pub fn normalizeJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, alloca } pub fn normalize(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - const path_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const path_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, path_ptr, "path", .{}) catch { // Returning .zero translates to a nullprt JSC.JSValue. @@ -1981,7 +1981,7 @@ pub inline fn parseJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, isW } pub fn parse(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - const path_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const path_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, path_ptr, "path", .{}) catch { // Returning .zero translates to a nullprt JSC.JSValue. @@ -2340,13 +2340,13 @@ pub fn relativeJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, allocat } pub fn relative(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - const from_ptr = if (args_len > 0) args_ptr[0] else JSC.JSValue.jsUndefined(); + const from_ptr = if (args_len > 0) args_ptr[0] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, from_ptr, "from", .{}) catch { // Returning .zero translates to a nullprt JSC.JSValue. return .zero; }; - const to_ptr = if (args_len > 1) args_ptr[1] else JSC.JSValue.jsUndefined(); + const to_ptr = if (args_len > 1) args_ptr[1] else .undefined; // Supress exeption in zig. It does globalThis.vm().throwError() in JS land. validateString(globalObject, to_ptr, "to", .{}) catch { return .zero; @@ -2906,7 +2906,7 @@ pub fn toNamespacedPathJS_T(comptime T: type, globalObject: *JSC.JSGlobalObject, } pub fn toNamespacedPath(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JSC.JSValue, args_len: u16) callconv(JSC.conv) JSC.JSValue { - if (args_len == 0) return JSC.JSValue.jsUndefined(); + if (args_len == 0) return .undefined; var path_ptr = args_ptr[0]; // Based on Node v21.6.1 path.win32.toNamespacedPath and path.posix.toNamespacedPath: diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 7b1fa0bee6e8d0..ba78ea8722e213 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -2101,7 +2101,7 @@ pub const Process = struct { fs.top_level_dir_buf[len + 1] = 0; fs.top_level_dir = fs.top_level_dir_buf[0 .. 
len + 1]; - return JSC.JSValue.jsUndefined(); + return .undefined; }, .err => |e| return e.toJSC(globalObject), } diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig index a8c6faeb80005f..48109d21c5e276 100644 --- a/src/bun.js/webcore.zig +++ b/src/bun.js/webcore.zig @@ -512,7 +512,7 @@ pub const Crypto = struct { // i don't think its a real scenario, but just in case buf = globalThis.allocator().alloc(u8, keylen) catch { globalThis.throw("Failed to allocate memory", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; needs_deinit = true; } else { @@ -570,25 +570,25 @@ pub const Crypto = struct { if (arguments.len < 2) { globalThis.throwInvalidArguments("Expected 2 typed arrays but got nothing", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; } const array_buffer_a = arguments[0].asArrayBuffer(globalThis) orelse { globalThis.throwInvalidArguments("Expected typed array but got {s}", .{@tagName(arguments[0].jsType())}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; const a = array_buffer_a.byteSlice(); const array_buffer_b = arguments[1].asArrayBuffer(globalThis) orelse { globalThis.throwInvalidArguments("Expected typed array but got {s}", .{@tagName(arguments[1].jsType())}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; const b = array_buffer_b.byteSlice(); const len = a.len; if (b.len != len) { globalThis.throw("Input buffers must have the same byte length", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; } return JSC.jsBoolean(len == 0 or bun.BoringSSL.CRYPTO_memcmp(a.ptr, b.ptr, len) == 0); } @@ -619,12 +619,12 @@ pub const Crypto = struct { const arguments = callframe.arguments(1).slice(); if (arguments.len == 0) { globalThis.throwInvalidArguments("Expected typed array but got nothing", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; } var array_buffer = arguments[0].asArrayBuffer(globalThis) orelse { globalThis.throwInvalidArguments("Expected typed array but got {s}", .{@tagName(arguments[0].jsType())}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; const slice = array_buffer.byteSlice(); diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index e4ff382112459c..f526110ab4c843 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -220,7 +220,7 @@ pub const Body = struct { break :brk globalThis.readableStreamToFormData(readable.value, switch (form_data.?.encoding) { .Multipart => |multipart| bun.String.init(multipart).toJS(globalThis), - .URLEncoded => JSC.JSValue.jsUndefined(), + .URLEncoded => .undefined, }); }, else => unreachable, diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig index f8f800ff2b888e..b1bb409f9da06a 100644 --- a/src/bun.js/webcore/encoding.zig +++ b/src/bun.js/webcore/encoding.zig @@ -198,7 +198,7 @@ pub const TextEncoder = struct { array.ensureStillAlive(); if (encoder.any_non_ascii) { - return JSC.JSValue.jsUndefined(); + return .undefined; } if (array.isEmpty()) { diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index 29705042f3719d..2adcfc127f1294 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -1220,7 +1220,7 @@ pub const Fetch = struct { const js_hostname = hostname.toJS(globalObject); js_hostname.ensureStillAlive(); js_cert.ensureStillAlive(); - const check_result = check_server_identity.call(globalObject, JSC.JSValue.jsUndefined(), &[_]JSC.JSValue{ js_hostname, js_cert }); + const check_result = 
check_server_identity.call(globalObject, .undefined, &[_]JSC.JSValue{ js_hostname, js_cert }); // if check failed abort the request if (check_result.isAnyError()) { // mark to wait until deinit @@ -2059,7 +2059,7 @@ pub const Fetch = struct { hostname = null; } // an error was thrown - return JSC.JSValue.jsUndefined(); + return .undefined; } } else { body = request.body.value.useAsAnyBlob(); @@ -2372,7 +2372,7 @@ pub const Fetch = struct { hostname = null; } // an error was thrown - return JSC.JSValue.jsUndefined(); + return .undefined; } } diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 4151d10064ff88..9767f055fce5f8 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -481,14 +481,14 @@ pub const StreamStart = union(Tag) { pub fn toJS(this: StreamStart, globalThis: *JSGlobalObject) JSC.JSValue { switch (this) { .empty, .ready => { - return JSC.JSValue.jsUndefined(); + return .undefined; }, .chunk_size => |chunk| { return JSC.JSValue.jsNumber(@as(Blob.SizeType, @intCast(chunk))); }, .err => |err| { globalThis.vm().throwError(globalThis, err.toJSC(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; }, .owned_and_done => |list| { return JSC.ArrayBuffer.fromBytes(list.slice(), .Uint8Array).toJS(globalThis, null); @@ -497,7 +497,7 @@ pub const StreamStart = union(Tag) { return JSC.ArrayBuffer.create(globalThis, list.slice(), .Uint8Array); }, else => { - return JSC.JSValue.jsUndefined(); + return .undefined; }, } } @@ -1633,7 +1633,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { .code = bun.String.static(@as(string, @tagName(JSC.Node.ErrorCode.ERR_ILLEGAL_CONSTRUCTOR))), }; globalThis.throwValue(err.toErrorInstance(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; } var allocator = globalThis.bunVM().allocator; @@ -1641,7 +1641,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { globalThis.vm().throwError(globalThis, Syscall.Error.oom.toJSC( globalThis, )); - return JSC.JSValue.jsUndefined(); + return .undefined; }; this.sink.construct(allocator); return createObject(globalThis, this, 0); @@ -1685,7 +1685,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { fn invalidThis(globalThis: *JSGlobalObject) JSValue { const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Sink", .{}, globalThis); globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn unprotect(this: *@This()) void { @@ -1700,7 +1700,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { if (comptime @hasDecl(SinkType, "getPendingError")) { if (this.sink.getPendingError()) |err| { globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } } @@ -1714,7 +1714,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { .{}, globalThis, )); - return JSC.JSValue.jsUndefined(); + return .undefined; } const arg = args[0]; @@ -1728,7 +1728,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { .{}, globalThis, )); - return JSC.JSValue.jsUndefined(); + return .undefined; } if (arg.asArrayBuffer(globalThis)) |buffer| { @@ -1747,7 +1747,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { .{}, globalThis, )); - return JSC.JSValue.jsUndefined(); + return .undefined; } const str = arg.getZigString(globalThis); @@ -1770,7 +1770,7 @@ 
pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { if (comptime @hasDecl(SinkType, "getPendingError")) { if (this.sink.getPendingError()) |err| { globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } } @@ -1784,7 +1784,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { globalThis, ); globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } const arg = args[0]; @@ -1808,7 +1808,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { if (comptime @hasDecl(SinkType, "getPendingError")) { if (this.sink.getPendingError()) |err| { globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } } @@ -1823,7 +1823,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { if (comptime @hasDecl(SinkType, "getPendingError")) { if (this.sink.getPendingError()) |err| { globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } } @@ -1842,7 +1842,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { .result => |value| value, .err => |err| blk: { globalThis.vm().throwError(globalThis, err.toJSC(globalThis)); - break :blk JSC.JSValue.jsUndefined(); + break :blk .undefined; }, }; } @@ -1858,7 +1858,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { if (comptime @hasDecl(SinkType, "getPendingError")) { if (this.sink.getPendingError()) |err| { globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } } @@ -1891,7 +1891,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { if (comptime @hasDecl(SinkType, "getPendingError")) { if (this.sink.getPendingError()) |err| { globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } } @@ -1914,7 +1914,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { if (comptime @hasDecl(SinkType, "getPendingError")) { if (this.sink.getPendingError()) |err| { globalThis.vm().throwError(globalThis, err); - return JSC.JSValue.jsUndefined(); + return .undefined; } } @@ -1969,7 +1969,7 @@ pub fn NewJSSink(comptime SinkType: type, comptime name_: []const u8) type { // var this = @ptrCast(*ThisSocket, @alignCast( fromJS(globalThis, callframe.this()) orelse { // const err = JSC.toTypeError(JSC.Node.ErrorCode.ERR_INVALID_THIS, "Expected Socket", .{}, globalThis); // globalThis.vm().throwError(globalThis, err); -// return JSC.JSValue.jsUndefined(); +// return .undefined; // })); // } // }; @@ -2765,7 +2765,7 @@ pub fn ReadableStreamSource( const view = arguments.ptr[0]; view.ensureStillAlive(); this.this_jsvalue = this_jsvalue; - var buffer = view.asArrayBuffer(globalThis) orelse return JSC.JSValue.jsUndefined(); + var buffer = view.asArrayBuffer(globalThis) orelse return .undefined; return processResult( this_jsvalue, globalThis, @@ -2784,7 +2784,7 @@ pub fn ReadableStreamSource( .chunk_size => |size| return JSValue.jsNumber(size), .err => |err| { globalThis.vm().throwError(globalThis, err.toJSC(globalThis)); - return JSC.JSValue.jsUndefined(); + return .undefined; }, else => |rc| { return rc.toJS(globalThis); @@ -2828,7 +2828,7 @@ pub fn ReadableStreamSource( JSC.markBinding(@src()); this.this_jsvalue = callFrame.this(); this.cancel(); - return JSC.JSValue.jsUndefined(); + return .undefined; } pub fn 
setOnCloseFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) bool { @@ -2894,7 +2894,7 @@ pub fn ReadableStreamSource( const ref_or_unref = callFrame.argument(0).toBooleanSlow(globalObject); this.setRef(ref_or_unref); - return JSC.JSValue.jsUndefined(); + return .undefined; } fn onClose(ptr: ?*anyopaque) void { @@ -3248,7 +3248,7 @@ pub const FileSink = struct { } if (this.done) { - return .{ .result = JSC.JSValue.jsUndefined() }; + return .{ .result = .undefined }; } const rc = this.writer.flush(); diff --git a/src/deps/c_ares.zig b/src/deps/c_ares.zig index 06871361cc27b3..e009a2ab544bc7 100644 --- a/src/deps/c_ares.zig +++ b/src/deps/c_ares.zig @@ -315,7 +315,7 @@ pub const struct_nameinfo = extern struct { const node_slice = this.node[0..node_len]; array.putIndex(globalThis, 0, JSC.ZigString.fromUTF8(node_slice).toJS(globalThis)); } else { - array.putIndex(globalThis, 0, JSC.JSValue.jsUndefined()); + array.putIndex(globalThis, 0, .undefined); } if (this.service != null) { @@ -323,7 +323,7 @@ pub const struct_nameinfo = extern struct { const service_slice = this.service[0..service_len]; array.putIndex(globalThis, 1, JSC.ZigString.fromUTF8(service_slice).toJS(globalThis)); } else { - array.putIndex(globalThis, 1, JSC.JSValue.jsUndefined()); + array.putIndex(globalThis, 1, .undefined); } return array; @@ -1579,7 +1579,7 @@ pub export fn Bun__canonicalizeIP( const addr_slice = addr.toSlice(bun.default_allocator); const addr_str = addr_slice.slice(); if (addr_str.len >= INET6_ADDRSTRLEN) { - return JSC.JSValue.jsUndefined(); + return .undefined; } var ip_std_text: [INET6_ADDRSTRLEN + 1]u8 = undefined; @@ -1593,12 +1593,12 @@ pub export fn Bun__canonicalizeIP( if (ares_inet_pton(af, &ip_addr, &ip_std_text) != 1) { af = AF.INET6; if (ares_inet_pton(af, &ip_addr, &ip_std_text) != 1) { - return JSC.JSValue.jsUndefined(); + return .undefined; } } // ip_addr will contain the null-terminated string of the cannonicalized IP if (ares_inet_ntop(af, &ip_std_text, &ip_addr, @sizeOf(@TypeOf(ip_addr))) == null) { - return JSC.JSValue.jsUndefined(); + return .undefined; } // use the null-terminated size to return the string const size = bun.len(bun.cast([*:0]u8, &ip_addr)); diff --git a/src/logger.zig b/src/logger.zig index 23c74879f11b4b..bef1602a4cbf38 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -717,7 +717,7 @@ pub const Log = struct { const count = @as(u16, @intCast(@min(msgs.len, errors_stack.len))); switch (count) { - 0 => return JSC.JSValue.jsUndefined(), + 0 => return .undefined, 1 => { const msg = msgs[0]; return switch (msg.metadata) { diff --git a/src/napi/napi.zig b/src/napi/napi.zig index 8a1799c3de8dd0..c8a82afb7e9853 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -725,7 +725,7 @@ pub export fn napi_make_callback(env: napi_env, _: *anyopaque, recv: napi_value, if (recv != .zero) recv else - JSC.JSValue.jsUndefined(), + .undefined, if (arg_count > 0 and args != null) @as([*]const JSC.JSValue, @ptrCast(args.?))[0..arg_count] else @@ -1608,10 +1608,10 @@ pub export fn napi_create_threadsafe_function( .callback = if (call_js_cb) |c| .{ .c = .{ .napi_threadsafe_function_call_js = c, - .js = if (func == .zero) JSC.JSValue.jsUndefined() else func.withAsyncContextIfNeeded(env), + .js = if (func == .zero) .undefined else func.withAsyncContextIfNeeded(env), }, } else .{ - .js = if (func == .zero) JSC.JSValue.jsUndefined() else func.withAsyncContextIfNeeded(env), + .js = if (func == .zero) .undefined else func.withAsyncContextIfNeeded(env), 
}, .ctx = context, .channel = ThreadSafeFunction.Queue.init(max_queue_size, bun.default_allocator), diff --git a/src/shell/shell.zig b/src/shell/shell.zig index ecd520cf9cd203..19cb9f1d8b62d4 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -4362,7 +4362,7 @@ pub const TestingAPIs = struct { var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string = arguments.nextEat() orelse { globalThis.throw("shellInternals.disabledOnPosix: expected 1 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; const bunstr = string.toBunString(globalThis); @@ -4386,7 +4386,7 @@ pub const TestingAPIs = struct { var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string_args = arguments.nextEat() orelse { globalThis.throw("shell_parse: expected 2 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; var arena = std.heap.ArenaAllocator.init(bun.default_allocator); @@ -4476,7 +4476,7 @@ pub const TestingAPIs = struct { var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string_args = arguments.nextEat() orelse { globalThis.throw("shell_parse: expected 2 arguments, got 0", .{}); - return JSC.JSValue.jsUndefined(); + return .undefined; }; var arena = bun.ArenaAllocator.init(bun.default_allocator); From 55437a760bcc4829abf8201140f0196d0295919c Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 04:14:45 -0700 Subject: [PATCH 11/46] launch.json: remove BUN_DEBUG_ALL=1 from 'bun run' (#12845) --- .vscode/launch.json | 1 - 1 file changed, 1 deletion(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 625ed0cf8d3e3b..4a7052e99b29b3 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -152,7 +152,6 @@ "BUN_DEBUG_QUIET_LOGS": "1", "BUN_DEBUG_EventLoop": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "2", - "BUN_DEBUG_ALL": "1", }, "console": "internalConsole", }, From e643788ae40bf9e5e2d757d563ff491954682423 Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Fri, 26 Jul 2024 14:13:58 -0700 Subject: [PATCH 12/46] bump webkit (#12858) Co-authored-by: dylan-conway --- CMakeLists.txt | 2 +- src/generated_versions_list.zig | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2045d033413f98..1c76bfd8ab8c5b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,7 +4,7 @@ cmake_policy(SET CMP0067 NEW) set(CMAKE_POLICY_DEFAULT_CMP0069 NEW) set(Bun_VERSION "1.1.21") -set(WEBKIT_TAG 49907bff8781719bc2ded068b0c934f6d0074d1e) +set(WEBKIT_TAG c737b24765cddf5294c425b2e23dd381f1e0b33e) set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}") message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}") diff --git a/src/generated_versions_list.zig b/src/generated_versions_list.zig index 56658e0e508c6f..84ef9277fc8800 100644 --- a/src/generated_versions_list.zig +++ b/src/generated_versions_list.zig @@ -4,7 +4,7 @@ pub const boringssl = "29a2cd359458c9384694b75456026e4b57e3e567"; pub const libarchive = "898dc8319355b7e985f68a9819f182aaed61b53a"; pub const mimalloc = "4c283af60cdae205df5a872530c77e2a6a307d43"; pub const picohttpparser = "066d2b1e9ab820703db0837a7255d92d30f0c9f5"; -pub const webkit = "49907bff8781719bc2ded068b0c934f6d0074d1e"; +pub const webkit = "c737b24765cddf5294c425b2e23dd381f1e0b33e"; pub const zig = @import("std").fmt.comptimePrint("{}", .{@import("builtin").zig_version}); pub const zlib = 
"886098f3f339617b4243b286f5ed364b9989e245"; pub const tinycc = "ab631362d839333660a265d3084d8ff060b96753"; From 0ea8ce3c78312024b40349041ee3eda90ac78bda Mon Sep 17 00:00:00 2001 From: dave caruso Date: Fri, 26 Jul 2024 14:14:16 -0700 Subject: [PATCH 13/46] Fix memory leak when printing any error's source code. (#12831) Co-authored-by: Jarred Sumner --- src/bun.js/bindings/bindings.cpp | 44 ++++++++++++--------- src/bun.js/bindings/exports.zig | 19 ++++++++- src/bun.js/bindings/headers-handwritten.h | 1 + src/bun.js/javascript.zig | 3 ++ src/string.zig | 2 +- test/js/bun/util/inspect-error-leak.test.js | 23 +++++++++++ 6 files changed, 72 insertions(+), 20 deletions(-) create mode 100644 test/js/bun/util/inspect-error-leak.test.js diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 01bf2de2c57af2..61f7f700e86346 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -1,4 +1,3 @@ -#include "JSFFIFunction.h" #include "root.h" #include "JavaScriptCore/JSCast.h" #include "JavaScriptCore/JSType.h" @@ -8,6 +7,7 @@ #include "JavaScriptCore/JSPromiseConstructor.h" #include "JavaScriptCore/DeleteAllCodeEffort.h" #include "JavaScriptCore/BooleanObject.h" +#include "JSFFIFunction.h" #include "headers.h" #include "BunClientData.h" @@ -4053,7 +4053,7 @@ static void populateStackFrameMetadata(JSC::VM& vm, const JSC::StackFrame* stack static void populateStackFramePosition(const JSC::StackFrame* stackFrame, BunString* source_lines, OrdinalNumber* source_line_numbers, uint8_t source_lines_count, - ZigStackFramePosition* position) + ZigStackFramePosition* position, JSC::SourceProvider** referenced_source_provider) { auto code = stackFrame->codeBlock(); if (!code) @@ -4067,6 +4067,7 @@ static void populateStackFramePosition(const JSC::StackFrame* stackFrame, BunStr WTF::StringView sourceString = provider->source(); if (UNLIKELY(sourceString.isNull())) return; + if (!stackFrame->hasBytecodeIndex()) { if (stackFrame->hasLineAndColumnInfo()) { auto lineColumn = stackFrame->computeLineAndColumn(); @@ -4079,6 +4080,7 @@ static void populateStackFramePosition(const JSC::StackFrame* stackFrame, BunStr } auto location = Bun::getAdjustedPositionForBytecode(code, stackFrame->bytecodeIndex()); + *position = location; if (source_lines_count > 1 && source_lines != nullptr && sourceString.is8Bit()) { // Search for the beginning of the line @@ -4096,8 +4098,14 @@ static void populateStackFramePosition(const JSC::StackFrame* stackFrame, BunStr const unsigned char* bytes = sourceString.span8().data(); - // Most of the time, when you look at a stack trace, you want a couple lines above - source_lines[0] = Bun::toStringRef(sourceString.substring(lineStart, lineEnd - lineStart).toStringWithoutCopying()); + // Most of the time, when you look at a stack trace, you want a couple lines above. + + // It is key to not clone this data because source code strings are large. + // Usage of toStringView (non-owning) is safe as we ref the provider. 
+ provider->ref(); + ASSERT(*referenced_source_provider == nullptr); + *referenced_source_provider = provider; + source_lines[0] = Bun::toStringView(sourceString.substring(lineStart, lineEnd - lineStart)); source_line_numbers[0] = location.line(); if (lineStart > 0) { @@ -4123,9 +4131,7 @@ static void populateStackFramePosition(const JSC::StackFrame* stackFrame, BunStr } // We are at the beginning of the line - source_lines[source_line_i] = Bun::toStringRef( - sourceString.substring(byte_offset_in_source_string, end_of_line_offset - byte_offset_in_source_string + 1) - .toStringWithoutCopying()); + source_lines[source_line_i] = Bun::toStringView(sourceString.substring(byte_offset_in_source_string, end_of_line_offset - byte_offset_in_source_string + 1)); source_line_numbers[source_line_i] = location.line().fromZeroBasedInt(location.line().zeroBasedInt() - source_line_i); source_line_i++; @@ -4136,17 +4142,15 @@ static void populateStackFramePosition(const JSC::StackFrame* stackFrame, BunStr } } } - - *position = location; } static void populateStackFrame(JSC::VM& vm, ZigStackTrace* trace, const JSC::StackFrame* stackFrame, - ZigStackFrame* frame, bool is_top) + ZigStackFrame* frame, bool is_top, JSC::SourceProvider** referenced_source_provider) { populateStackFrameMetadata(vm, stackFrame, frame); populateStackFramePosition(stackFrame, is_top ? trace->source_lines_ptr : nullptr, is_top ? trace->source_lines_numbers : nullptr, - is_top ? trace->source_lines_to_collect : 0, &frame->position); + is_top ? trace->source_lines_to_collect : 0, &frame->position, referenced_source_provider); } class V8StackTraceIterator { @@ -4332,7 +4336,7 @@ static void populateStackTrace(JSC::VM& vm, const WTF::Vector& break; ZigStackFrame* frame = &trace->frames_ptr[frame_i]; - populateStackFrame(vm, trace, &frames[stack_frame_i], frame, frame_i == 0); + populateStackFrame(vm, trace, &frames[stack_frame_i], frame, frame_i == 0, &trace->referenced_source_provider); stack_frame_i++; frame_i++; } @@ -4754,10 +4758,9 @@ JSC__JSValue JSC__JSValue__toError_(JSC__JSValue JSValue0) return JSC::JSValue::encode({}); } -void JSC__JSValue__toZigException(JSC__JSValue JSValue0, JSC__JSGlobalObject* arg1, - ZigException* exception) +void JSC__JSValue__toZigException(JSC__JSValue jsException, JSC__JSGlobalObject* global, ZigException* exception) { - JSC::JSValue value = JSC::JSValue::decode(JSValue0); + JSC::JSValue value = JSC::JSValue::decode(jsException); if (value == JSC::JSValue {}) { exception->code = JSErrorCodeError; exception->name = Bun::toStringRef("Error"_s); @@ -4767,17 +4770,17 @@ void JSC__JSValue__toZigException(JSC__JSValue JSValue0, JSC__JSGlobalObject* ar if (JSC::Exception* jscException = JSC::jsDynamicCast(value)) { if (JSC::ErrorInstance* error = JSC::jsDynamicCast(jscException->value())) { - fromErrorInstance(exception, arg1, error, &jscException->stack(), value); + fromErrorInstance(exception, global, error, &jscException->stack(), value); return; } } if (JSC::ErrorInstance* error = JSC::jsDynamicCast(value)) { - fromErrorInstance(exception, arg1, error, nullptr, value); + fromErrorInstance(exception, global, error, nullptr, value); return; } - exceptionFromString(exception, value, arg1); + exceptionFromString(exception, value, global); } void JSC__Exception__getStackTrace(JSC__Exception* arg0, ZigStackTrace* trace) @@ -5775,3 +5778,8 @@ CPP_DECL JSC__JSValue Bun__ProxyObject__getInternalField(JSC__JSValue value, uin { return 
JSValue::encode(jsCast(JSValue::decode(value))->internalField((ProxyObject::Field)id).get()); } + +CPP_DECL void JSC__SourceProvider__deref(JSC::SourceProvider* provider) +{ + provider->deref(); +} diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig index 5fe49a0a911644..3fd039729beabe 100644 --- a/src/bun.js/bindings/exports.zig +++ b/src/bun.js/bindings/exports.zig @@ -223,6 +223,13 @@ pub const ResolvedSource = extern struct { pub const Tag = @import("ResolvedSourceTag").ResolvedSourceTag; }; +pub const SourceProvider = opaque { + extern fn JSC__SourceProvider__deref(*SourceProvider) void; + pub fn deref(provider: *SourceProvider) void { + JSC__SourceProvider__deref(provider); + } +}; + const Mimalloc = @import("../../allocators/mimalloc.zig"); export fn ZigString__free(raw: [*]const u8, len: usize, allocator_: ?*anyopaque) void { @@ -426,6 +433,10 @@ pub const ZigStackTrace = extern struct { frames_ptr: [*]ZigStackFrame, frames_len: u8, + /// Non-null if `source_lines_*` points into data owned by a JSC::SourceProvider. + /// If so, then .deref must be called on it to release the memory. + referenced_source_provider: ?*JSC.SourceProvider = null, + pub fn toAPI( this: *const ZigStackTrace, allocator: std.mem.Allocator, @@ -786,6 +797,10 @@ pub const ZigException = extern struct { for (this.stack.frames_ptr[0..this.stack.frames_len]) |*frame| { frame.deinit(); } + + if (this.stack.referenced_source_provider) |source| { + source.deref(); + } } pub const shim = Shimmer("Zig", "Exception", @This()); @@ -828,7 +843,9 @@ pub const ZigException = extern struct { } pub fn deinit(this: *Holder, vm: *JSC.VirtualMachine) void { - this.zigException().deinit(); + if (this.loaded) { + this.zig_exception.deinit(); + } if (this.need_to_clear_parser_arena_on_deinit) { vm.module_loader.resetArena(vm); } diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h index 330f21b178e1a5..ce5287eb5f4794 100644 --- a/src/bun.js/bindings/headers-handwritten.h +++ b/src/bun.js/bindings/headers-handwritten.h @@ -169,6 +169,7 @@ typedef struct ZigStackTrace { uint8_t source_lines_to_collect; ZigStackFrame* frames_ptr; uint8_t frames_len; + JSC::SourceProvider* referenced_source_provider; } ZigStackTrace; typedef struct ZigException { diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 814cd4e8043d74..2b6ab6c36d1fd1 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -3246,6 +3246,9 @@ pub const VirtualMachine = struct { var exception = exception_holder.zigException(); defer exception_holder.deinit(this); + // The ZigException structure stores substrings of the source code, in + // which we need the lifetime of this data to outlive the inner call to + // remapZigException, but still get freed. var source_code_slice: ?ZigString.Slice = null; defer if (source_code_slice) |slice| slice.deinit(); diff --git a/src/string.zig b/src/string.zig index 8e65e83ead32f1..31df35dcdd7f99 100644 --- a/src/string.zig +++ b/src/string.zig @@ -277,7 +277,7 @@ pub const Tag = enum(u8) { /// into a WTF::String. /// Can be in either `utf8` or `utf16le` encodings. ZigString = 2, - /// Static memory that is guarenteed to never be freed. When converted to WTF::String, + /// Static memory that is guaranteed to never be freed. When converted to WTF::String, /// the memory is not cloned, but instead referenced with WTF::ExternalStringImpl. /// Can be in either `utf8` or `utf16le` encodings. 
StaticZigString = 3, diff --git a/test/js/bun/util/inspect-error-leak.test.js b/test/js/bun/util/inspect-error-leak.test.js new file mode 100644 index 00000000000000..49df1a331524c7 --- /dev/null +++ b/test/js/bun/util/inspect-error-leak.test.js @@ -0,0 +1,23 @@ +import { test, expect } from "bun:test"; + +const perBatch = 2000; +const repeat = 50; +test("Printing errors does not leak", () => { + function batch() { + for (let i = 0; i < perBatch; i++) { + Bun.inspect(new Error("leak")); + } + Bun.gc(true); + } + + batch(); + const baseline = Math.floor(process.memoryUsage.rss() / 1024); + for (let i = 0; i < repeat; i++) { + batch(); + } + + const after = Math.floor(process.memoryUsage.rss() / 1024); + const diff = ((after - baseline) / 1024) | 0; + console.log(`RSS increased by ${diff} MB`); + expect(diff, `RSS grew by ${diff} MB after ${perBatch * repeat} iterations`).toBeLessThan(10); +}, 10_000); From 9577ab29f31a1b64c1fef08ab786a8f970ec5a9c Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 26 Jul 2024 14:50:30 -0700 Subject: [PATCH 14/46] Rename `JSC.Node.StringOrBuffer` -> `StringOrBuffer` --- src/bun.js/node/node_fs.zig | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 2c3929314c8b02..a4a009b943ee55 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -39,6 +39,7 @@ const E = C.E; const uid_t = if (Environment.isPosix) std.posix.uid_t else bun.windows.libuv.uv_uid_t; const gid_t = if (Environment.isPosix) std.posix.gid_t else bun.windows.libuv.uv_gid_t; const ReadPosition = i64; +const StringOrBuffer = JSC.Node.StringOrBuffer; const Stats = JSC.Node.Stats; const Dirent = JSC.Node.Dirent; @@ -54,7 +55,6 @@ else // TODO: 0; -const StringOrBuffer = JSC.Node.StringOrBuffer; const ArrayBuffer = JSC.MarkedArrayBuffer; const Buffer = JSC.Buffer; const FileSystemFlags = JSC.Node.FileSystemFlags; @@ -2304,7 +2304,7 @@ pub const Arguments = struct { }; const MkdirTemp = struct { - prefix: JSC.Node.StringOrBuffer = .{ .buffer = .{ .buffer = JSC.ArrayBuffer.empty } }, + prefix: StringOrBuffer = .{ .buffer = .{ .buffer = JSC.ArrayBuffer.empty } }, encoding: Encoding = Encoding.utf8, pub fn deinit(this: MkdirTemp) void { @@ -2322,7 +2322,7 @@ pub const Arguments = struct { pub fn fromJS(ctx: JSC.C.JSContextRef, arguments: *ArgumentsSlice, exception: JSC.C.ExceptionRef) ?MkdirTemp { const prefix_value = arguments.next() orelse return MkdirTemp{}; - const prefix = JSC.Node.StringOrBuffer.fromJS(ctx, bun.default_allocator, prefix_value) orelse { + const prefix = StringOrBuffer.fromJS(ctx, bun.default_allocator, prefix_value) orelse { if (exception.* == null) { JSC.throwInvalidArguments( "prefix must be a string or TypedArray", @@ -2680,7 +2680,7 @@ pub const Arguments = struct { /// pub const Write = struct { fd: FileDescriptor, - buffer: JSC.Node.StringOrBuffer, + buffer: StringOrBuffer, // buffer_val: JSC.JSValue = JSC.JSValue.zero, offset: u64 = 0, length: u64 = std.math.maxInt(u64), @@ -4019,14 +4019,14 @@ const Return = struct { } } }; - pub const ReadFile = JSC.Node.StringOrBuffer; + pub const ReadFile = StringOrBuffer; pub const ReadFileWithOptions = union(enum) { string: string, buffer: JSC.Node.Buffer, null_terminated: [:0]const u8, }; - pub const Readlink = JSC.Node.StringOrBuffer; - pub const Realpath = JSC.Node.StringOrBuffer; + pub const Readlink = StringOrBuffer; + pub const Realpath = StringOrBuffer; pub const RealpathNative = Realpath; pub const Rename = void; 
pub const Rmdir = void; From ead7c64870560114691db7005a233f0647b8f71c Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 26 Jul 2024 14:50:56 -0700 Subject: [PATCH 15/46] When crash reporter is disabled also disable `resetSegfaultHanlder` --- src/crash_handler.zig | 2 ++ src/main.zig | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 74813acd5e23d5..c0123708424f0b 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -770,6 +770,8 @@ pub fn init() void { } pub fn resetSegfaultHandler() void { + if (!enable) return; + if (bun.Environment.os == .windows) { if (windows_segfault_handle) |handle| { const rc = windows.kernel32.RemoveVectoredExceptionHandler(handle); diff --git a/src/main.zig b/src/main.zig index 6c4cfb535bc3f6..8aff3b147ff37a 100644 --- a/src/main.zig +++ b/src/main.zig @@ -8,7 +8,7 @@ const Environment = bun.Environment; pub const panic = bun.crash_handler.panic; pub const std_options = std.Options{ - .enable_segfault_handler = !bun.crash_handler.enable, + .enable_segfault_handler = false, }; pub const io_mode = .blocking; From ca2b20d619ccdca624f261b02416c716e8002f8d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 26 Jul 2024 16:03:16 -0700 Subject: [PATCH 16/46] Configure libcpp assert to avoid macOS 13.0 issue (#12860) --- CMakeLists.txt | 12 ++++++++++++ scripts/env.sh | 3 +-- src/bun.js/bindings/ZigGlobalObject.cpp | 3 +-- src/bun.js/bindings/headers-handwritten.h | 8 ++++---- .../bindings/workaround-missing-symbols.cpp | 16 ++++++++++++++++ 5 files changed, 34 insertions(+), 8 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 1c76bfd8ab8c5b..24ba52bfe357b6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1000,8 +1000,20 @@ add_compile_definitions( ) if(NOT ASSERT_ENABLED) + if(APPLE) + add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=0") + add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE") + endif() + add_compile_definitions("NDEBUG=1") else() + if(APPLE) + add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=1") + add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG") + elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") + add_compile_definitions("_GLIBCXX_ASSERTIONS=1") + endif() + add_compile_definitions("ASSERT_ENABLED=1") endif() diff --git a/scripts/env.sh b/scripts/env.sh index 1ec1248e4c4109..6ff0e225afafc6 100755 --- a/scripts/env.sh +++ b/scripts/env.sh @@ -76,7 +76,7 @@ fi # https://gitlab.kitware.com/cmake/cmake/-/issues/25755 if [[ $(uname -s) == 'Darwin' && $LLVM_VERSION == '18' ]]; then export CFLAGS="$CFLAGS -fno-define-target-os-macros " - export CXXFLAGS="$CXXFLAGS -fno-define-target-os-macros " + export CXXFLAGS="$CXXFLAGS -fno-define-target-os-macros -D_LIBCXX_ENABLE_ASSERTIONS=0 -D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE " fi # libarchive needs position-independent executables to compile successfully @@ -120,7 +120,6 @@ fi if [[ $(uname -s) == 'Darwin' ]]; then export CMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET:-13.0} - CMAKE_FLAGS+=(-DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}) export CFLAGS="$CFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 " export CXXFLAGS="$CXXFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 " diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index b64e81326903ad..51b5276bca0b2d 100644 --- 
a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -714,7 +714,6 @@ void Zig::GlobalObject::resetOnEachMicrotaskTick() extern "C" JSC__JSGlobalObject* Zig__GlobalObject__create(void* console_client, int32_t executionContextId, bool miniMode, bool evalMode, void* worker_ptr) { - auto heapSize = miniMode ? JSC::HeapType::Small : JSC::HeapType::Large; JSC::VM& vm = JSC::VM::create(heapSize).leakRef(); // This must happen before JSVMClientData::create @@ -756,7 +755,7 @@ extern "C" JSC__JSGlobalObject* Zig__GlobalObject__create(void* console_client, auto* globalObject = createGlobalObject(); if (UNLIKELY(!globalObject)) { - PANIC("Failed to allocate JavaScript global object. Did your computer run out of memory?"); + BUN_PANIC("Failed to allocate JavaScript global object. Did your computer run out of memory?"); } globalObject->setConsole(console_client); diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h index ce5287eb5f4794..73a461e20858a8 100644 --- a/src/bun.js/bindings/headers-handwritten.h +++ b/src/bun.js/bindings/headers-handwritten.h @@ -139,6 +139,9 @@ const ZigStackFrameCode ZigStackFrameCodeGlobal = 4; const ZigStackFrameCode ZigStackFrameCodeWasm = 5; const ZigStackFrameCode ZigStackFrameCodeConstructor = 6; +extern "C" void __attribute((__noreturn__)) Bun__panic(const char* message, size_t length); +#define BUN_PANIC(message) Bun__panic(message, sizeof(message) - 1) + typedef struct ZigStackFramePosition { int32_t line_zero_based; int32_t column_zero_based; @@ -273,10 +276,7 @@ extern "C" void Bun__WTFStringImpl__ref(WTF::StringImpl* impl); extern "C" bool BunString__fromJS(JSC::JSGlobalObject*, JSC::EncodedJSValue, BunString*); extern "C" JSC::EncodedJSValue BunString__toJS(JSC::JSGlobalObject*, const BunString*); extern "C" void BunString__toWTFString(BunString*); -extern "C" void Bun__panic(const char* message, size_t length); -#ifndef PANIC -#define PANIC(message) Bun__panic(message, sizeof(message) - 1) -#endif + namespace Bun { JSC::JSValue toJS(JSC::JSGlobalObject*, BunString); BunString toString(JSC::JSGlobalObject* globalObject, JSC::JSValue value); diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 0b095e6c602f6c..8e61d2145b2cc4 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -1,4 +1,5 @@ + #if defined(WIN32) #include @@ -266,8 +267,23 @@ extern "C" int __wrap_mknodat(int dirfd, const char* path, __mode_t mode, __dev_ // macOS #if defined(__APPLE__) +#include #include #include +#include +#include +#include "headers.h" + +void std::__libcpp_verbose_abort(char const* format, ...) 
+{ + va_list list; + va_start(list, format); + char buffer[1024]; + size_t len = vsnprintf(buffer, sizeof(buffer), format, list); + va_end(list); + + Bun__panic(buffer, len); +} extern "C" int pthread_self_is_exiting_np() { From db806c87dc6f56adf0d42ae230e06c37e8fbff0c Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Fri, 26 Jul 2024 17:29:01 -0700 Subject: [PATCH 17/46] fix(build): assertion failure when cross-compiling on windows (#12862) Co-authored-by: dylan-conway --- CMakeLists.txt | 2 +- src/c.zig | 1 - src/compile_target.zig | 4 +++- src/generated_versions_list.zig | 2 +- src/sys.zig | 11 ++++++++--- 5 files changed, 13 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 24ba52bfe357b6..397847b9b07b77 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -4,7 +4,7 @@ cmake_policy(SET CMP0067 NEW) set(CMAKE_POLICY_DEFAULT_CMP0069 NEW) set(Bun_VERSION "1.1.21") -set(WEBKIT_TAG c737b24765cddf5294c425b2e23dd381f1e0b33e) +set(WEBKIT_TAG f9a0fda2d2b2fd001a00bfcf8e7917a56b382516) set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}") message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}") diff --git a/src/c.zig b/src/c.zig index 244172ed03a193..ff9226a66087ce 100644 --- a/src/c.zig +++ b/src/c.zig @@ -104,7 +104,6 @@ pub fn lstat_absolute(path: [:0]const u8) !Stat { // renameatZ fails when renaming across mount points // we assume that this is relatively uncommon -// TODO: change types to use `bun.FileDescriptor` pub fn moveFileZ(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { switch (bun.sys.renameatConcurrentlyWithoutFallback(from_dir, filename, to_dir, destination)) { .err => |err| { diff --git a/src/compile_target.zig b/src/compile_target.zig index c840c018dd187c..e3330614e7e47f 100644 --- a/src/compile_target.zig +++ b/src/compile_target.zig @@ -291,8 +291,10 @@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc const dirname = bun.path.dirname(dest_z, .loose); if (dirname.len > 0) { std.fs.cwd().makePath(dirname) catch {}; + continue; } - continue; + + // fallthrough, failed for another reason } node.end(); Output.err(err, "Failed to move cross-compiled bun binary into cache directory {}", .{bun.fmt.fmtPath(u8, dest_z, .{})}); diff --git a/src/generated_versions_list.zig b/src/generated_versions_list.zig index 84ef9277fc8800..a2dbf7bd8c055c 100644 --- a/src/generated_versions_list.zig +++ b/src/generated_versions_list.zig @@ -4,7 +4,7 @@ pub const boringssl = "29a2cd359458c9384694b75456026e4b57e3e567"; pub const libarchive = "898dc8319355b7e985f68a9819f182aaed61b53a"; pub const mimalloc = "4c283af60cdae205df5a872530c77e2a6a307d43"; pub const picohttpparser = "066d2b1e9ab820703db0837a7255d92d30f0c9f5"; -pub const webkit = "c737b24765cddf5294c425b2e23dd381f1e0b33e"; +pub const webkit = "f9a0fda2d2b2fd001a00bfcf8e7917a56b382516"; pub const zig = @import("std").fmt.comptimePrint("{}", .{@import("builtin").zig_version}); pub const zlib = "886098f3f339617b4243b286f5ed364b9989e245"; pub const tinycc = "ab631362d839333660a265d3084d8ff060b96753"; diff --git a/src/sys.zig b/src/sys.zig index ac935271802509..eee32ce4081d85 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -1868,7 +1868,8 @@ pub fn renameatConcurrentlyWithoutFallback( var err = switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{ .exclude = true, })) { - .err => |err| err, + // if ENOENT don't retry + .err => |err| if (err.getErrno() == .NOENT) 
return .{ .err = err } else err, .result => break :attempt_atomic_rename_and_fallback_to_racy_delete, }; @@ -1893,8 +1894,12 @@ pub fn renameatConcurrentlyWithoutFallback( } // sad path: let's try to delete the folder and then rename it - var to_dir = to_dir_fd.asDir(); - to_dir.deleteTree(to) catch {}; + if (to_dir_fd.isValid()) { + var to_dir = to_dir_fd.asDir(); + to_dir.deleteTree(to) catch {}; + } else { + std.fs.deleteTreeAbsolute(to) catch {}; + } switch (bun.sys.renameat(from_dir_fd, from, to_dir_fd, to)) { .err => |err| { return .{ .err = err }; From c28f384523f032cba5e404a988021c4192259c1d Mon Sep 17 00:00:00 2001 From: dave caruso Date: Fri, 26 Jul 2024 18:36:53 -0700 Subject: [PATCH 18/46] fix: make raiseIgnoringPanicHandler ignore the panic handler (#12578) Co-authored-by: paperdave Co-authored-by: Jarred Sumner --- .vscode/launch.json | 2 +- src/Global.zig | 21 +---- src/bun.js/WebKit | 2 +- src/cli/run_command.zig | 6 +- src/crash_handler.zig | 8 ++ src/fd.zig | 1 - src/install/lifecycle_script_runner.zig | 9 +- src/js/internal-for-testing.ts | 1 + test/cli/run/fixture-crash.js | 2 +- test/cli/run/run-crash-handler.test.ts | 110 +++++++++++++++--------- 10 files changed, 89 insertions(+), 73 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 4a7052e99b29b3..06fd6e26e21613 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -145,7 +145,7 @@ "request": "launch", "name": "bun run [file]", "program": "${workspaceFolder}/build/bun-debug", - "args": ["run", "${file}"], + "args": ["run", "${fileBasename}"], "cwd": "${fileDirname}", "env": { "FORCE_COLOR": "0", diff --git a/src/Global.zig b/src/Global.zig index 16882e0cae54b1..f572cdba1232f8 100644 --- a/src/Global.zig +++ b/src/Global.zig @@ -119,27 +119,12 @@ pub fn exit(code: u32) noreturn { bun.C.quick_exit(@bitCast(code)); } -pub fn raiseIgnoringPanicHandler(sig: anytype) noreturn { - if (comptime @TypeOf(sig) == bun.SignalCode) { - return raiseIgnoringPanicHandler(@intFromEnum(sig)); - } - +pub fn raiseIgnoringPanicHandler(sig: bun.SignalCode) noreturn { Output.flush(); - - if (!Environment.isWindows) { - if (sig >= 1 and sig != std.posix.SIG.STOP and sig != std.posix.SIG.KILL) { - const act = std.posix.Sigaction{ - .handler = .{ .sigaction = @ptrCast(@alignCast(std.posix.SIG.DFL)) }, - .mask = std.posix.empty_sigset, - .flags = 0, - }; - std.posix.sigaction(@intCast(sig), &act, null) catch {}; - } - } - Output.Source.Stdio.restore(); - _ = std.c.raise(sig); + bun.crash_handler.resetSegfaultHandler(); + _ = std.c.raise(@intFromEnum(sig)); std.c.abort(); } diff --git a/src/bun.js/WebKit b/src/bun.js/WebKit index 49018961cccf8c..49907bff878171 160000 --- a/src/bun.js/WebKit +++ b/src/bun.js/WebKit @@ -1 +1 @@ -Subproject commit 49018961cccf8cdcb3fd98e75a8a2226a295ed3c +Subproject commit 49907bff8781719bc2ded068b0c934f6d0074d1e diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index e735b5ff0a6636..5e8abbe276e2a7 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -578,8 +578,7 @@ pub const RunCommand = struct { }); } - Output.flush(); - Global.raiseIgnoringPanicHandler(@intFromEnum(signal)); + Global.raiseIgnoringPanicHandler(signal); }, .exited => |exit_code| { @@ -592,8 +591,7 @@ pub const RunCommand = struct { }); } - Output.flush(); - Global.raiseIgnoringPanicHandler(@intFromEnum(exit_code.signal)); + Global.raiseIgnoringPanicHandler(exit_code.signal); } const code = exit_code.code; diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 
c0123708424f0b..4a8f8082739b35 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -208,6 +208,9 @@ pub fn crashHandler( break :check_flag false; } } + // Act like release build when explicitly enabling reporting + if (isReportingEnabled()) break :check_flag false; + break :check_flag true; }; @@ -1562,6 +1565,7 @@ pub const js_bindings = struct { .{ "panic", jsPanic }, .{ "rootError", jsRootError }, .{ "outOfMemory", jsOutOfMemory }, + .{ "raiseIgnoringPanicHandler", jsRaiseIgnoringPanicHandler }, }) |tuple| { const name = JSC.ZigString.static(tuple[0]); obj.put(global, name, JSC.createCallback(global, name, 1, tuple[1])); @@ -1599,6 +1603,10 @@ pub const js_bindings = struct { bun.outOfMemory(); } + pub fn jsRaiseIgnoringPanicHandler(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { + bun.Global.raiseIgnoringPanicHandler(.SIGSEGV); + } + pub fn jsGetFeaturesAsVLQ(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { const bits = bun.Analytics.packedFeatures(); var buf = std.BoundedArray(u8, 16){}; diff --git a/src/fd.zig b/src/fd.zig index 27e9745e2ef115..b8c7fec50e34d8 100644 --- a/src/fd.zig +++ b/src/fd.zig @@ -272,7 +272,6 @@ pub const FDImpl = packed struct { if (env.isDebug) { if (result) |err| { if (err.errno == @intFromEnum(posix.E.BADF)) { - // TODO(@paperdave): Zig Compiler Bug, if you remove `this` from the log. An error is correctly printed, but with the wrong reference trace bun.Output.debugWarn("close({s}) = EBADF. This is an indication of a file descriptor UAF", .{this_fmt}); } else { log("close({s}) = {}", .{ this_fmt, err }); diff --git a/src/install/lifecycle_script_runner.zig b/src/install/lifecycle_script_runner.zig index 0d1eeddef310ea..f9df8bfd639f28 100644 --- a/src/install/lifecycle_script_runner.zig +++ b/src/install/lifecycle_script_runner.zig @@ -353,15 +353,15 @@ pub const LifecycleScriptSubprocess = struct { }, .signaled => |signal| { this.printOutput(); + const signal_code = bun.SignalCode.from(signal); + Output.prettyErrorln("error: {s} script from \"{s}\" terminated by {}", .{ this.scriptName(), this.package_name, - - bun.SignalCode.from(signal).fmt(Output.enable_ansi_colors_stderr), + signal_code.fmt(Output.enable_ansi_colors_stderr), }); - Global.raiseIgnoringPanicHandler(@intFromEnum(signal)); - return; + Global.raiseIgnoringPanicHandler(signal); }, .err => |err| { Output.prettyErrorln("error: Failed to run {s} script from \"{s}\" due to\n{}", .{ @@ -372,7 +372,6 @@ pub const LifecycleScriptSubprocess = struct { this.deinit(); Output.flush(); Global.exit(1); - return; }, else => { Output.panic("error: Failed to run {s} script from \"{s}\" due to unexpected status\n{any}", .{ diff --git a/src/js/internal-for-testing.ts b/src/js/internal-for-testing.ts index 4f6f42231b4e6e..9314943ab712d4 100644 --- a/src/js/internal-for-testing.ts +++ b/src/js/internal-for-testing.ts @@ -57,6 +57,7 @@ export const crash_handler = $zig("crash_handler.zig", "js_bindings.generate") a panic: () => void; rootError: () => void; outOfMemory: () => void; + raiseIgnoringPanicHandler: () => void; }; export const upgrade_test_helpers = $zig("upgrade_command.zig", "upgrade_js_bindings.generate") as { diff --git a/test/cli/run/fixture-crash.js b/test/cli/run/fixture-crash.js index 9a56452ac7022e..c90049da5d2dba 100644 --- a/test/cli/run/fixture-crash.js +++ b/test/cli/run/fixture-crash.js @@ -11,5 +11,5 @@ const approach = process.argv[2]; if (approach in crash_handler) { crash_handler[approach](); } else { - console.error("usage: bun fixture-crash.js "); 
+ console.error("usage: bun fixture-crash.js "); } diff --git a/test/cli/run/run-crash-handler.test.ts b/test/cli/run/run-crash-handler.test.ts index 0cc84f40c79fdb..0769129f8ac70f 100644 --- a/test/cli/run/run-crash-handler.test.ts +++ b/test/cli/run/run-crash-handler.test.ts @@ -11,58 +11,84 @@ test.if(process.platform === "darwin")("macOS has the assumed image offset", () expect(getMachOImageZeroOffset()).toBe(0x100000000); }); +test("raise ignoring panic handler does not trigger the panic handler", async () => { + let sent = false; + let onresolve = Promise.withResolvers(); + + using server = Bun.serve({ + port: 0, + fetch(request, server) { + sent = true; + onresolve.resolve(); + return new Response("OK"); + }, + }); + + const proc = Bun.spawn({ + cmd: [bunExe(), path.join(import.meta.dir, "fixture-crash.js"), "raiseIgnoringPanicHandler"], + env: mergeWindowEnvs([ + bunEnv, + { + BUN_CRASH_REPORT_URL: server.url.toString(), + BUN_ENABLE_CRASH_REPORTING: "1", + }, + ]), + }); + await proc.exited; + + await Promise.race([onresolve.promise, Bun.sleep(1000)]); + + expect(proc.exitCode).not.toBe(0); + expect(sent).toBe(false); +}); + describe("automatic crash reporter", () => { - const has_reporting = process.platform !== "linux"; + for (const approach of ["panic", "segfault", "outOfMemory"]) { + test(`${approach} should report`, async () => { + let sent = false; + let onresolve = Promise.withResolvers(); - for (const should_report of has_reporting ? [true, false] : [false]) { - for (const approach of ["panic", "segfault"]) { - // TODO: this dependency injection no worky. fix later - test.todo(`${approach} ${should_report ? "should" : "should not"} report`, async () => { - const temp = tempDirWithFiles("crash-handler-path", { - "curl": ({ root }) => `#!/usr/bin/env bash -echo $@ > ${root}/request.out -`, - "powershell.cmd": ({ root }) => `echo true > ${root}\\request.out -`, - }); + // Self host the crash report backend. + using server = Bun.serve({ + port: 0, + fetch(request, server) { + expect(request.url).toEndWith("/ack"); + sent = true; + onresolve.resolve(); + return new Response("OK"); + }, + }); - const env: any = mergeWindowEnvs([ + const proc = Bun.spawn({ + cmd: [bunExe(), path.join(import.meta.dir, "fixture-crash.js"), approach], + env: mergeWindowEnvs([ + bunEnv, { - ...bunEnv, + BUN_CRASH_REPORT_URL: server.url.toString(), + BUN_ENABLE_CRASH_REPORTING: "1", GITHUB_ACTIONS: undefined, CI: undefined, }, - { - PATH: temp + path.delimiter + process.env.PATH, - }, - ]); - - if (!should_report) { - env.DO_NOT_TRACK = "1"; - } + ]), + stdio: ["ignore", "pipe", "pipe"], + }); + await proc.exited; - const result = Bun.spawnSync( - [ - bunExe(), - path.join(import.meta.dir, "fixture-crash.js"), - approach, - "--debug-crash-handler-use-trace-string", - ], - { env }, - ); + await Promise.race([onresolve.promise, Bun.sleep(1000)]); - console.log(result.stderr.toString("utf-8")); - try { - expect(result.stderr.toString("utf-8")).toInclude("https://bun.report/"); - } catch (e) { - throw e; - } + const stderr = await Bun.readableStreamToText(proc.stderr); - await Bun.sleep(1000); + console.log(stderr); - const did_report = existsSync(path.join(temp, "request.out")); - expect(did_report).toBe(should_report); - }); - } + expect(proc.exitCode).not.toBe(0); + expect(stderr).toContain(server.url.toString()); + if (approach !== "outOfMemory") { + expect(stderr).toContain("oh no: Bun has crashed. 
This indicates a bug in Bun, not your code"); + } else { + expect(stderr.toLowerCase()).toContain("out of memory"); + expect(stderr.toLowerCase()).not.toContain("panic"); + } + expect(sent).toBe(true); + }); } }); From eec1767a2f66965d027d9d0870ffd77f647d7f33 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 26 Jul 2024 18:47:02 -0700 Subject: [PATCH 19/46] ci: format: switch to mlugg/setup-zig (#12863) --- .github/workflows/run-format.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run-format.yml b/.github/workflows/run-format.yml index 28fd7a9804b4ff..4d03ce22e83acd 100644 --- a/.github/workflows/run-format.yml +++ b/.github/workflows/run-format.yml @@ -31,7 +31,7 @@ jobs: with: bun-version: "1.1.20" - name: Setup Zig - uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee + uses: mlugg/setup-zig@v1 with: version: ${{ inputs.zig-version }} - name: Install Dependencies From 644c5c449adc4222adcca1999344014fa7805bf7 Mon Sep 17 00:00:00 2001 From: dave caruso Date: Fri, 26 Jul 2024 19:39:37 -0700 Subject: [PATCH 20/46] fix a bundler crash (#12864) Co-authored-by: paperdave --- src/bundler/bundle_v2.zig | 25 ++++++++++++-------- src/thread_pool.zig | 8 ++----- src/work_pool.zig | 14 +++++++++-- test/bundler/bun-build-api.test.ts | 37 ++++++++++++++++++++++++++++++ 4 files changed, 66 insertions(+), 18 deletions(-) diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 8387da17605d2d..f91d179556f2a0 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -1658,17 +1658,11 @@ pub const BundleV2 = struct { bundler.resolver.opts = bundler.options; - var this = try BundleV2.init(bundler, allocator, JSC.AnyEventLoop.init(allocator), false, JSC.WorkPool.get(), heap); + const this = try BundleV2.init(bundler, allocator, JSC.AnyEventLoop.init(allocator), false, JSC.WorkPool.get(), heap); this.plugins = completion.plugins; this.completion = completion; completion.bundler = this; - errdefer { - var out_log = Logger.Log.init(bun.default_allocator); - this.bundler.log.appendToWithRecycled(&out_log, true) catch bun.outOfMemory(); - completion.log = out_log; - } - defer { if (this.graph.pool.pool.threadpool_context == @as(?*anyopaque, @ptrCast(this.graph.pool))) { this.graph.pool.pool.threadpool_context = null; @@ -1678,6 +1672,16 @@ pub const BundleV2 = struct { this.deinit(); } + errdefer { + // Wait for wait groups to finish. 
There still may be ongoing work.
+ this.linker.source_maps.line_offset_wait_group.wait();
+ this.linker.source_maps.quoted_contents_wait_group.wait();
+
+ var out_log = Logger.Log.init(bun.default_allocator);
+ this.bundler.log.appendToWithRecycled(&out_log, true) catch bun.outOfMemory();
+ completion.log = out_log;
+ }
+
 completion.result = .{
 .value = .{
 .output_files = try this.runFromJSInNewThread(config),
@@ -3836,7 +3840,7 @@ pub const LinkerContext = struct {
 options: LinkerOptions = .{},
- wait_group: ThreadPoolLib.WaitGroup = undefined,
+ wait_group: ThreadPoolLib.WaitGroup = .{},
 ambiguous_result_pool: std.ArrayList(MatchImport) = undefined,
@@ -3874,10 +3878,10 @@ pub const LinkerContext = struct {
 };
 pub const SourceMapData = struct {
- line_offset_wait_group: sync.WaitGroup = undefined,
+ line_offset_wait_group: sync.WaitGroup = .{},
 line_offset_tasks: []Task = &.{},
- quoted_contents_wait_group: sync.WaitGroup = undefined,
+ quoted_contents_wait_group: sync.WaitGroup = .{},
 quoted_contents_tasks: []Task = &.{},
 pub const Task = struct {
@@ -9123,6 +9127,7 @@ pub const LinkerContext = struct {
 wait_group.deinit();
 c.allocator.destroy(wait_group);
 }
+ errdefer wait_group.wait();
 {
 var total_count: usize = 0;
 for (chunks, chunk_contexts) |*chunk, *chunk_ctx| {
diff --git a/src/thread_pool.zig b/src/thread_pool.zig
index 8d5f859d44538c..e7e8b8d1071fe7 100644
--- a/src/thread_pool.zig
+++ b/src/thread_pool.zig
@@ -134,14 +134,10 @@ pub const Batch = struct {
 pub const WaitGroup = struct {
 mutex: std.Thread.Mutex = .{},
 counter: u32 = 0,
- event: std.Thread.ResetEvent,
+ event: std.Thread.ResetEvent = .{},
 pub fn init(self: *WaitGroup) void {
- self.* = .{
- .mutex = .{},
- .counter = 0,
- .event = undefined,
- };
+ self.* = .{};
 }
 pub fn deinit(self: *WaitGroup) void {
diff --git a/src/work_pool.zig b/src/work_pool.zig
index 5cbad488ff22c2..b9e1bd157315b1 100644
--- a/src/work_pool.zig
+++ b/src/work_pool.zig
@@ -14,13 +14,23 @@ pub fn NewWorkPool(comptime max_threads: ?usize) type {
 @setCold(true);
 pool = ThreadPool.init(.{
- .max_threads = max_threads orelse @max(@as(u32, @truncate(std.Thread.getCpuCount() catch 0)), 2),
+ .max_threads = max_threads orelse @max(2, max_threads: {
+ if (bun.getenvZ("GOMAXPROCS")) |max_procs| try_override: {
+ break :max_threads std.fmt.parseInt(u32, max_procs, 10) catch
+ break :try_override;
+ }
+
+ break :max_threads @as(u32, @truncate(std.Thread.getCpuCount() catch 0));
+ }),
 .stack_size = ThreadPool.default_thread_stack_size,
 });
 return &pool;
 }
+
+ /// Initialization of WorkPool is not thread-safe, as it is
+ /// assumed a single main thread sets everything up. Calling
+ /// this afterwards is thread-safe.
pub inline fn get() *ThreadPool { - // lil racy if (loaded) return &pool; loaded = true; diff --git a/test/bundler/bun-build-api.test.ts b/test/bundler/bun-build-api.test.ts index 5f5e89634b169a..586361a9bf084b 100644 --- a/test/bundler/bun-build-api.test.ts +++ b/test/bundler/bun-build-api.test.ts @@ -14,6 +14,43 @@ describe("Bun.build", () => { throw new Error("should have thrown"); }); + // https://github.com/oven-sh/bun/issues/12818 + test("sourcemap + build error crash case", async () => { + const dir = tempDirWithFiles("build", { + "/src/file1.ts": ` + import { A } from './dir'; + console.log(A); + `, + "/src/dir/index.ts": ` + import { B } from "./file3"; + export const A = [B] + `, + "/src/dir/file3.ts": ` + import { C } from "../file1"; // error + export const B = C; + `, + "/src/package.json": ` + { "type": "module" } + `, + "/src/tsconfig.json": ` + { + "extends": "../tsconfig.json", + "compilerOptions": { + "target": "ESNext", + "module": "ESNext", + "types": [] + } + } + `, + }); + const y = await Bun.build({ + entrypoints: [join(dir, "src/file1.ts")], + outdir: join(dir, "out"), + sourcemap: "external", + external: ["@minecraft"], + }); + }); + test("invalid options throws", async () => { expect(() => Bun.build({} as any)).toThrow(); expect(() => From 4effef3eb1394e40ca09afc31959831023400d2d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 26 Jul 2024 19:53:36 -0700 Subject: [PATCH 21/46] Handle errors in node:http better (#12641) Co-authored-by: Jarred-Sumner --- bench/sqlite/better-sqlite3.mjs | 31 + bench/sqlite/node.mjs | 7 +- src/js/node/http.ts | 682 ++++++++++-------- .../undici/undici-primordials.test.ts | 14 +- .../node/http/node-fetch-primordials.test.ts | 12 +- test/js/node/http/node-fetch.test.js | 12 +- ...de-http-error-in-data-handler-fixture.1.js | 35 + ...de-http-error-in-data-handler-fixture.2.js | 36 + .../node/http/node-http-primoridals.test.ts | 14 +- test/js/node/http/node-http.test.ts | 55 +- 10 files changed, 577 insertions(+), 321 deletions(-) create mode 100644 bench/sqlite/better-sqlite3.mjs create mode 100644 test/js/node/http/node-http-error-in-data-handler-fixture.1.js create mode 100644 test/js/node/http/node-http-error-in-data-handler-fixture.2.js diff --git a/bench/sqlite/better-sqlite3.mjs b/bench/sqlite/better-sqlite3.mjs new file mode 100644 index 00000000000000..9bf25105b908e1 --- /dev/null +++ b/bench/sqlite/better-sqlite3.mjs @@ -0,0 +1,31 @@ +import { run, bench } from "mitata"; +import { createRequire } from "module"; + +const require = createRequire(import.meta.url); +const db = require("better-sqlite3")("./src/northwind.sqlite"); + +{ + const sql = db.prepare(`SELECT * FROM "Order"`); + + bench('SELECT * FROM "Order"', () => { + sql.all(); + }); +} + +{ + const sql = db.prepare(`SELECT * FROM "Product"`); + + bench('SELECT * FROM "Product"', () => { + sql.all(); + }); +} + +{ + const sql = db.prepare(`SELECT * FROM "OrderDetail"`); + + bench('SELECT * FROM "OrderDetail"', () => { + sql.all(); + }); +} + +await run(); diff --git a/bench/sqlite/node.mjs b/bench/sqlite/node.mjs index 9bf25105b908e1..7602a87612d252 100644 --- a/bench/sqlite/node.mjs +++ b/bench/sqlite/node.mjs @@ -1,8 +1,9 @@ +// Run `node --experimental-sqlite bench/sqlite/node.mjs` to run the script. +// You will need `--experimental-sqlite` flag to run this script and node v22.5.0 or higher. 
import { run, bench } from "mitata"; -import { createRequire } from "module"; +import { DatabaseSync as Database } from "node:sqlite"; -const require = createRequire(import.meta.url); -const db = require("better-sqlite3")("./src/northwind.sqlite"); +const db = new Database("./src/northwind.sqlite"); { const sql = db.prepare(`SELECT * FROM "Order"`); diff --git a/src/js/node/http.ts b/src/js/node/http.ts index 6a7b821428531b..45c8efefb8cd1b 100644 --- a/src/js/node/http.ts +++ b/src/js/node/http.ts @@ -23,8 +23,12 @@ const { headersTuple: any; }; +// TODO: make this more robust. +function isAbortError(err) { + return err?.name === "AbortError"; +} + const ObjectDefineProperty = Object.defineProperty; -const ObjectSetPrototypeOf = Object.setPrototypeOf; const GlobalPromise = globalThis.Promise; const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/; @@ -288,7 +292,7 @@ function Agent(options = kEmptyObject) { this.protocol = options.protocol || "http:"; } Agent.prototype = {}; -ObjectSetPrototypeOf(Agent.prototype, EventEmitter.prototype); +$setPrototypeDirect.$call(Agent.prototype, EventEmitter.prototype); ObjectDefineProperty(Agent, "globalAgent", { get: function () { @@ -345,21 +349,11 @@ Agent.prototype.destroy = function () { $debug(`${NODE_HTTP_WARNING}\n`, "WARN: Agent.destroy is a no-op"); }; -function emitListeningNextTick(self, onListen, err, hostname, port) { - if (typeof onListen === "function") { - try { - onListen.$apply(self, [err, hostname, port]); - } catch (err) { - self.emit("error", err); - } - } - - self.listening = !err; - - if (err) { - self.emit("error", err); - } else { - self.emit("listening", hostname, port); +function emitListeningNextTick(self, hostname, port) { + if ((self.listening = !!self[serverSymbol])) { + // TODO: remove the arguments + // Note does not pass any arguments. + self.emit("listening", null, hostname, port); } } @@ -445,203 +439,220 @@ function Server(options, callback) { if (callback) this.on("request", callback); return this; } -Object.setPrototypeOf((Server.prototype = {}), EventEmitter.prototype); -Server.prototype.constructor = Server; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(Server, EventEmitter); -Server.prototype.ref = function () { - this._unref = false; - this[serverSymbol]?.ref?.(); - return this; -}; +Server.prototype = { + ref() { + this._unref = false; + this[serverSymbol]?.ref?.(); + return this; + }, -Server.prototype.unref = function () { - this._unref = true; - this[serverSymbol]?.unref?.(); - return this; -}; + unref() { + this._unref = true; + this[serverSymbol]?.unref?.(); + return this; + }, -Server.prototype.closeAllConnections = function () { - const server = this[serverSymbol]; - if (!server) { - return; - } - this[serverSymbol] = undefined; - server.stop(true); - this.emit("close"); -}; + closeAllConnections() { + const server = this[serverSymbol]; + if (!server) { + return; + } + this[serverSymbol] = undefined; + server.stop(true); + process.nextTick(emitCloseNT, this); + }, -Server.prototype.closeIdleConnections = function () { - // not actually implemented -}; + closeIdleConnections() { + // not actually implemented + }, -Server.prototype.close = function (optionalCallback?) 
{ - const server = this[serverSymbol]; - if (!server) { - if (typeof optionalCallback === "function") process.nextTick(optionalCallback, new Error("Server is not running")); - return; - } - this[serverSymbol] = undefined; - if (typeof optionalCallback === "function") this.once("close", optionalCallback); - server.stop(); - this.emit("close"); -}; + close(optionalCallback?) { + const server = this[serverSymbol]; + if (!server) { + if (typeof optionalCallback === "function") + process.nextTick(optionalCallback, new Error("Server is not running")); + return; + } + this[serverSymbol] = undefined; + if (typeof optionalCallback === "function") this.once("close", optionalCallback); + server.stop(); + process.nextTick(emitCloseNT, this); + }, -Server.prototype[Symbol.asyncDispose] = function () { - const { resolve, reject, promise } = Promise.withResolvers(); - this.close(function (err, ...args) { - if (err) reject(err); - else resolve(...args); - }); - return promise; -}; + [Symbol.asyncDispose]() { + const { resolve, reject, promise } = Promise.withResolvers(); + this.close(function (err, ...args) { + if (err) reject(err); + else resolve(...args); + }); + return promise; + }, -Server.prototype.address = function () { - if (!this[serverSymbol]) return null; - return this[serverSymbol].address; -}; + address() { + if (!this[serverSymbol]) return null; + return this[serverSymbol].address; + }, -Server.prototype.listen = function (port, host, backlog, onListen) { - const server = this; - let socketPath; - if (typeof port === "undefined") { - port = 0; - } - if (typeof port == "string" && !Number.isSafeInteger(Number(port))) { - socketPath = port; - } - if (typeof host === "function") { - onListen = host; - host = undefined; - } + listen() { + const server = this; + let port, host, onListen; + let socketPath; + let tls = this[tlsSymbol]; + + // This logic must align with: + // - https://github.com/nodejs/node/blob/2eff28fb7a93d3f672f80b582f664a7c701569fb/lib/net.js#L274-L307 + if (arguments.length > 0) { + if (($isObject(arguments[0]) || $isCallable(arguments[0])) && arguments[0] !== null) { + // (options[...][, cb]) + port = arguments[0].port; + host = arguments[0].host; + socketPath = arguments[0].path; + + const otherTLS = arguments[0].tls; + if (otherTLS && $isObject(otherTLS)) { + tls = otherTLS; + } + } else if (typeof arguments[0] === "string" && !(Number(arguments[0]) >= 0)) { + // (path[...][, cb]) + socketPath = arguments[0]; + } else { + // ([port][, host][...][, cb]) + port = arguments[0]; + if (arguments.length > 1 && typeof arguments[1] === "string") { + host = arguments[1]; + } + } + } - if (typeof port === "function") { - onListen = port; - } else if (typeof port === "object") { - port?.signal?.addEventListener("abort", () => { - this.close(); - }); + // Bun defaults to port 3000. + // Node defaults to port 0. 
+ if (port === undefined && !socketPath) { + port = 0; + } - host = port?.host; - port = port?.port; + if ($isCallable(arguments[arguments.length - 1])) { + onListen = arguments[arguments.length - 1]; + } - if (typeof port?.callback === "function") onListen = port?.callback; - } + const ResponseClass = this[optionsSymbol].ServerResponse || ServerResponse; + const RequestClass = this[optionsSymbol].IncomingMessage || IncomingMessage; + let isHTTPS = false; - if (typeof backlog === "function") { - onListen = backlog; - } + try { + if (tls) { + this.serverName = tls.serverName || host || "localhost"; + } + this[serverSymbol] = Bun.serve({ + tls, + port, + hostname: host, + unix: socketPath, + // Bindings to be used for WS Server + websocket: { + open(ws) { + ws.data.open(ws); + }, + message(ws, message) { + ws.data.message(ws, message); + }, + close(ws, code, reason) { + ws.data.close(ws, code, reason); + }, + drain(ws) { + ws.data.drain(ws); + }, + ping(ws, data) { + ws.data.ping(ws, data); + }, + pong(ws, data) { + ws.data.pong(ws, data); + }, + }, + maxRequestBodySize: Number.MAX_SAFE_INTEGER, + // Be very careful not to access (web) Request object + // properties: + // - request.url + // - request.headers + // + // We want to avoid triggering the getter for these properties because + // that will cause the data to be cloned twice, which costs memory & performance. + fetch(req, _server) { + var pendingResponse; + var pendingError; + var reject = err => { + if (pendingError) return; + pendingError = err; + if (rejectFunction) rejectFunction(err); + }; + + var reply = function (resp) { + if (pendingResponse) return; + pendingResponse = resp; + if (resolveFunction) resolveFunction(resp); + }; + + const prevIsNextIncomingMessageHTTPS = isNextIncomingMessageHTTPS; + isNextIncomingMessageHTTPS = isHTTPS; + const http_req = new RequestClass(req); + isNextIncomingMessageHTTPS = prevIsNextIncomingMessageHTTPS; + + const upgrade = http_req.headers.upgrade; + + const http_res = new ResponseClass(http_req, reply); + + http_req.socket[kInternalSocketData] = [_server, http_res, req]; + server.emit("connection", http_req.socket); + + const rejectFn = err => reject(err); + http_req.once("error", rejectFn); + http_res.once("error", rejectFn); + + if (upgrade) { + server.emit("upgrade", http_req, http_req.socket, kEmptyBuffer); + } else { + server.emit("request", http_req, http_res); + } - const ResponseClass = this[optionsSymbol].ServerResponse || ServerResponse; - const RequestClass = this[optionsSymbol].IncomingMessage || IncomingMessage; - let isHTTPS = false; + if (pendingError) { + throw pendingError; + } - try { - const tls = this[tlsSymbol]; - if (tls) { - this.serverName = tls.serverName || host || "localhost"; - } - this[serverSymbol] = Bun.serve({ - tls, - port, - hostname: host, - unix: socketPath, - // Bindings to be used for WS Server - websocket: { - open(ws) { - ws.data.open(ws); - }, - message(ws, message) { - ws.data.message(ws, message); - }, - close(ws, code, reason) { - ws.data.close(ws, code, reason); - }, - drain(ws) { - ws.data.drain(ws); - }, - ping(ws, data) { - ws.data.ping(ws, data); - }, - pong(ws, data) { - ws.data.pong(ws, data); - }, - }, - maxRequestBodySize: Number.MAX_SAFE_INTEGER, - // Be very careful not to access (web) Request object - // properties: - // - request.url - // - request.headers - // - // We want to avoid triggering the getter for these properties because - // that will cause the data to be cloned twice, which costs memory & performance. 
- fetch(req, _server) { - var pendingResponse; - var pendingError; - var reject = err => { - if (pendingError) return; - pendingError = err; - if (rejectFunction) rejectFunction(err); - }; - - var reply = function (resp) { - if (pendingResponse) return; - pendingResponse = resp; - if (resolveFunction) resolveFunction(resp); - }; - - const prevIsNextIncomingMessageHTTPS = isNextIncomingMessageHTTPS; - isNextIncomingMessageHTTPS = isHTTPS; - const http_req = new RequestClass(req); - isNextIncomingMessageHTTPS = prevIsNextIncomingMessageHTTPS; - - const upgrade = http_req.headers.upgrade; - - const http_res = new ResponseClass(http_req, reply); - - http_req.socket[kInternalSocketData] = [_server, http_res, req]; - server.emit("connection", http_req.socket); - - const rejectFn = err => reject(err); - http_req.once("error", rejectFn); - http_res.once("error", rejectFn); - - if (upgrade) { - server.emit("upgrade", http_req, http_req.socket, kEmptyBuffer); - } else { - server.emit("request", http_req, http_res); - } + if (pendingResponse) { + return pendingResponse; + } - if (pendingError) { - throw pendingError; - } + var { promise, resolve: resolveFunction, reject: rejectFunction } = $newPromiseCapability(GlobalPromise); + return promise; + }, + }); + isHTTPS = this[serverSymbol].protocol === "https"; - if (pendingResponse) { - return pendingResponse; - } + if (this?._unref) { + this[serverSymbol]?.unref?.(); + } - var { promise, resolve: resolveFunction, reject: rejectFunction } = $newPromiseCapability(GlobalPromise); - return promise; - }, - }); - isHTTPS = this[serverSymbol].protocol === "https"; + if ($isCallable(onListen)) { + this.once("listening", onListen); + } - if (this?._unref) { - this[serverSymbol]?.unref?.(); + setTimeout(emitListeningNextTick, 1, this, this[serverSymbol].hostname, this[serverSymbol].port); + } catch (err) { + server.emit("error", err); } - setTimeout(emitListeningNextTick, 1, this, onListen, null, this[serverSymbol].hostname, this[serverSymbol].port); - } catch (err) { - server.emit("error", err); - } + return this; + }, - return this; -}; + setTimeout(msecs, callback) { + // TODO: + return this; + }, -Server.prototype.setTimeout = function (msecs, callback) { - // TODO: - return this; + constructor: Server, }; +$setPrototypeDirect.$call(Server.prototype, EventEmitter.prototype); +$setPrototypeDirect.$call(Server, EventEmitter); function assignHeadersSlow(object, req) { const headers = req.headers; @@ -758,133 +769,172 @@ function IncomingMessage(req, defaultIncomingOpts) { this.complete = !!this[noBodySymbol]; } -Object.setPrototypeOf((IncomingMessage.prototype = {}), Readable.prototype); -IncomingMessage.prototype.constructor = IncomingMessage; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(IncomingMessage, Readable); +IncomingMessage.prototype = { + constructor: IncomingMessage, + _construct(callback) { + // TODO: streaming + if (this[typeSymbol] === "response" || this[noBodySymbol]) { + callback(); + return; + } -IncomingMessage.prototype._construct = function (callback) { - // TODO: streaming - if (this[typeSymbol] === "response" || this[noBodySymbol]) { - callback(); - return; - } + const contentLength = this.headers["content-length"]; + const length = contentLength ? parseInt(contentLength, 10) : 0; + if (length === 0) { + this[noBodySymbol] = true; + callback(); + return; + } - const contentLength = this.headers["content-length"]; - const length = contentLength ? 
parseInt(contentLength, 10) : 0; - if (length === 0) { - this[noBodySymbol] = true; callback(); - return; - } - - callback(); -}; + }, + _read(size) { + if (this[noBodySymbol]) { + this.complete = true; + this.push(null); + } else if (this[bodyStreamSymbol] == null) { + const reader = this[reqSymbol].body?.getReader() as ReadableStreamDefaultReader; + if (!reader) { + this.complete = true; + this.push(null); + return; + } + this[bodyStreamSymbol] = reader; + consumeStream(this, reader); + } + }, + _destroy(err, cb) { + if (!this.readableEnded || !this.complete) { + this[abortedSymbol] = true; + // IncomingMessage emits 'aborted'. + // Client emits 'abort'. + this.emit("aborted"); + } -async function consumeStream(self, reader: ReadableStreamDefaultReader) { - while (true) { - var { done, value } = await reader.readMany(); - if (self[abortedSymbol]) return; - if (done) { - self.complete = true; - self.push(null); - break; + // Suppress "AbortError" from fetch() because we emit this in the 'aborted' event + if (isAbortError(err)) { + err = undefined; } - for (var v of value) { - self.push(v); + + const stream = this[bodyStreamSymbol]; + this[bodyStreamSymbol] = undefined; + const streamState = stream?.$state; + + if (streamState === $streamReadable || streamState === $streamWaiting || streamState === $streamWritable) { + stream?.cancel?.().catch(nop); } - } -} -IncomingMessage.prototype._read = function (size) { - if (this[noBodySymbol]) { - this.complete = true; - this.push(null); - } else if (this[bodyStreamSymbol] == null) { - const reader = this[reqSymbol].body?.getReader() as ReadableStreamDefaultReader; - if (!reader) { - this.complete = true; - this.push(null); - return; + const socket = this[fakeSocketSymbol]; + if (socket) { + socket.destroy(err); } - this[bodyStreamSymbol] = reader; - consumeStream(this, reader); - } -}; -Object.defineProperty(IncomingMessage.prototype, "aborted", { - get() { + if (cb) { + emitErrorNextTick(this, err, cb); + } + }, + get aborted() { return this[abortedSymbol]; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "connection", { - get() { + set aborted(value) { + this[abortedSymbol] = value; + }, + get connection() { return (this[fakeSocketSymbol] ??= new FakeSocket()); }, -}); - -Object.defineProperty(IncomingMessage.prototype, "statusCode", { - get() { + get statusCode() { return this[reqSymbol].status; }, - set(v) { - if (!(v in STATUS_CODES)) return; - this[reqSymbol].status = v; + set statusCode(value) { + if (!(value in STATUS_CODES)) return; + this[reqSymbol].status = value; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "statusMessage", { - get() { + get statusMessage() { return STATUS_CODES[this[reqSymbol].status]; }, - set(v) { - //noop + set statusMessage(value) { + // noop }, -}); - -Object.defineProperty(IncomingMessage.prototype, "httpVersion", { - get() { + get httpVersion() { return "1.1"; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "rawTrailers", { - get() { - return []; + set httpVersion(value) { + // noop }, -}); - -Object.defineProperty(IncomingMessage.prototype, "httpVersionMajor", { - get() { + get httpVersionMajor() { return 1; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "httpVersionMinor", { - get() { + set httpVersionMajor(value) { + // noop + }, + get httpVersionMinor() { return 1; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "trailers", { - get() { + set httpVersionMinor(value) { + // noop + }, + get rawTrailers() { + return []; + }, + set 
rawTrailers(value) { + // noop + }, + get trailers() { return kEmptyObject; }, -}); - -Object.defineProperty(IncomingMessage.prototype, "socket", { - get() { + set trailers(value) { + // noop + }, + setTimeout(msecs, callback) { + // noop + return this; + }, + get socket() { return (this[fakeSocketSymbol] ??= new FakeSocket()); }, - set(val) { - this[fakeSocketSymbol] = val; + set socket(value) { + this[fakeSocketSymbol] = value; }, -}); - -IncomingMessage.prototype.setTimeout = function (msecs, callback) { - // TODO: - return this; }; +$setPrototypeDirect.$call(IncomingMessage.prototype, Readable.prototype); +$setPrototypeDirect.$call(IncomingMessage, Readable); + +async function consumeStream(self, reader: ReadableStreamDefaultReader) { + var done = false, + value, + aborted = false; + try { + while (true) { + const result = reader.readMany(); + if ($isPromise(result)) { + ({ done, value } = await result); + } else { + ({ done, value } = result); + } + + if (self.destroyed || (aborted = self[abortedSymbol])) { + break; + } + for (var v of value) { + self.push(v); + } + + if (self.destroyed || (aborted = self[abortedSymbol]) || done) { + break; + } + } + } catch (err) { + if (aborted || self.destroyed) return; + self.destroy(err); + } finally { + reader?.cancel?.().catch?.(nop); + } + + if (!self.complete) { + self.complete = true; + self.push(null); + } +} const headersSymbol = Symbol("headers"); const finishedSymbol = Symbol("finished"); @@ -899,9 +949,9 @@ function OutgoingMessage(options) { this[kAbortController] = null; } -Object.setPrototypeOf((OutgoingMessage.prototype = {}), Writable.prototype); +$setPrototypeDirect.$call((OutgoingMessage.prototype = {}), Writable.prototype); OutgoingMessage.prototype.constructor = OutgoingMessage; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(OutgoingMessage, Writable); +$setPrototypeDirect.$call(OutgoingMessage, Writable); // Express "compress" package uses this OutgoingMessage.prototype._implicitHeader = function () {}; @@ -1103,9 +1153,9 @@ function ServerResponse(req, reply) { // https://github.com/nodejs/node/blob/cf8c6994e0f764af02da4fa70bc5962142181bf3/lib/_http_server.js#L192 if (req.method === "HEAD") this._hasBody = false; } -Object.setPrototypeOf((ServerResponse.prototype = {}), OutgoingMessage.prototype); +$setPrototypeDirect.$call((ServerResponse.prototype = {}), OutgoingMessage.prototype); ServerResponse.prototype.constructor = ServerResponse; // Re-add constructor which got lost when setting prototype -Object.setPrototypeOf(ServerResponse, OutgoingMessage); +$setPrototypeDirect.$call(ServerResponse, OutgoingMessage); // Express "compress" package uses this ServerResponse.prototype._implicitHeader = function () { @@ -1396,10 +1446,7 @@ class ClientRequest extends OutgoingMessage { this.destroyed = true; // If request is destroyed we abort the current response this[kAbortController]?.abort?.(); - if (err) { - this.emit("error", err); - } - callback(); + emitErrorNextTick(this, err, callback); } _ensureTls() { @@ -1410,11 +1457,16 @@ class ClientRequest extends OutgoingMessage { _final(callback) { this.#finished = true; this[kAbortController] = new AbortController(); - this[kAbortController].signal.addEventListener("abort", () => { - this.emit("abort"); - this[kClearTimeout](); - this.destroy(); - }); + this[kAbortController].signal.addEventListener( + "abort", + () => { + this[kClearTimeout]?.(); + if (this.destroyed) return; + this.emit("abort"); + this.destroy(); + }, + { once: true }, + ); if 
(this.#signal?.aborted) { this[kAbortController].abort(); } @@ -1471,6 +1523,10 @@ class ClientRequest extends OutgoingMessage { //@ts-ignore this.#fetchRequest = fetch(url, fetchOptions) .then(response => { + if (this.aborted) { + return; + } + const prevIsHTTPS = isNextIncomingMessageHTTPS; isNextIncomingMessageHTTPS = response.url.startsWith("https:"); var res = (this.#res = new IncomingMessage(response, { @@ -1483,7 +1539,7 @@ class ClientRequest extends OutgoingMessage { .catch(err => { // Node treats AbortError separately. // The "abort" listener on the abort controller should have called this - if (err?.name === "AbortError") { + if (isAbortError(err)) { return; } @@ -1505,13 +1561,19 @@ class ClientRequest extends OutgoingMessage { } get aborted() { - return this.#signal?.aborted || !!this[kAbortController]?.signal.aborted; + return this[abortedSymbol] || this.#signal?.aborted || !!this[kAbortController]?.signal.aborted; + } + + set aborted(value) { + this[abortedSymbol] = value; } abort() { if (this.aborted) return; + this[abortedSymbol] = true; + process.nextTick(emitAbortNextTick, this); this[kAbortController]?.abort?.(); - // TODO: Close stream if body streaming + this.destroy(); } constructor(input, options, cb) { @@ -2118,6 +2180,22 @@ function get(url, options, cb) { return req; } +function onError(self, error, cb) { + if (error) { + cb(error); + } else { + cb(); + } +} + +function emitErrorNextTick(self, err, cb) { + process.nextTick(onError, self, err, cb); +} + +function emitAbortNextTick(self) { + self.emit("abort"); +} + var globalAgent = new Agent(); export default { Agent, diff --git a/test/js/first_party/undici/undici-primordials.test.ts b/test/js/first_party/undici/undici-primordials.test.ts index dad454a42d3b75..63c57cf086a0ce 100644 --- a/test/js/first_party/undici/undici-primordials.test.ts +++ b/test/js/first_party/undici/undici-primordials.test.ts @@ -1,7 +1,17 @@ -import { describe, it, expect, beforeAll, afterAll } from "bun:test"; +import { describe, it, expect, beforeAll, afterAll, afterEach } from "bun:test"; +const { Response, Request, Headers, FormData, File, URL, AbortSignal, URLSearchParams } = globalThis; +afterEach(() => { + globalThis.Response = Response; + globalThis.Request = Request; + globalThis.Headers = Headers; + globalThis.FormData = FormData; + globalThis.File = File; + globalThis.URL = URL; + globalThis.AbortSignal = AbortSignal; + globalThis.URLSearchParams = URLSearchParams; +}); it("undici", () => { - const { Response, Request, Headers, FormData, File, URL, AbortSignal, URLSearchParams } = globalThis; globalThis.Response = globalThis.Request = globalThis.Headers = diff --git a/test/js/node/http/node-fetch-primordials.test.ts b/test/js/node/http/node-fetch-primordials.test.ts index de674023c37a36..2fdb93eca92f27 100644 --- a/test/js/node/http/node-fetch-primordials.test.ts +++ b/test/js/node/http/node-fetch-primordials.test.ts @@ -1,4 +1,14 @@ -import { test, expect } from "bun:test"; +import { afterEach, expect, test } from "bun:test"; + +const originalResponse = globalThis.Response; +const originalRequest = globalThis.Request; +const originalHeaders = globalThis.Headers; +afterEach(() => { + globalThis.Response = originalResponse; + globalThis.Request = originalRequest; + globalThis.Headers = originalHeaders; + globalThis.fetch = Bun.fetch; +}); test("fetch, Response, Request can be overriden", async () => { const { Response, Request } = globalThis; diff --git a/test/js/node/http/node-fetch.test.js 
b/test/js/node/http/node-fetch.test.js index 92a3f12e3925ae..a865d0b1575252 100644 --- a/test/js/node/http/node-fetch.test.js +++ b/test/js/node/http/node-fetch.test.js @@ -3,7 +3,17 @@ import * as iso from "isomorphic-fetch"; import * as vercelFetch from "@vercel/fetch"; import * as stream from "stream"; -import { test, expect } from "bun:test"; +import { test, expect, beforeAll, afterAll, afterEach } from "bun:test"; + +const originalResponse = globalThis.Response; +const originalRequest = globalThis.Request; +const originalHeaders = globalThis.Headers; +afterEach(() => { + globalThis.Response = originalResponse; + globalThis.Request = originalRequest; + globalThis.Headers = originalHeaders; + globalThis.fetch = Bun.fetch; +}); test("node-fetch", () => { expect(Response.prototype).toBeInstanceOf(globalThis.Response); diff --git a/test/js/node/http/node-http-error-in-data-handler-fixture.1.js b/test/js/node/http/node-http-error-in-data-handler-fixture.1.js new file mode 100644 index 00000000000000..b33d56f40f67b2 --- /dev/null +++ b/test/js/node/http/node-http-error-in-data-handler-fixture.1.js @@ -0,0 +1,35 @@ +const http = require("http"); +const server = http.createServer((req, res) => { + res.end("Hello World\n"); +}); +const { promise, resolve, reject } = Promise.withResolvers(); +process.exitCode = 1; + +server.listen(0, function () { + const port = server.address().port; + http + .request(`http://localhost:${port}`, res => { + res + .on("data", data => { + // base64 the message to ensure we don't confuse source code with the error message + throw new Error(Buffer.from("VGVzdCBwYXNzZWQ=", "base64")); + }) + .on("end", () => { + server.close(); + }); + }) + .on("error", reject) + .end(); +}); + +server.on("close", () => { + resolve(); +}); +server.on("error", err => { + reject(err); +}); + +process.on("uncaughtException", err => { + console.log(err); + process.exit(0); +}); diff --git a/test/js/node/http/node-http-error-in-data-handler-fixture.2.js b/test/js/node/http/node-http-error-in-data-handler-fixture.2.js new file mode 100644 index 00000000000000..7fb81dc9f2f7af --- /dev/null +++ b/test/js/node/http/node-http-error-in-data-handler-fixture.2.js @@ -0,0 +1,36 @@ +const http = require("http"); +const server = http.createServer(async (req, res) => { + res.end("Hello World\n"); +}); +const { promise, resolve, reject } = Promise.withResolvers(); +process.exitCode = 1; + +server.listen(0, function () { + const port = server.address().port; + http + .request(`http://localhost:${port}`, res => { + res + .on("data", async data => { + await Bun.sleep(1); + // base64 the message to ensure we don't confuse source code with the error message + throw new Error(Buffer.from("VGVzdCBwYXNzZWQ=", "base64")); + }) + .on("end", () => { + server.close(); + }); + }) + .on("error", reject) + .end(); +}); + +server.on("close", () => { + resolve(); +}); +server.on("error", err => { + reject(err); +}); + +process.on("unhandledRejection", err => { + console.log(err); + process.exit(0); +}); diff --git a/test/js/node/http/node-http-primoridals.test.ts b/test/js/node/http/node-http-primoridals.test.ts index 57b1a8506b50fc..00760801358dc1 100644 --- a/test/js/node/http/node-http-primoridals.test.ts +++ b/test/js/node/http/node-http-primoridals.test.ts @@ -1,4 +1,16 @@ -import { test, expect } from "bun:test"; +import { test, expect, afterEach } from "bun:test"; + +const Response = globalThis.Response; +const Request = globalThis.Request; +const Headers = globalThis.Headers; +const Blob = globalThis.Blob; + 
+afterEach(() => { + globalThis.Response = Response; + globalThis.Request = Request; + globalThis.Headers = Headers; + globalThis.Blob = Blob; +}); // This test passes by not hanging. test("Overriding Request, Response, Headers, and Blob should not break node:http server", async () => { diff --git a/test/js/node/http/node-http.test.ts b/test/js/node/http/node-http.test.ts index cec90d9e853362..883f30ba7b29a9 100644 --- a/test/js/node/http/node-http.test.ts +++ b/test/js/node/http/node-http.test.ts @@ -1943,7 +1943,7 @@ it("should emit events in the right order", async () => { it("destroy should end download", async () => { // just simulate some file that will take forever to download - const payload = Buffer.from("X".repeat(16 * 1024)); + const payload = Buffer.from("X".repeat(128 * 1024)); using server = Bun.serve({ port: 0, @@ -1958,24 +1958,33 @@ it("destroy should end download", async () => { }); }, }); - { - let chunks = 0; - const { promise, resolve } = Promise.withResolvers(); + async function run() { + let receivedByteLength = 0; + let { promise, resolve } = Promise.withResolvers(); const req = request(server.url, res => { - res.on("data", () => { - process.nextTick(resolve); - chunks++; + res.on("data", data => { + receivedByteLength += data.length; + if (resolve) { + resolve(); + resolve = null; + } }); }); req.end(); - // wait for the first chunk await promise; - // should stop the download req.destroy(); - await Bun.sleep(200); - expect(chunks).toBeLessThanOrEqual(3); + await Bun.sleep(10); + const initialByteLength = receivedByteLength; + expect(receivedByteLength).toBeLessThanOrEqual(payload.length * 3); + await Bun.sleep(10); + expect(initialByteLength).toBe(receivedByteLength); + await Bun.sleep(10); } + + const runCount = 50; + const runs = Array.from({ length: runCount }, run); + await Promise.all(runs); }); it("can send brotli from Server and receive with fetch", async () => { @@ -2219,3 +2228,27 @@ it("should mark complete true", async () => { server.close(); } }); + +it("should propagate exception in sync data handler", async () => { + const { exitCode, stdout } = Bun.spawnSync({ + cmd: [bunExe(), "run", path.join(import.meta.dir, "node-http-error-in-data-handler-fixture.1.js")], + stdout: "pipe", + stderr: "inherit", + env: bunEnv, + }); + + expect(stdout.toString()).toContain("Test passed"); + expect(exitCode).toBe(0); +}); + +it("should propagate exception in async data handler", async () => { + const { exitCode, stdout } = Bun.spawnSync({ + cmd: [bunExe(), "run", path.join(import.meta.dir, "node-http-error-in-data-handler-fixture.2.js")], + stdout: "pipe", + stderr: "inherit", + env: bunEnv, + }); + + expect(stdout.toString()).toContain("Test passed"); + expect(exitCode).toBe(0); +}); From 3285166ba2f44e53bf1ccfdcb4c7556a311bc836 Mon Sep 17 00:00:00 2001 From: dave caruso Date: Fri, 26 Jul 2024 20:00:02 -0700 Subject: [PATCH 22/46] fix: check if we are crashing before exiting gracefully (#12865) --- src/Global.zig | 13 +++++++------ src/bundler.zig | 4 ++-- src/crash_handler.zig | 37 +++++++++++++++++++++++++++++++------ src/js_lexer.zig | 2 +- src/js_parser.zig | 10 +++++----- src/js_printer.zig | 24 ++++++++++++------------ src/json_parser.zig | 2 +- src/logger.zig | 2 +- src/napi/napi.zig | 4 ++-- src/renamer.zig | 2 +- 10 files changed, 63 insertions(+), 37 deletions(-) diff --git a/src/Global.zig b/src/Global.zig index f572cdba1232f8..d3becfed784954 100644 --- a/src/Global.zig +++ b/src/Global.zig @@ -113,10 +113,13 @@ pub fn isExiting() bool { pub fn 
exit(code: u32) noreturn {
 is_exiting.store(true, .monotonic);
- if (comptime Environment.isMac) {
- std.c.exit(@bitCast(code));
+ // If we are crashing, allow the crash handler to finish its work.
+ bun.crash_handler.sleepForeverIfAnotherThreadIsCrashing();
+
+ switch (Environment.os) {
+ .mac => std.c.exit(@bitCast(code)),
+ else => bun.C.quick_exit(@bitCast(code)),
 }
- bun.C.quick_exit(@bitCast(code));
 }

 pub fn raiseIgnoringPanicHandler(sig: bun.SignalCode) noreturn {
@@ -152,11 +155,9 @@ pub inline fn configureAllocator(_: AllocatorConfiguration) void {
 // if (!config.long_running) Mimalloc.mi_option_set(Mimalloc.mi_option_reset_delay, 0);
 }
-pub const panic = Output.panic; // deprecated
-
 pub fn notimpl() noreturn {
 @setCold(true);
- Global.panic("Not implemented yet!!!!!", .{});
+ Output.panic("Not implemented yet!!!!!", .{});
 }

 // Make sure we always print any leftover
diff --git a/src/bundler.zig b/src/bundler.zig
index c34b5629dfe058..3f7de364e37c23 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -639,7 +639,7 @@ pub const Bundler = struct {
 framework.resolved = true;
 this.options.framework = framework.*;
 } else if (!framework.resolved) {
- Global.panic("directly passing framework path is not implemented yet!", .{});
+ Output.panic("directly passing framework path is not implemented yet!", .{});
 }
 }
 }
@@ -1649,7 +1649,7 @@ pub const Bundler = struct {
 }
 },
 .css => {},
- else => Global.panic("Unsupported loader {s} for path: {s}", .{ @tagName(loader), source.path.text }),
+ else => Output.panic("Unsupported loader {s} for path: {s}", .{ @tagName(loader), source.path.text }),
 }

 return null;
diff --git a/src/crash_handler.zig b/src/crash_handler.zig
index 4a8f8082739b35..0a3e9faf2d8156 100644
--- a/src/crash_handler.zig
+++ b/src/crash_handler.zig
@@ -168,10 +168,6 @@ pub fn crashHandler(
 ) noreturn {
 @setCold(true);
- // If a segfault happens while panicking, we want it to actually segfault, not trigger
- // the handler.
- resetSegfaultHandler();
-
 if (bun.Environment.isDebug)
 bun.Output.disableScopedDebugWriter();

@@ -197,7 +193,7 @@ pub fn crashHandler(
 const writer = std.io.getStdErr().writer();

 // The format of the panic trace is slightly different in debug
- // builds Mainly, we demangle the backtrace immediately instead
+ // builds. Mainly, we demangle the backtrace immediately instead
 // of using a trace string.
 //
 // To make the release-mode behavior easier to demo, debug mode
@@ -346,12 +342,22 @@ pub fn crashHandler(
 writer.writeAll("\n") catch std.posix.abort();
 }
 }
+
 // Be aware that this function only lets one thread return from it.
 // This is important so that we do not try to run the following reload logic twice.
 waitForOtherThreadToFinishPanicking();

 report(trace_str_buf.slice());

+ // At this point, the crash handler has performed its job. Reset the segfault handler
+ // so that a crash will actually crash. We need this because we want the process to
+ // exit with a signal, and allow tools to be able to gather core dumps.
+ //
+ // This is done so late (in comparison to the Zig Standard Library's panic handler)
+ // because if multiple threads segfault (more often the case on Windows), we don't
+ // want another thread to interrupt the crashing of the first one.
+ resetSegfaultHandler();
+
 if (bun.auto_reload_on_crash and
 // Do not reload if the panic arose FROM the reload function.
!bun.isProcessReloadInProgressOnAnotherThread()) @@ -371,6 +377,8 @@ pub fn crashHandler( inline 1, 2 => |t| { if (t == 1) { panic_stage = 2; + + resetSegfaultHandler(); Output.flush(); } panic_stage = 3; @@ -384,6 +392,7 @@ pub fn crashHandler( }, 3 => { // Panicked while printing "Panicked during a panic." + panic_stage = 4; }, else => { // Panicked or otherwise looped into the panic handler while trying to exit. @@ -905,6 +914,22 @@ fn waitForOtherThreadToFinishPanicking() void { } } +/// This is to be called by any thread that is attempting to exit the process. +/// If another thread is panicking, this will sleep this thread forever, under +/// the assumption that the crash handler will terminate the program. +/// +/// There have been situations in the past where a bundler thread starts +/// panicking, but the main thread ends up marking a test as passing and then +/// exiting with code zero before the crash handler can finish the crash. +pub fn sleepForeverIfAnotherThreadIsCrashing() void { + if (panicking.load(.acquire) > 0) { + // Sleep forever without hammering the CPU + var futex = std.atomic.Value(u32).init(0); + while (true) std.Thread.Futex.wait(&futex, 0); + comptime unreachable; + } +} + /// Each platform is encoded as a single character. It is placed right after the /// slash after the version, so someone just reading the trace string can tell /// what platform it came from. L, M, and W are for Linux, macOS, and Windows, @@ -1611,7 +1636,7 @@ pub const js_bindings = struct { const bits = bun.Analytics.packedFeatures(); var buf = std.BoundedArray(u8, 16){}; writeU64AsTwoVLQs(buf.writer(), @bitCast(bits)) catch { - // there is definetly enough space in the bounded array + // there is definitely enough space in the bounded array unreachable; }; return bun.String.createLatin1(buf.slice()).toJS(global); diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 681439daed75b3..7e7a41298e5b84 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -32,7 +32,7 @@ pub const TypeScriptAccessibilityModifier = tables.TypeScriptAccessibilityModifi pub const ChildlessJSXTags = tables.ChildlessJSXTags; fn notimpl() noreturn { - Global.panic("not implemented yet!", .{}); + Output.panic("not implemented yet!", .{}); } pub var emptyJavaScriptString = ([_]u16{0}); diff --git a/src/js_parser.zig b/src/js_parser.zig index e2076b4df523d9..70a5402d0d8e53 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -2729,7 +2729,7 @@ pub const StmtsKind = enum { }; fn notimpl() noreturn { - Global.panic("Not implemented yet!!", .{}); + Output.panic("Not implemented yet!!", .{}); } const ExprBindingTuple = struct { @@ -14949,7 +14949,7 @@ fn NewParser_( p.log.level = .verbose; p.log.printForLogLevel(panic_stream.writer()) catch unreachable; - Global.panic(fmt ++ "\n{s}", args ++ .{panic_buffer[0..panic_stream.pos]}); + Output.panic(fmt ++ "\n{s}", args ++ .{panic_buffer[0..panic_stream.pos]}); } pub fn parsePrefix(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr { @@ -16235,7 +16235,7 @@ fn NewParser_( } }, else => { - Global.panic("Unexpected type in export default: {any}", .{s2}); + Output.panic("Unexpected type in export default: {any}", .{s2}); }, } }, @@ -17521,7 +17521,7 @@ fn NewParser_( var has_proto = false; for (e_.properties.slice()) |*property| { if (property.kind != .spread) { - property.key = p.visitExpr(property.key orelse Global.panic("Expected property key", .{})); + property.key = p.visitExpr(property.key orelse Output.panic("Expected property key", .{})); 
const key = property.key.?; // Forbid duplicate "__proto__" properties according to the specification if (!property.flags.contains(.is_computed) and @@ -20780,7 +20780,7 @@ fn NewParser_( } }, else => { - Global.panic("Unexpected binding type in namespace. This is a bug. {any}", .{binding}); + Output.panic("Unexpected binding type in namespace. This is a bug. {any}", .{binding}); }, } } diff --git a/src/js_printer.zig b/src/js_printer.zig index 79c1db66b2ac1a..1458dafc964ed5 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -2813,7 +2813,7 @@ fn NewPrinter( if (e.func.name) |sym| { p.printSpaceBeforeIdentifier(); p.addSourceMapping(sym.loc); - p.printSymbol(sym.ref orelse Global.panic("internal error: expected E.Function's name symbol to have a ref\n{any}", .{e.func})); + p.printSymbol(sym.ref orelse Output.panic("internal error: expected E.Function's name symbol to have a ref\n{any}", .{e.func})); } p.printFunc(e.func); @@ -2834,7 +2834,7 @@ fn NewPrinter( if (e.class_name) |name| { p.print(" "); p.addSourceMapping(name.loc); - p.printSymbol(name.ref orelse Global.panic("internal error: expected E.Class's name symbol to have a ref\n{any}", .{e})); + p.printSymbol(name.ref orelse Output.panic("internal error: expected E.Class's name symbol to have a ref\n{any}", .{e})); } p.printClass(e.*); if (wrap) { @@ -3930,7 +3930,7 @@ fn NewPrinter( p.print("}"); }, else => { - Global.panic("Unexpected binding of type {any}", .{binding}); + Output.panic("Unexpected binding of type {any}", .{binding}); }, } } @@ -3959,8 +3959,8 @@ fn NewPrinter( .s_function => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); - const name = s.func.name orelse Global.panic("Internal error: expected func to have a name ref\n{any}", .{s}); - const nameRef = name.ref orelse Global.panic("Internal error: expected func to have a name\n{any}", .{s}); + const name = s.func.name orelse Output.panic("Internal error: expected func to have a name ref\n{any}", .{s}); + const nameRef = name.ref orelse Output.panic("Internal error: expected func to have a name\n{any}", .{s}); if (s.func.flags.contains(.is_export)) { if (!rewrite_esm_to_cjs) { @@ -4084,7 +4084,7 @@ fn NewPrinter( if (class.class.class_name) |name| { p.print("class "); - p.printSymbol(name.ref orelse Global.panic("Internal error: Expected class to have a name ref\n{any}", .{class})); + p.printSymbol(name.ref orelse Output.panic("Internal error: Expected class to have a name ref\n{any}", .{class})); } else { p.print("class"); } @@ -4094,7 +4094,7 @@ fn NewPrinter( p.printNewline(); }, else => { - Global.panic("Internal error: unexpected export default stmt data {any}", .{s}); + Output.panic("Internal error: unexpected export default stmt data {any}", .{s}); }, } }, @@ -4490,7 +4490,7 @@ fn NewPrinter( p.printIndent(); } p.printSpaceBeforeIdentifier(); - p.printSymbol(s.name.ref orelse Global.panic("Internal error: expected label to have a name {any}", .{s})); + p.printSymbol(s.name.ref orelse Output.panic("Internal error: expected label to have a name {any}", .{s})); p.print(":"); p.printBody(s.stmt); }, @@ -4979,9 +4979,9 @@ fn NewPrinter( const to_print: []const u8 = if (slice.len > 1024) slice[slice.len - 1024 ..] else slice; if (to_print.len > 0) { - Global.panic("\nvoluntary crash while printing:\n{s}\n---This is a bug. Not your fault.\n", .{to_print}); + Output.panic("\nvoluntary crash while printing:\n{s}\n---This is a bug. Not your fault.\n", .{to_print}); } else { - Global.panic("\nvoluntary crash while printing. This is a bug. 
Not your fault.\n", .{}); + Output.panic("\nvoluntary crash while printing. This is a bug. Not your fault.\n", .{}); } }, } @@ -5208,7 +5208,7 @@ fn NewPrinter( // for(;) .s_empty => {}, else => { - Global.panic("Internal error: Unexpected stmt in for loop {any}", .{initSt}); + Output.panic("Internal error: Unexpected stmt in for loop {any}", .{initSt}); }, } } @@ -5717,7 +5717,7 @@ pub fn NewWriter( pub inline fn print(writer: *Self, comptime ValueType: type, str: ValueType) void { if (FeatureFlags.disable_printing_null) { if (str == 0) { - Global.panic("Attempted to print null char", .{}); + Output.panic("Attempted to print null char", .{}); } } diff --git a/src/json_parser.zig b/src/json_parser.zig index 87f7ca1ffe7f7d..bf46ea15edfe0c 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -1075,7 +1075,7 @@ fn expectPrintedJSON(_contents: string, expected: string) !void { const expr = try ParseJSON(&source, &log, default_allocator); if (log.msgs.items.len > 0) { - Global.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text }); + Output.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text }); } const buffer_writer = try js_printer.BufferWriter.init(default_allocator); diff --git a/src/logger.zig b/src/logger.zig index bef1602a4cbf38..70a301c166eb0b 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -549,7 +549,7 @@ pub const Msg = struct { } } - pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(Global.panic)) void { + pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(Output.panic)) void { formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{ msg.kind.string(), msg.data.text, diff --git a/src/napi/napi.zig b/src/napi/napi.zig index c8a82afb7e9853..4b37c5632291f4 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -1183,10 +1183,10 @@ pub export fn napi_fatal_error(location_ptr: ?[*:0]const u8, location_len: usize const location = napiSpan(location_ptr, location_len); if (location.len > 0) { - bun.Global.panic("napi: {s}\n {s}", .{ message, location }); + bun.Output.panic("napi: {s}\n {s}", .{ message, location }); } - bun.Global.panic("napi: {s}", .{message}); + bun.Output.panic("napi: {s}", .{message}); } pub export fn napi_create_buffer(env: napi_env, length: usize, data: ?**anyopaque, result: *napi_value) napi_status { log("napi_create_buffer: {d}", .{length}); diff --git a/src/renamer.zig b/src/renamer.zig index 3f3b5725463c21..e23d4aba2ccecc 100644 --- a/src/renamer.zig +++ b/src/renamer.zig @@ -35,7 +35,7 @@ pub const NoOpRenamer = struct { if (renamer.symbols.getConst(resolved)) |symbol| { return symbol.original_name; } else { - Global.panic("Invalid symbol {s} in {s}", .{ ref, renamer.source.path.text }); + Output.panic("Invalid symbol {s} in {s}", .{ ref, renamer.source.path.text }); } } From 56acfa37cedbf04758b8fe2e3c031e05da7d25bc Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 27 Jul 2024 00:20:50 -0700 Subject: [PATCH 23/46] implement node:util.getSystemErrorName() (#12837) --- docs/runtime/nodejs-apis.md | 2 +- src/bun.js/bindings/NodeError.cpp | 95 ++++++++++++++++++++++ src/bun.js/bindings/NodeError.h | 8 ++ src/bun.js/bindings/bindings.cpp | 18 +++++ src/bun.js/node/node_util_binding.zig | 108 ++++++++++++++++++++++++++ src/darwin_c.zig | 69 ++++++++++++++++ src/js/internal/errors.ts | 4 + src/js/node/util.ts | 12 +++ src/linux_c.zig | 69 ++++++++++++++++ src/windows_c.zig | 69 ++++++++++++++++ test/harness.ts | 38 +++++++++ 
test/js/node/util/util.test.js | 43 ++++++++++ 12 files changed, 534 insertions(+), 1 deletion(-) create mode 100644 src/bun.js/bindings/NodeError.cpp create mode 100644 src/bun.js/bindings/NodeError.h create mode 100644 src/bun.js/node/node_util_binding.zig create mode 100644 src/js/internal/errors.ts diff --git a/docs/runtime/nodejs-apis.md b/docs/runtime/nodejs-apis.md index 36407cdf6ec565..91a8b61a203d93 100644 --- a/docs/runtime/nodejs-apis.md +++ b/docs/runtime/nodejs-apis.md @@ -153,7 +153,7 @@ Some methods are not optimized yet. ### [`node:util`](https://nodejs.org/api/util.html) -🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `getSystemErrorName` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters` +🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters` ### [`node:v8`](https://nodejs.org/api/v8.html) diff --git a/src/bun.js/bindings/NodeError.cpp b/src/bun.js/bindings/NodeError.cpp new file mode 100644 index 00000000000000..c29dd343384738 --- /dev/null +++ b/src/bun.js/bindings/NodeError.cpp @@ -0,0 +1,95 @@ +#include "root.h" +#include "headers-handwritten.h" +#include "BunClientData.h" +#include "helpers.h" +#include "JavaScriptCore/JSCJSValue.h" +#include "JavaScriptCore/ErrorInstance.h" +#include "JavaScriptCore/ExceptionScope.h" +#include "wtf/text/ASCIILiteral.h" +#include "wtf/text/MakeString.h" +#include + +JSC::EncodedJSValue JSC__JSValue__createTypeError(const ZigString* message, const ZigString* arg1, JSC::JSGlobalObject* globalObject); +JSC::EncodedJSValue JSC__JSValue__createRangeError(const ZigString* message, const ZigString* arg1, JSC::JSGlobalObject* globalObject); + +namespace Bun { + +using namespace JSC; + +JSC::JSValue createTypeErrorWithCode(JSC::JSGlobalObject* globalObject, String message, ASCIILiteral code) +{ + JSC::VM& vm = globalObject->vm(); + + JSC::JSObject* result = JSC::createTypeError(globalObject, message); + JSC::EnsureStillAliveScope ensureAlive(result); + auto typeError = JSC::JSValue(result).asCell()->getObject(); + + auto clientData = WebCore::clientData(vm); + typeError->putDirect(vm, clientData->builtinNames().codePublicName(), jsString(vm, String(code)), 0); + + return typeError; +} + +JSC::JSValue createRangeErrorWithCode(JSC::JSGlobalObject* globalObject, String message, ASCIILiteral code) +{ + JSC::VM& vm = globalObject->vm(); + + JSC::JSObject* result = JSC::createRangeError(globalObject, message); + JSC::EnsureStillAliveScope ensureAlive(result); + auto typeError = JSC::JSValue(result).asCell()->getObject(); + + auto clientData = WebCore::clientData(vm); + typeError->putDirect(vm, clientData->builtinNames().codePublicName(), jsString(vm, String(code)), 0); + + return typeError; +} + +JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_INVALID_ARG_TYPE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +{ + JSC::VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + auto argCount = callFrame->argumentCount(); + if (argCount < 3) { + JSC::throwTypeError(globalObject, scope, "requires 3 arguments"_s); + return {}; + } + + auto arg_name = callFrame->argument(0).toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + auto expected_type = callFrame->argument(1).toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + auto actual_value = callFrame->argument(2).toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + auto message = 
makeString("The \""_s, arg_name, "\" argument must be of type "_s, expected_type, ". Recieved "_s, actual_value); + return JSC::JSValue::encode(createTypeErrorWithCode(globalObject, message, "ERR_INVALID_ARG_TYPE"_s)); +} + +JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_OUT_OF_RANGE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)) +{ + JSC::VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + auto argCount = callFrame->argumentCount(); + if (argCount < 3) { + JSC::throwTypeError(globalObject, scope, "requires 3 arguments"_s); + return {}; + } + + auto arg_name = callFrame->argument(0).toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + auto range = callFrame->argument(1).toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + auto input = callFrame->argument(2).toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, {}); + + auto message = makeString("The value of \""_s, arg_name, "\" is out of range. It must be "_s, range, ". Received "_s, input); + return JSC::JSValue::encode(createRangeErrorWithCode(globalObject, message, "ERR_OUT_OF_RANGE"_s)); +} + +} diff --git a/src/bun.js/bindings/NodeError.h b/src/bun.js/bindings/NodeError.h new file mode 100644 index 00000000000000..dc959feff0287f --- /dev/null +++ b/src/bun.js/bindings/NodeError.h @@ -0,0 +1,8 @@ +#include "root.h" + +namespace Bun { + +JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_INVALID_ARG_TYPE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)); +JSC_DEFINE_HOST_FUNCTION(jsFunction_ERR_OUT_OF_RANGE, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame)); + +} diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 61f7f700e86346..d1c58ac9a0d93f 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -1570,6 +1570,7 @@ JSC__JSValue WebCore__FetchHeaders__toJS(WebCore__FetchHeaders* headers, JSC__JS return JSC::JSValue::encode(value); } + JSC__JSValue WebCore__FetchHeaders__clone(WebCore__FetchHeaders* headers, JSC__JSGlobalObject* arg1) { auto throwScope = DECLARE_THROW_SCOPE(arg1->vm()); @@ -2132,6 +2133,7 @@ JSC__JSPromise* JSC__JSValue__asPromise(JSC__JSValue JSValue0) JSC::JSValue value = JSC::JSValue::decode(JSValue0); return JSC::jsDynamicCast(value); } + JSC__JSValue JSC__JSValue__createInternalPromise(JSC__JSGlobalObject* globalObject) { JSC::VM& vm = globalObject->vm(); @@ -2192,6 +2194,7 @@ JSC__JSValue JSC__JSPromise__asValue(JSC__JSPromise* arg0, JSC__JSGlobalObject* ASSERT_WITH_MESSAGE(value.inherits(), "JSPromise::asValue() called on a non-promise object"); return JSC::JSValue::encode(value); } + JSC__JSPromise* JSC__JSPromise__create(JSC__JSGlobalObject* arg0) { return JSC::JSPromise::create(arg0->vm(), arg0->promiseStructure()); @@ -2232,6 +2235,7 @@ JSC__JSValue JSC__JSGlobalObject__getCachedObject(JSC__JSGlobalObject* globalObj JSC::JSValue result = globalObject->getIfPropertyExists(globalObject, ident); return JSC::JSValue::encode(result); } + JSC__JSValue JSC__JSGlobalObject__putCachedObject(JSC__JSGlobalObject* globalObject, const ZigString* arg1, JSC__JSValue JSValue2) { JSC::VM& vm = globalObject->vm(); @@ -2473,6 +2477,7 @@ JSC__Exception* JSC__Exception__create(JSC__JSGlobalObject* arg0, JSC__JSObject* ? 
JSC::Exception::StackCaptureAction::CaptureStack : JSC::Exception::StackCaptureAction::DoNotCaptureStack); } + JSC__JSValue JSC__Exception__value(JSC__Exception* arg0) { return JSC::JSValue::encode(arg0->value()); @@ -2485,17 +2490,20 @@ JSC__JSValue JSC__Exception__value(JSC__Exception* arg0) // JSC__PropertyNameArray__next(JSC__PropertyNameArray* arg0, size_t arg1); // CPP_DECL void JSC__PropertyNameArray__release(JSC__PropertyNameArray* arg0); size_t JSC__JSObject__getArrayLength(JSC__JSObject* arg0) { return arg0->getArrayLength(); } + JSC__JSValue JSC__JSObject__getIndex(JSC__JSValue jsValue, JSC__JSGlobalObject* arg1, uint32_t arg3) { return JSC::JSValue::encode(JSC::JSValue::decode(jsValue).toObject(arg1)->getIndex(arg1, arg3)); } + JSC__JSValue JSC__JSValue__getDirectIndex(JSC__JSValue jsValue, JSC__JSGlobalObject* arg1, uint32_t arg3) { JSC::JSObject* object = JSC::JSValue::decode(jsValue).getObject(); return JSC::JSValue::encode(object->getDirectIndex(arg1, arg3)); } + JSC__JSValue JSC__JSObject__getDirect(JSC__JSObject* arg0, JSC__JSGlobalObject* arg1, const ZigString* arg2) { @@ -2530,6 +2538,7 @@ bool JSC__JSString__eql(const JSC__JSString* arg0, JSC__JSGlobalObject* obj, JSC } bool JSC__JSString__is8Bit(const JSC__JSString* arg0) { return arg0->is8Bit(); }; size_t JSC__JSString__length(const JSC__JSString* arg0) { return arg0->length(); } + JSC__JSObject* JSC__JSString__toObject(JSC__JSString* arg0, JSC__JSGlobalObject* arg1) { return arg0->toObject(arg1); @@ -2551,6 +2560,7 @@ extern "C" JSC::JSInternalPromise* JSModuleLoader__import(JSC::JSGlobalObject* g RETURN_IF_EXCEPTION(scope, nullptr); return promise; } + JSC__JSValue JSC__JSModuleLoader__evaluate(JSC__JSGlobalObject* globalObject, const unsigned char* arg1, size_t arg2, const unsigned char* originUrlPtr, size_t originURLLen, const unsigned char* referrerUrlPtr, size_t referrerUrlLen, JSC__JSValue JSValue5, JSC__JSValue* arg6) @@ -2625,6 +2635,7 @@ JSC__JSValue JSC__JSValue__createRangeError(const ZigString* message, const ZigS return JSC::JSValue::encode(rangeError); } + JSC__JSValue JSC__JSValue__createTypeError(const ZigString* message, const ZigString* arg1, JSC__JSGlobalObject* globalObject) { @@ -3083,6 +3094,7 @@ void JSC__JSPromise__rejectAsHandledException(JSC__JSPromise* arg0, JSC__JSGloba { arg0->rejectAsHandled(arg1, arg2); } + JSC__JSPromise* JSC__JSPromise__rejectedPromise(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1) { return JSC::JSPromise::rejectedPromise(arg0, JSC::JSValue::decode(JSValue1)); @@ -3148,6 +3160,7 @@ void JSC__JSPromise__rejectOnNextTickWithHandled(JSC__JSPromise* promise, JSC__J RETURN_IF_EXCEPTION(scope, void()); } } + JSC__JSPromise* JSC__JSPromise__resolvedPromise(JSC__JSGlobalObject* globalObject, JSC__JSValue JSValue1) { JSC::VM& vm = globalObject->vm(); @@ -3236,6 +3249,7 @@ void JSC__JSInternalPromise__rejectAsHandledException(JSC__JSInternalPromise* ar { arg0->rejectAsHandled(arg1, arg2); } + JSC__JSInternalPromise* JSC__JSInternalPromise__rejectedPromise(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1) { @@ -3248,6 +3262,7 @@ void JSC__JSInternalPromise__resolve(JSC__JSInternalPromise* arg0, JSC__JSGlobal { arg0->resolve(arg1, JSC::JSValue::decode(JSValue2)); } + JSC__JSInternalPromise* JSC__JSInternalPromise__resolvedPromise(JSC__JSGlobalObject* arg0, JSC__JSValue JSValue1) { @@ -3493,6 +3508,7 @@ bool JSC__JSValue__isUndefinedOrNull(JSC__JSValue JSValue0) { return JSC::JSValue::decode(JSValue0).isUndefinedOrNull(); } + JSC__JSValue JSC__JSValue__jsBoolean(bool arg0) { 
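
For reference, this is the JavaScript-visible contract the two NodeError.cpp host functions earlier in this patch implement. The sketch below is illustrative TypeScript, not code from the patch: the helper names are invented, and only the message wording and the "code" values are taken from the C++ above.

    // Hypothetical TypeScript mirror of what jsFunction_ERR_INVALID_ARG_TYPE /
    // jsFunction_ERR_OUT_OF_RANGE produce: a built-in TypeError/RangeError with a
    // string `code` property attached, which the tests later match via toThrowWithCode.
    function invalidArgType(argName: string, expectedType: string, actual: unknown) {
      const err = new TypeError(
        `The "${argName}" argument must be of type ${expectedType}. Received ${String(actual)}`,
      ) as TypeError & { code: string };
      err.code = "ERR_INVALID_ARG_TYPE";
      return err;
    }

    function outOfRange(argName: string, range: string, input: unknown) {
      const err = new RangeError(
        `The value of "${argName}" is out of range. It must be ${range}. Received ${String(input)}`,
      ) as RangeError & { code: string };
      err.code = "ERR_OUT_OF_RANGE";
      return err;
    }
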
return JSC::JSValue::encode(JSC::jsBoolean(arg0)); @@ -3501,6 +3517,7 @@ JSC__JSValue JSC__JSValue__jsDoubleNumber(double arg0) { return JSC::JSValue::encode(JSC::jsNumber(arg0)); } + JSC__JSValue JSC__JSValue__jsEmptyString(JSC__JSGlobalObject* arg0) { return JSC::JSValue::encode(JSC::jsEmptyString(arg0->vm())); @@ -4918,6 +4935,7 @@ JSC__JSValue JSC__JSPromise__rejectedPromiseValue(JSC__JSGlobalObject* globalObj JSC::ensureStillAliveHere(JSC::JSValue::decode(JSValue1)); return JSC::JSValue::encode(promise); } + JSC__JSValue JSC__JSPromise__resolvedPromiseValue(JSC__JSGlobalObject* globalObject, JSC__JSValue JSValue1) { diff --git a/src/bun.js/node/node_util_binding.zig b/src/bun.js/node/node_util_binding.zig new file mode 100644 index 00000000000000..d58bac22db85be --- /dev/null +++ b/src/bun.js/node/node_util_binding.zig @@ -0,0 +1,108 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Environment = bun.Environment; +const JSC = bun.JSC; +const string = bun.string; +const Output = bun.Output; +const ZigString = JSC.ZigString; +const uv = bun.windows.libuv; + +pub fn internalErrorName(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { + const arguments = callframe.arguments(1).slice(); + if (arguments.len < 1) { + globalThis.throwNotEnoughArguments("internalErrorName", 1, arguments.len); + return .zero; + } + + const err_value = arguments[0]; + const err_int = err_value.toInt32(); + + if (err_int == -4095) return ZigString.static("EOF").toJS(globalThis); + if (err_int == -4094) return ZigString.static("UNKNOWN").toJS(globalThis); + if (err_int == -3000) return ZigString.static("EAI_ADDRFAMILY").toJS(globalThis); + if (err_int == -3001) return ZigString.static("EAI_AGAIN").toJS(globalThis); + if (err_int == -3002) return ZigString.static("EAI_BADFLAGS").toJS(globalThis); + if (err_int == -3003) return ZigString.static("EAI_CANCELED").toJS(globalThis); + if (err_int == -3004) return ZigString.static("EAI_FAIL").toJS(globalThis); + if (err_int == -3005) return ZigString.static("EAI_FAMILY").toJS(globalThis); + if (err_int == -3006) return ZigString.static("EAI_MEMORY").toJS(globalThis); + if (err_int == -3007) return ZigString.static("EAI_NODATA").toJS(globalThis); + if (err_int == -3008) return ZigString.static("EAI_NONAME").toJS(globalThis); + if (err_int == -3009) return ZigString.static("EAI_OVERFLOW").toJS(globalThis); + if (err_int == -3010) return ZigString.static("EAI_SERVICE").toJS(globalThis); + if (err_int == -3011) return ZigString.static("EAI_SOCKTYPE").toJS(globalThis); + if (err_int == -3013) return ZigString.static("EAI_BADHINTS").toJS(globalThis); + if (err_int == -3014) return ZigString.static("EAI_PROTOCOL").toJS(globalThis); + + if (err_int == -bun.C.UV_E2BIG) return ZigString.static("E2BIG").toJS(globalThis); + if (err_int == -bun.C.UV_EACCES) return ZigString.static("EACCES").toJS(globalThis); + if (err_int == -bun.C.UV_EADDRINUSE) return ZigString.static("EADDRINUSE").toJS(globalThis); + if (err_int == -bun.C.UV_EADDRNOTAVAIL) return ZigString.static("EADDRNOTAVAIL").toJS(globalThis); + if (err_int == -bun.C.UV_EAFNOSUPPORT) return ZigString.static("EAFNOSUPPORT").toJS(globalThis); + if (err_int == -bun.C.UV_EAGAIN) return ZigString.static("EAGAIN").toJS(globalThis); + if (err_int == -bun.C.UV_EALREADY) return ZigString.static("EALREADY").toJS(globalThis); + if (err_int == -bun.C.UV_EBADF) return ZigString.static("EBADF").toJS(globalThis); + if (err_int == -bun.C.UV_EBUSY) return 
ZigString.static("EBUSY").toJS(globalThis); + if (err_int == -bun.C.UV_ECANCELED) return ZigString.static("ECANCELED").toJS(globalThis); + if (err_int == -bun.C.UV_ECHARSET) return ZigString.static("ECHARSET").toJS(globalThis); + if (err_int == -bun.C.UV_ECONNABORTED) return ZigString.static("ECONNABORTED").toJS(globalThis); + if (err_int == -bun.C.UV_ECONNREFUSED) return ZigString.static("ECONNREFUSED").toJS(globalThis); + if (err_int == -bun.C.UV_ECONNRESET) return ZigString.static("ECONNRESET").toJS(globalThis); + if (err_int == -bun.C.UV_EDESTADDRREQ) return ZigString.static("EDESTADDRREQ").toJS(globalThis); + if (err_int == -bun.C.UV_EEXIST) return ZigString.static("EEXIST").toJS(globalThis); + if (err_int == -bun.C.UV_EFAULT) return ZigString.static("EFAULT").toJS(globalThis); + if (err_int == -bun.C.UV_EHOSTUNREACH) return ZigString.static("EHOSTUNREACH").toJS(globalThis); + if (err_int == -bun.C.UV_EINTR) return ZigString.static("EINTR").toJS(globalThis); + if (err_int == -bun.C.UV_EINVAL) return ZigString.static("EINVAL").toJS(globalThis); + if (err_int == -bun.C.UV_EIO) return ZigString.static("EIO").toJS(globalThis); + if (err_int == -bun.C.UV_EISCONN) return ZigString.static("EISCONN").toJS(globalThis); + if (err_int == -bun.C.UV_EISDIR) return ZigString.static("EISDIR").toJS(globalThis); + if (err_int == -bun.C.UV_ELOOP) return ZigString.static("ELOOP").toJS(globalThis); + if (err_int == -bun.C.UV_EMFILE) return ZigString.static("EMFILE").toJS(globalThis); + if (err_int == -bun.C.UV_EMSGSIZE) return ZigString.static("EMSGSIZE").toJS(globalThis); + if (err_int == -bun.C.UV_ENAMETOOLONG) return ZigString.static("ENAMETOOLONG").toJS(globalThis); + if (err_int == -bun.C.UV_ENETDOWN) return ZigString.static("ENETDOWN").toJS(globalThis); + if (err_int == -bun.C.UV_ENETUNREACH) return ZigString.static("ENETUNREACH").toJS(globalThis); + if (err_int == -bun.C.UV_ENFILE) return ZigString.static("ENFILE").toJS(globalThis); + if (err_int == -bun.C.UV_ENOBUFS) return ZigString.static("ENOBUFS").toJS(globalThis); + if (err_int == -bun.C.UV_ENODEV) return ZigString.static("ENODEV").toJS(globalThis); + if (err_int == -bun.C.UV_ENOENT) return ZigString.static("ENOENT").toJS(globalThis); + if (err_int == -bun.C.UV_ENOMEM) return ZigString.static("ENOMEM").toJS(globalThis); + if (err_int == -bun.C.UV_ENONET) return ZigString.static("ENONET").toJS(globalThis); + if (err_int == -bun.C.UV_ENOSPC) return ZigString.static("ENOSPC").toJS(globalThis); + if (err_int == -bun.C.UV_ENOSYS) return ZigString.static("ENOSYS").toJS(globalThis); + if (err_int == -bun.C.UV_ENOTCONN) return ZigString.static("ENOTCONN").toJS(globalThis); + if (err_int == -bun.C.UV_ENOTDIR) return ZigString.static("ENOTDIR").toJS(globalThis); + if (err_int == -bun.C.UV_ENOTEMPTY) return ZigString.static("ENOTEMPTY").toJS(globalThis); + if (err_int == -bun.C.UV_ENOTSOCK) return ZigString.static("ENOTSOCK").toJS(globalThis); + if (err_int == -bun.C.UV_ENOTSUP) return ZigString.static("ENOTSUP").toJS(globalThis); + if (err_int == -bun.C.UV_EPERM) return ZigString.static("EPERM").toJS(globalThis); + if (err_int == -bun.C.UV_EPIPE) return ZigString.static("EPIPE").toJS(globalThis); + if (err_int == -bun.C.UV_EPROTO) return ZigString.static("EPROTO").toJS(globalThis); + if (err_int == -bun.C.UV_EPROTONOSUPPORT) return ZigString.static("EPROTONOSUPPORT").toJS(globalThis); + if (err_int == -bun.C.UV_EPROTOTYPE) return ZigString.static("EPROTOTYPE").toJS(globalThis); + if (err_int == -bun.C.UV_EROFS) return 
ZigString.static("EROFS").toJS(globalThis); + if (err_int == -bun.C.UV_ESHUTDOWN) return ZigString.static("ESHUTDOWN").toJS(globalThis); + if (err_int == -bun.C.UV_ESPIPE) return ZigString.static("ESPIPE").toJS(globalThis); + if (err_int == -bun.C.UV_ESRCH) return ZigString.static("ESRCH").toJS(globalThis); + if (err_int == -bun.C.UV_ETIMEDOUT) return ZigString.static("ETIMEDOUT").toJS(globalThis); + if (err_int == -bun.C.UV_ETXTBSY) return ZigString.static("ETXTBSY").toJS(globalThis); + if (err_int == -bun.C.UV_EXDEV) return ZigString.static("EXDEV").toJS(globalThis); + if (err_int == -bun.C.UV_EFBIG) return ZigString.static("EFBIG").toJS(globalThis); + if (err_int == -bun.C.UV_ENOPROTOOPT) return ZigString.static("ENOPROTOOPT").toJS(globalThis); + if (err_int == -bun.C.UV_ERANGE) return ZigString.static("ERANGE").toJS(globalThis); + if (err_int == -bun.C.UV_ENXIO) return ZigString.static("ENXIO").toJS(globalThis); + if (err_int == -bun.C.UV_EMLINK) return ZigString.static("EMLINK").toJS(globalThis); + if (err_int == -bun.C.UV_EHOSTDOWN) return ZigString.static("EHOSTDOWN").toJS(globalThis); + if (err_int == -bun.C.UV_EREMOTEIO) return ZigString.static("EREMOTEIO").toJS(globalThis); + if (err_int == -bun.C.UV_ENOTTY) return ZigString.static("ENOTTY").toJS(globalThis); + if (err_int == -bun.C.UV_EFTYPE) return ZigString.static("EFTYPE").toJS(globalThis); + if (err_int == -bun.C.UV_EILSEQ) return ZigString.static("EILSEQ").toJS(globalThis); + if (err_int == -bun.C.UV_EOVERFLOW) return ZigString.static("EOVERFLOW").toJS(globalThis); + if (err_int == -bun.C.UV_ESOCKTNOSUPPORT) return ZigString.static("ESOCKTNOSUPPORT").toJS(globalThis); + if (err_int == -bun.C.UV_ENODATA) return ZigString.static("ENODATA").toJS(globalThis); + if (err_int == -bun.C.UV_EUNATCH) return ZigString.static("EUNATCH").toJS(globalThis); + + const fmtstring = bun.String.createFormat("Unknown system error {d}", .{err_int}) catch bun.outOfMemory(); + return fmtstring.toJS(globalThis); +} diff --git a/src/darwin_c.zig b/src/darwin_c.zig index 075b6c1852bf6f..a5b3a8721ab63f 100644 --- a/src/darwin_c.zig +++ b/src/darwin_c.zig @@ -415,6 +415,75 @@ pub const SystemErrno = enum(u8) { }; }; +pub const UV_E2BIG: i32 = @intFromEnum(SystemErrno.E2BIG); +pub const UV_EACCES: i32 = @intFromEnum(SystemErrno.EACCES); +pub const UV_EADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); +pub const UV_EADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); +pub const UV_EAFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); +pub const UV_EAGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); +pub const UV_EALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); +pub const UV_EBADF: i32 = @intFromEnum(SystemErrno.EBADF); +pub const UV_EBUSY: i32 = @intFromEnum(SystemErrno.EBUSY); +pub const UV_ECANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); +pub const UV_ECHARSET: i32 = -bun.windows.libuv.UV__ECHARSET; +pub const UV_ECONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); +pub const UV_ECONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); +pub const UV_ECONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); +pub const UV_EDESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); +pub const UV_EEXIST: i32 = @intFromEnum(SystemErrno.EEXIST); +pub const UV_EFAULT: i32 = @intFromEnum(SystemErrno.EFAULT); +pub const UV_EHOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); +pub const UV_EINTR: i32 = @intFromEnum(SystemErrno.EINTR); +pub const UV_EINVAL: i32 = @intFromEnum(SystemErrno.EINVAL); +pub const UV_EIO: i32 = 
@intFromEnum(SystemErrno.EIO); +pub const UV_EISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); +pub const UV_EISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); +pub const UV_ELOOP: i32 = @intFromEnum(SystemErrno.ELOOP); +pub const UV_EMFILE: i32 = @intFromEnum(SystemErrno.EMFILE); +pub const UV_EMSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); +pub const UV_ENAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); +pub const UV_ENETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); +pub const UV_ENETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); +pub const UV_ENFILE: i32 = @intFromEnum(SystemErrno.ENFILE); +pub const UV_ENOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); +pub const UV_ENODEV: i32 = @intFromEnum(SystemErrno.ENODEV); +pub const UV_ENOENT: i32 = @intFromEnum(SystemErrno.ENOENT); +pub const UV_ENOMEM: i32 = @intFromEnum(SystemErrno.ENOMEM); +pub const UV_ENONET: i32 = -bun.windows.libuv.UV_ENONET; +pub const UV_ENOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); +pub const UV_ENOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); +pub const UV_ENOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); +pub const UV_ENOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); +pub const UV_ENOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); +pub const UV_ENOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); +pub const UV_ENOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); +pub const UV_EPERM: i32 = @intFromEnum(SystemErrno.EPERM); +pub const UV_EPIPE: i32 = @intFromEnum(SystemErrno.EPIPE); +pub const UV_EPROTO: i32 = @intFromEnum(SystemErrno.EPROTO); +pub const UV_EPROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); +pub const UV_EPROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); +pub const UV_EROFS: i32 = @intFromEnum(SystemErrno.EROFS); +pub const UV_ESHUTDOWN: i32 = @intFromEnum(SystemErrno.ESHUTDOWN); +pub const UV_ESPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); +pub const UV_ESRCH: i32 = @intFromEnum(SystemErrno.ESRCH); +pub const UV_ETIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); +pub const UV_ETXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); +pub const UV_EXDEV: i32 = @intFromEnum(SystemErrno.EXDEV); +pub const UV_EFBIG: i32 = @intFromEnum(SystemErrno.EFBIG); +pub const UV_ENOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); +pub const UV_ERANGE: i32 = @intFromEnum(SystemErrno.ERANGE); +pub const UV_ENXIO: i32 = @intFromEnum(SystemErrno.ENXIO); +pub const UV_EMLINK: i32 = @intFromEnum(SystemErrno.EMLINK); +pub const UV_EHOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); +pub const UV_EREMOTEIO: i32 = -bun.windows.libuv.UV_EREMOTEIO; +pub const UV_ENOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); +pub const UV_EFTYPE: i32 = @intFromEnum(SystemErrno.EFTYPE); +pub const UV_EILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); +pub const UV_EOVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); +pub const UV_ESOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); +pub const UV_ENODATA: i32 = @intFromEnum(SystemErrno.ENODATA); +pub const UV_EUNATCH: i32 = -bun.windows.libuv.UV_EUNATCH; + // Courtesy of https://github.com/nodejs/node/blob/master/deps/uv/src/unix/darwin-stub.h pub const struct_CFArrayCallBacks = opaque {}; pub const CFIndex = c_long; diff --git a/src/js/internal/errors.ts b/src/js/internal/errors.ts new file mode 100644 index 00000000000000..034f33b4575e30 --- /dev/null +++ b/src/js/internal/errors.ts @@ -0,0 +1,4 @@ +export default { + ERR_INVALID_ARG_TYPE: $newCppFunction("NodeError.cpp", "jsFunction_ERR_INVALID_ARG_TYPE", 3), + ERR_OUT_OF_RANGE: 
$newCppFunction("NodeError.cpp", "jsFunction_ERR_OUT_OF_RANGE", 3), +}; diff --git a/src/js/node/util.ts b/src/js/node/util.ts index c237a1915fd970..eb7db1298542b9 100644 --- a/src/js/node/util.ts +++ b/src/js/node/util.ts @@ -2,6 +2,11 @@ const types = require("node:util/types"); /** @type {import('node-inspect-extracted')} */ const utl = require("internal/util/inspect"); +const { ERR_INVALID_ARG_TYPE, ERR_OUT_OF_RANGE } = require("internal/errors"); + +const internalErrorName = $newZigFunction("node_util_binding.zig", "internalErrorName", 1); + +const NumberIsSafeInteger = Number.isSafeInteger; var cjs_exports = {}; @@ -280,6 +285,12 @@ function styleText(format, text) { return `\u001b[${formatCodes[0]}m${text}\u001b[${formatCodes[1]}m`; } +function getSystemErrorName(err: any) { + if (typeof err !== "number") throw ERR_INVALID_ARG_TYPE("err", "number", err); + if (err >= 0 || !NumberIsSafeInteger(err)) throw ERR_OUT_OF_RANGE("err", "a negative integer", err); + return internalErrorName(err); +} + export default Object.assign(cjs_exports, { format, formatWithOptions, @@ -315,4 +326,5 @@ export default Object.assign(cjs_exports, { TextEncoder, parseArgs, styleText, + getSystemErrorName, }); diff --git a/src/linux_c.zig b/src/linux_c.zig index 5ba1b39b75bf8e..46e1b080a04b18 100644 --- a/src/linux_c.zig +++ b/src/linux_c.zig @@ -293,6 +293,75 @@ pub const SystemErrno = enum(u8) { }; }; +pub const UV_E2BIG: i32 = @intFromEnum(SystemErrno.E2BIG); +pub const UV_EACCES: i32 = @intFromEnum(SystemErrno.EACCES); +pub const UV_EADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); +pub const UV_EADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); +pub const UV_EAFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); +pub const UV_EAGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); +pub const UV_EALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); +pub const UV_EBADF: i32 = @intFromEnum(SystemErrno.EBADF); +pub const UV_EBUSY: i32 = @intFromEnum(SystemErrno.EBUSY); +pub const UV_ECANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); +pub const UV_ECHARSET: i32 = -bun.windows.libuv.UV_ECHARSET; +pub const UV_ECONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); +pub const UV_ECONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); +pub const UV_ECONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); +pub const UV_EDESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); +pub const UV_EEXIST: i32 = @intFromEnum(SystemErrno.EEXIST); +pub const UV_EFAULT: i32 = @intFromEnum(SystemErrno.EFAULT); +pub const UV_EHOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); +pub const UV_EINTR: i32 = @intFromEnum(SystemErrno.EINTR); +pub const UV_EINVAL: i32 = @intFromEnum(SystemErrno.EINVAL); +pub const UV_EIO: i32 = @intFromEnum(SystemErrno.EIO); +pub const UV_EISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); +pub const UV_EISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); +pub const UV_ELOOP: i32 = @intFromEnum(SystemErrno.ELOOP); +pub const UV_EMFILE: i32 = @intFromEnum(SystemErrno.EMFILE); +pub const UV_EMSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); +pub const UV_ENAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); +pub const UV_ENETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); +pub const UV_ENETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); +pub const UV_ENFILE: i32 = @intFromEnum(SystemErrno.ENFILE); +pub const UV_ENOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); +pub const UV_ENODEV: i32 = @intFromEnum(SystemErrno.ENODEV); +pub const UV_ENOENT: i32 = 
@intFromEnum(SystemErrno.ENOENT); +pub const UV_ENOMEM: i32 = @intFromEnum(SystemErrno.ENOMEM); +pub const UV_ENONET: i32 = @intFromEnum(SystemErrno.ENONET); +pub const UV_ENOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); +pub const UV_ENOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); +pub const UV_ENOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); +pub const UV_ENOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); +pub const UV_ENOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); +pub const UV_ENOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); +pub const UV_ENOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); +pub const UV_EPERM: i32 = @intFromEnum(SystemErrno.EPERM); +pub const UV_EPIPE: i32 = @intFromEnum(SystemErrno.EPIPE); +pub const UV_EPROTO: i32 = @intFromEnum(SystemErrno.EPROTO); +pub const UV_EPROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); +pub const UV_EPROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); +pub const UV_EROFS: i32 = @intFromEnum(SystemErrno.EROFS); +pub const UV_ESHUTDOWN: i32 = @intFromEnum(SystemErrno.ESHUTDOWN); +pub const UV_ESPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); +pub const UV_ESRCH: i32 = @intFromEnum(SystemErrno.ESRCH); +pub const UV_ETIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); +pub const UV_ETXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); +pub const UV_EXDEV: i32 = @intFromEnum(SystemErrno.EXDEV); +pub const UV_EFBIG: i32 = @intFromEnum(SystemErrno.EFBIG); +pub const UV_ENOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); +pub const UV_ERANGE: i32 = @intFromEnum(SystemErrno.ERANGE); +pub const UV_ENXIO: i32 = @intFromEnum(SystemErrno.ENXIO); +pub const UV_EMLINK: i32 = @intFromEnum(SystemErrno.EMLINK); +pub const UV_EHOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); +pub const UV_EREMOTEIO: i32 = @intFromEnum(SystemErrno.EREMOTEIO); +pub const UV_ENOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); +pub const UV_EFTYPE: i32 = -bun.windows.libuv.UV_EFTYPE; +pub const UV_EILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); +pub const UV_EOVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); +pub const UV_ESOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); +pub const UV_ENODATA: i32 = @intFromEnum(SystemErrno.ENODATA); +pub const UV_EUNATCH: i32 = @intFromEnum(SystemErrno.EUNATCH); + pub const preallocate_length = 2048 * 1024; pub fn preallocate_file(fd: std.posix.fd_t, offset: std.posix.off_t, len: std.posix.off_t) anyerror!void { // https://gist.github.com/Jarred-Sumner/b37b93399b63cbfd86e908c59a0a37df diff --git a/src/windows_c.zig b/src/windows_c.zig index d465ac0ecf1559..db6a06b8a3efa4 100644 --- a/src/windows_c.zig +++ b/src/windows_c.zig @@ -958,6 +958,75 @@ pub const SystemErrno = enum(u16) { }; }; +pub const UV_E2BIG = -uv.UV_E2BIG; +pub const UV_EACCES = -uv.UV_EACCES; +pub const UV_EADDRINUSE = -uv.UV_EADDRINUSE; +pub const UV_EADDRNOTAVAIL = -uv.UV_EADDRNOTAVAIL; +pub const UV_EAFNOSUPPORT = -uv.UV_EAFNOSUPPORT; +pub const UV_EAGAIN = -uv.UV_EAGAIN; +pub const UV_EALREADY = -uv.UV_EALREADY; +pub const UV_EBADF = -uv.UV_EBADF; +pub const UV_EBUSY = -uv.UV_EBUSY; +pub const UV_ECANCELED = -uv.UV_ECANCELED; +pub const UV_ECHARSET = -uv.UV_ECHARSET; +pub const UV_ECONNABORTED = -uv.UV_ECONNABORTED; +pub const UV_ECONNREFUSED = -uv.UV_ECONNREFUSED; +pub const UV_ECONNRESET = -uv.UV_ECONNRESET; +pub const UV_EDESTADDRREQ = -uv.UV_EDESTADDRREQ; +pub const UV_EEXIST = -uv.UV_EEXIST; +pub const UV_EFAULT = -uv.UV_EFAULT; +pub const UV_EHOSTUNREACH = -uv.UV_EHOSTUNREACH; +pub const UV_EINTR = -uv.UV_EINTR; +pub 
const UV_EINVAL = -uv.UV_EINVAL; +pub const UV_EIO = -uv.UV_EIO; +pub const UV_EISCONN = -uv.UV_EISCONN; +pub const UV_EISDIR = -uv.UV_EISDIR; +pub const UV_ELOOP = -uv.UV_ELOOP; +pub const UV_EMFILE = -uv.UV_EMFILE; +pub const UV_EMSGSIZE = -uv.UV_EMSGSIZE; +pub const UV_ENAMETOOLONG = -uv.UV_ENAMETOOLONG; +pub const UV_ENETDOWN = -uv.UV_ENETDOWN; +pub const UV_ENETUNREACH = -uv.UV_ENETUNREACH; +pub const UV_ENFILE = -uv.UV_ENFILE; +pub const UV_ENOBUFS = -uv.UV_ENOBUFS; +pub const UV_ENODEV = -uv.UV_ENODEV; +pub const UV_ENOENT = -uv.UV_ENOENT; +pub const UV_ENOMEM = -uv.UV_ENOMEM; +pub const UV_ENONET = -uv.UV_ENONET; +pub const UV_ENOSPC = -uv.UV_ENOSPC; +pub const UV_ENOSYS = -uv.UV_ENOSYS; +pub const UV_ENOTCONN = -uv.UV_ENOTCONN; +pub const UV_ENOTDIR = -uv.UV_ENOTDIR; +pub const UV_ENOTEMPTY = -uv.UV_ENOTEMPTY; +pub const UV_ENOTSOCK = -uv.UV_ENOTSOCK; +pub const UV_ENOTSUP = -uv.UV_ENOTSUP; +pub const UV_EPERM = -uv.UV_EPERM; +pub const UV_EPIPE = -uv.UV_EPIPE; +pub const UV_EPROTO = -uv.UV_EPROTO; +pub const UV_EPROTONOSUPPORT = -uv.UV_EPROTONOSUPPORT; +pub const UV_EPROTOTYPE = -uv.UV_EPROTOTYPE; +pub const UV_EROFS = -uv.UV_EROFS; +pub const UV_ESHUTDOWN = -uv.UV_ESHUTDOWN; +pub const UV_ESPIPE = -uv.UV_ESPIPE; +pub const UV_ESRCH = -uv.UV_ESRCH; +pub const UV_ETIMEDOUT = -uv.UV_ETIMEDOUT; +pub const UV_ETXTBSY = -uv.UV_ETXTBSY; +pub const UV_EXDEV = -uv.UV_EXDEV; +pub const UV_EFBIG = -uv.UV_EFBIG; +pub const UV_ENOPROTOOPT = -uv.UV_ENOPROTOOPT; +pub const UV_ERANGE = -uv.UV_ERANGE; +pub const UV_ENXIO = -uv.UV_ENXIO; +pub const UV_EMLINK = -uv.UV_EMLINK; +pub const UV_EHOSTDOWN = -uv.UV_EHOSTDOWN; +pub const UV_EREMOTEIO = -uv.UV_EREMOTEIO; +pub const UV_ENOTTY = -uv.UV_ENOTTY; +pub const UV_EFTYPE = -uv.UV_EFTYPE; +pub const UV_EILSEQ = -uv.UV_EILSEQ; +pub const UV_EOVERFLOW = -uv.UV_EOVERFLOW; +pub const UV_ESOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT; +pub const UV_ENODATA = -uv.UV_ENODATA; +pub const UV_EUNATCH = -uv.UV_EUNATCH; + pub const off_t = i64; pub fn preallocate_file(_: posix.fd_t, _: off_t, _: off_t) !void {} diff --git a/test/harness.ts b/test/harness.ts index 704d41f1f9e92e..6710381e756ead 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -386,6 +386,43 @@ expect.extend({ message: () => `Expected ${cmds.join(" ")} to fail`, }; }, + toThrowWithCode(fn: CallableFunction, cls: CallableFunction, code: string) { + try { + fn(); + return { + pass: false, + message: () => `Received function did not throw`, + }; + } catch (e) { + // expect(e).toBeInstanceOf(cls); + if (!(e instanceof cls)) { + return { + pass: false, + message: () => `Expected error to be instanceof ${cls.name}; got ${e.__proto__.constructor.name}`, + }; + } + + // expect(e).toHaveProperty("code"); + if (!("code" in e)) { + return { + pass: false, + message: () => `Expected error to have property 'code'; got ${e}`, + }; + } + + // expect(e.code).toEqual(code); + if (e.code !== code) { + return { + pass: false, + message: () => `Expected error to have code '${code}'; got ${e.code}`, + }; + } + + return { + pass: true, + }; + } + }, }); export function ospath(path: string) { @@ -1033,6 +1070,7 @@ interface BunHarnessTestMatchers { toHaveTestTimedOutAfter(expected: number): void; toBeBinaryType(expected: keyof typeof binaryTypes): void; toRun(optionalStdout?: string, expectedCode?: number): void; + toThrowWithCode(cls: CallableFunction, code: string): void; } declare module "bun:test" { diff --git a/test/js/node/util/util.test.js b/test/js/node/util/util.test.js index 4c1c51511e08c4..a1f63ca8010128 
100644 --- a/test/js/node/util/util.test.js +++ b/test/js/node/util/util.test.js @@ -24,6 +24,7 @@ import { expect, describe, it } from "bun:test"; import util from "util"; import assert from "assert"; +import "harness"; // const context = require('vm').runInNewContext; // TODO: Use a vm polyfill const strictEqual = (...args) => { @@ -357,4 +358,46 @@ describe("util", () => { assert.strictEqual(util.styleText("red", "test"), "\u001b[31mtest\u001b[39m"); }); + + describe("getSystemErrorName", () => { + for (const item of ["test", {}, []]) { + it(`throws when passing: ${item}`, () => { + expect(() => util.getSystemErrorName(item)).toThrowWithCode(TypeError, "ERR_INVALID_ARG_TYPE"); + }); + } + + for (const item of [0, 1, Infinity, -Infinity, NaN]) { + it(`throws when passing: ${item}`, () => { + expect(() => util.getSystemErrorName(item)).toThrowWithCode(RangeError, "ERR_OUT_OF_RANGE"); + }); + } + + const proc = Bun.spawnSync({ + cmd: [ + "node", + "-e", + "console.log(JSON.stringify([...require('node:util').getSystemErrorMap().entries()].map((v) => [v[0], v[1][0]])));", + ], + stdio: ["ignore", "pipe", "pipe"], + }); + for (const [code, name] of JSON.parse(proc.stdout.toString())) { + it(`getSystemErrorName(${code}) should be ${name}`, () => { + expect(util.getSystemErrorName(code)).toBe(name); + }); + } + + it("getSystemErrorName(-4096) should be unknown", () => { + expect(util.getSystemErrorName(-4096)).toBe("Unknown system error -4096"); + }); + + // these are the windows/fallback codes and they should match node in either returning the correct name or 'Unknown system error'. + // eg on linux getSystemErrorName(-4034) should return unkown and not 'ERANGE' since errno defines it as -34 for that platform. + for (let i = -4095; i <= -4023; i++) { + it(`negative space: getSystemErrorName(${i}) is correct`, () => { + const cmd = ["node", "-e", `console.log(JSON.stringify(util.getSystemErrorName(${i})));`]; + const stdio = ["ignore", "pipe", "pipe"]; + expect(util.getSystemErrorName(i)).toEqual(JSON.parse(Bun.spawnSync({ cmd, stdio }).stdout.toString())); + }); + } + }); }); From 1d6dd3bc7a4b385b1a24bdaf96d5cefddc8707eb Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 27 Jul 2024 01:02:46 -0700 Subject: [PATCH 24/46] Slightly better error.stack (#12861) --- src/bun.js/bindings/CallSite.cpp | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/src/bun.js/bindings/CallSite.cpp b/src/bun.js/bindings/CallSite.cpp index 85a41a17be3ecc..f1a51812655a58 100644 --- a/src/bun.js/bindings/CallSite.cpp +++ b/src/bun.js/bindings/CallSite.cpp @@ -126,11 +126,21 @@ void CallSite::formatAsString(JSC::VM& vm, JSC::JSGlobalObject* globalObject, WT if (isNative()) { sb.append("native"_s); } else { - sb.append(mySourceURL->getString(globalObject)); - sb.append(":"_s); - sb.append(myLineNumber->getString(globalObject)); - sb.append(":"_s); - sb.append(myColumnNumber->getString(globalObject)); + if (mySourceURL->length() == 0) { + sb.append("unknown"_s); + } else { + sb.append(mySourceURL->getString(globalObject)); + } + + if (myLineNumber->length() > 0 && myColumnNumber->length() > 0) { + sb.append(":"_s); + sb.append(myLineNumber->getString(globalObject)); + sb.append(":"_s); + sb.append(myColumnNumber->getString(globalObject)); + } else if (myLineNumber->length() > 0) { + sb.append(":"_s); + sb.append(myLineNumber->getString(globalObject)); + } } sb.append(")"_s); } From ee8f939e9d58c17e7b1d45d889e284e39dc04f98 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 27 
Jul 2024 02:02:48 -0700 Subject: [PATCH 25/46] Bump versions of things --- CMakeLists.txt | 2 +- LATEST | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 397847b9b07b77..fd42fe427c2728 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,7 +3,7 @@ cmake_policy(SET CMP0091 NEW) cmake_policy(SET CMP0067 NEW) set(CMAKE_POLICY_DEFAULT_CMP0069 NEW) -set(Bun_VERSION "1.1.21") +set(Bun_VERSION "1.1.22") set(WEBKIT_TAG f9a0fda2d2b2fd001a00bfcf8e7917a56b382516) set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}") diff --git a/LATEST b/LATEST index 0ee9c5d6a351ae..a2a8e42b09e184 100644 --- a/LATEST +++ b/LATEST @@ -1 +1 @@ -1.1.20 \ No newline at end of file +1.1.21 \ No newline at end of file From 2957aac2169307fa545578159495dbb02f72746f Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 28 Jul 2024 05:13:50 -0700 Subject: [PATCH 26/46] In debug builds on macOS, add malloc_zone_check when GC runs --- src/bun.js/bindings/bindings.cpp | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index d1c58ac9a0d93f..def0956044bed4 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -116,6 +116,13 @@ #include "ErrorStackFrame.h" +#if OS(DARWIN) +#if BUN_DEBUG +#include +#define IS_MALLOC_DEBUGGING_ENABLED 1 +#endif +#endif + static WTF::StringView StringView_slice(WTF::StringView sv, unsigned start, unsigned end) { return sv.substring(start, end - start); @@ -4811,6 +4818,12 @@ JSC__JSValue JSC__VM__runGC(JSC__VM* vm, bool sync) { JSC::JSLockHolder lock(vm); +#if IS_MALLOC_DEBUGGING_ENABLED && OS(DARWIN) + if (!malloc_zone_check(nullptr)) { + BUN_PANIC("Heap corruption detected!!"); + } +#endif + vm->finalizeSynchronousJSExecution(); WTF::releaseFastMallocFreeMemory(); @@ -4825,6 +4838,12 @@ JSC__JSValue JSC__VM__runGC(JSC__VM* vm, bool sync) vm->finalizeSynchronousJSExecution(); +#if IS_MALLOC_DEBUGGING_ENABLED && OS(DARWIN) + if (!malloc_zone_check(nullptr)) { + BUN_PANIC("Heap corruption detected after GC!!"); + } +#endif + return JSC::JSValue::encode(JSC::jsNumber(vm->heap.sizeAfterLastFullCollection())); } From af581f257aa547f7f9bc7ff39817cb86f4a98dd9 Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Sun, 28 Jul 2024 07:46:53 -0700 Subject: [PATCH 27/46] textencoder: remove DOMJIT (#12868) --- src/bun.js/bindings/webcore/JSTextEncoder.cpp | 167 +++++++++--------- 1 file changed, 87 insertions(+), 80 deletions(-) diff --git a/src/bun.js/bindings/webcore/JSTextEncoder.cpp b/src/bun.js/bindings/webcore/JSTextEncoder.cpp index e617345166a245..0cee5a0c67b995 100644 --- a/src/bun.js/bindings/webcore/JSTextEncoder.cpp +++ b/src/bun.js/bindings/webcore/JSTextEncoder.cpp @@ -73,10 +73,10 @@ extern "C" size_t TextEncoder__encodeInto8(const LChar* stringPtr, size_t string extern "C" size_t TextEncoder__encodeInto16(const UChar* stringPtr, size_t stringLen, void* ptr, size_t len); extern "C" JSC::EncodedJSValue TextEncoder__encodeRopeString(JSC::JSGlobalObject* lexicalGlobalObject, JSC::JSString* str); -extern "C" { -static JSC_DECLARE_JIT_OPERATION_WITHOUT_WTF_INTERNAL(jsTextEncoderEncodeWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject*, JSTextEncoder*, DOMJIT::IDLJSArgumentType)); -static JSC_DECLARE_JIT_OPERATION_WITHOUT_WTF_INTERNAL(jsTextEncoderPrototypeFunction_encodeIntoWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject * lexicalGlobalObject, JSTextEncoder* 
castedThis, DOMJIT::IDLJSArgumentType source, DOMJIT::IDLJSArgumentType destination)); -} +// extern "C" { +// static JSC_DECLARE_JIT_OPERATION_WITHOUT_WTF_INTERNAL(jsTextEncoderEncodeWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject*, JSTextEncoder*, DOMJIT::IDLJSArgumentType)); +// static JSC_DECLARE_JIT_OPERATION_WITHOUT_WTF_INTERNAL(jsTextEncoderPrototypeFunction_encodeIntoWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject * lexicalGlobalObject, JSTextEncoder* castedThis, DOMJIT::IDLJSArgumentType source, DOMJIT::IDLJSArgumentType destination)); +// } template<> TextEncoder::EncodeIntoResult convertDictionary(JSGlobalObject& lexicalGlobalObject, JSValue value) { @@ -214,90 +214,97 @@ template<> void JSTextEncoderDOMConstructor::initializeProperties(VM& vm, JSDOMG putDirect(vm, vm.propertyNames->prototype, JSTextEncoder::prototype(vm, globalObject), JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete); } -static const JSC::DOMJIT::Signature DOMJITSignatureForJSTextEncoderEncodeWithoutTypeCheck( - jsTextEncoderEncodeWithoutTypeCheck, - JSTextEncoder::info(), - // https://github.com/oven-sh/bun/issues/9226 - // It's not totally clear what the correct side effects are for this function, so we just make it conservative for now. - JSC::DOMJIT::Effect {}, - DOMJIT::IDLResultTypeFilter::value, - DOMJIT::IDLArgumentTypeFilter::value); - -static const JSC::DOMJIT::Signature DOMJITSignatureForJSTextEncoderEncodeIntoWithoutTypeCheck( - jsTextEncoderPrototypeFunction_encodeIntoWithoutTypeCheck, - JSTextEncoder::info(), - - JSC::DOMJIT::Effect {}, - // JSC::DOMJIT::Effect::forReadWriteKinds(encodeIntoRead, encodeIntoWrite), - DOMJIT::IDLResultTypeFilter::value, - DOMJIT::IDLArgumentTypeFilter::value, - DOMJIT::IDLArgumentTypeFilter::value); +// static const JSC::DOMJIT::Signature DOMJITSignatureForJSTextEncoderEncodeWithoutTypeCheck( +// jsTextEncoderEncodeWithoutTypeCheck, +// JSTextEncoder::info(), +// // https://github.com/oven-sh/bun/issues/9226 +// // It's not totally clear what the correct side effects are for this function, so we just make it conservative for now. 
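
The two methods affected by this change are the standard WHATWG TextEncoder API; only their JIT fast paths are being disabled here, and the JavaScript-visible behavior stays the same. A plain usage sketch (ordinary TypeScript, nothing Bun-specific assumed):

    // encode(): string -> Uint8Array of UTF-8 bytes
    const encoder = new TextEncoder();
    const bytes = encoder.encode("docusaurus");

    // encodeInto(): encode into a caller-provided buffer, returning { read, written }
    const dest = new Uint8Array(16);
    const { read, written } = encoder.encodeInto("docusaurus", dest);
    console.log(bytes.length, read, written); // 10 10 10
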
+// JSC::DOMJIT::Effect {}, +// DOMJIT::IDLResultTypeFilter::value, +// DOMJIT::IDLArgumentTypeFilter::value); + +// static const JSC::DOMJIT::Signature DOMJITSignatureForJSTextEncoderEncodeIntoWithoutTypeCheck( +// jsTextEncoderPrototypeFunction_encodeIntoWithoutTypeCheck, +// JSTextEncoder::info(), + +// JSC::DOMJIT::Effect {}, +// // JSC::DOMJIT::Effect::forReadWriteKinds(encodeIntoRead, encodeIntoWrite), +// DOMJIT::IDLResultTypeFilter::value, +// DOMJIT::IDLArgumentTypeFilter::value, +// DOMJIT::IDLArgumentTypeFilter::value); /* Hash table for prototype */ static const HashTableValue JSTextEncoderPrototypeTableValues[] = { { "constructor"_s, static_cast(JSC::PropertyAttribute::DontEnum), NoIntrinsic, { HashTableValue::GetterSetterType, jsTextEncoderConstructor, 0 } }, { "encoding"_s, static_cast(JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::CustomAccessor | JSC::PropertyAttribute::DOMAttribute), NoIntrinsic, { HashTableValue::GetterSetterType, jsTextEncoder_encoding, 0 } }, - { "encode"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::DOMJITFunction), NoIntrinsic, { HashTableValue::DOMJITFunctionType, jsTextEncoderPrototypeFunction_encode, &DOMJITSignatureForJSTextEncoderEncodeWithoutTypeCheck } }, - { "encodeInto"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::DOMJITFunction), NoIntrinsic, { HashTableValue::DOMJITFunctionType, jsTextEncoderPrototypeFunction_encodeInto, &DOMJITSignatureForJSTextEncoderEncodeIntoWithoutTypeCheck } }, + // TODO: bring these back after fix issue with globalObject pointer argument in `encodeInto` + // REPRO: + // 1. bun create docusaurus + // 2. bun ./node_modules/.bin/docusaurus build --no-minify + // https://github.com/oven-sh/bun/issues/12335 + // { "encode"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::DOMJITFunction), NoIntrinsic, { HashTableValue::DOMJITFunctionType, jsTextEncoderPrototypeFunction_encode, &DOMJITSignatureForJSTextEncoderEncodeWithoutTypeCheck } }, + // { "encodeInto"_s, static_cast(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::DOMJITFunction), NoIntrinsic, { HashTableValue::DOMJITFunctionType, jsTextEncoderPrototypeFunction_encodeInto, &DOMJITSignatureForJSTextEncoderEncodeIntoWithoutTypeCheck } }, + { "encode"_s, static_cast(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsTextEncoderPrototypeFunction_encode, 1 } }, + { "encodeInto"_s, static_cast(JSC::PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsTextEncoderPrototypeFunction_encodeInto, 2 } }, }; -JSC_DEFINE_JIT_OPERATION(jsTextEncoderEncodeWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject * lexicalGlobalObject, JSTextEncoder* castedThis, DOMJIT::IDLJSArgumentType input)) -{ - VM& vm = JSC::getVM(lexicalGlobalObject); - IGNORE_WARNINGS_BEGIN("frame-address") - CallFrame* callFrame = DECLARE_CALL_FRAME(vm); - IGNORE_WARNINGS_END - JSC::JITOperationPrologueCallFrameTracer tracer(vm, callFrame); - auto throwScope = DECLARE_THROW_SCOPE(vm); - JSC::EncodedJSValue res; - String str; - if (input->is8Bit()) { - if (input->isRope()) { - GCDeferralContext gcDeferralContext(vm); - auto encodedValue = TextEncoder__encodeRopeString(lexicalGlobalObject, input); - if (!JSC::JSValue::decode(encodedValue).isUndefined()) { - RELEASE_AND_RETURN(throwScope, { encodedValue }); - } - } - - str = input->value(lexicalGlobalObject); - res = TextEncoder__encode8(lexicalGlobalObject, str.span8().data(), str.length()); 
- } else { - str = input->value(lexicalGlobalObject); - res = TextEncoder__encode16(lexicalGlobalObject, str.span16().data(), str.length()); - } - - if (UNLIKELY(JSC::JSValue::decode(res).isObject() && JSC::JSValue::decode(res).getObject()->isErrorInstance())) { - throwScope.throwException(lexicalGlobalObject, JSC::JSValue::decode(res)); - return { encodedJSValue() }; - } - - RELEASE_AND_RETURN(throwScope, { res }); -} - -JSC_DEFINE_JIT_OPERATION(jsTextEncoderPrototypeFunction_encodeIntoWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject * lexicalGlobalObject, JSTextEncoder* castedThis, DOMJIT::IDLJSArgumentType sourceStr, DOMJIT::IDLJSArgumentType destination)) -{ - VM& vm = JSC::getVM(lexicalGlobalObject); - IGNORE_WARNINGS_BEGIN("frame-address") - CallFrame* callFrame = DECLARE_CALL_FRAME(vm); - IGNORE_WARNINGS_END - JSC::JITOperationPrologueCallFrameTracer tracer(vm, callFrame); - String source = sourceStr->value(lexicalGlobalObject); - size_t res = 0; - if (!source.is8Bit()) { - res = TextEncoder__encodeInto16(source.span16().data(), source.length(), destination->vector(), destination->byteLength()); - } else { - res = TextEncoder__encodeInto8(source.span8().data(), source.length(), destination->vector(), destination->byteLength()); - } - - Zig::GlobalObject* globalObject = reinterpret_cast(lexicalGlobalObject); - auto* result = JSC::constructEmptyObject(vm, globalObject->encodeIntoObjectStructure()); - result->putDirectOffset(vm, 0, JSC::jsNumber(static_cast(res))); - result->putDirectOffset(vm, 1, JSC::jsNumber(static_cast(res >> 32))); - - return { JSValue::encode(result) }; -} +// JSC_DEFINE_JIT_OPERATION(jsTextEncoderEncodeWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject * lexicalGlobalObject, JSTextEncoder* castedThis, DOMJIT::IDLJSArgumentType input)) +// { +// VM& vm = JSC::getVM(lexicalGlobalObject); +// IGNORE_WARNINGS_BEGIN("frame-address") +// CallFrame* callFrame = DECLARE_CALL_FRAME(vm); +// IGNORE_WARNINGS_END +// JSC::JITOperationPrologueCallFrameTracer tracer(vm, callFrame); +// auto throwScope = DECLARE_THROW_SCOPE(vm); +// JSC::EncodedJSValue res; +// String str; +// if (input->is8Bit()) { +// if (input->isRope()) { +// GCDeferralContext gcDeferralContext(vm); +// auto encodedValue = TextEncoder__encodeRopeString(lexicalGlobalObject, input); +// if (!JSC::JSValue::decode(encodedValue).isUndefined()) { +// RELEASE_AND_RETURN(throwScope, { encodedValue }); +// } +// } + +// str = input->value(lexicalGlobalObject); +// res = TextEncoder__encode8(lexicalGlobalObject, str.span8().data(), str.length()); +// } else { +// str = input->value(lexicalGlobalObject); +// res = TextEncoder__encode16(lexicalGlobalObject, str.span16().data(), str.length()); +// } + +// if (UNLIKELY(JSC::JSValue::decode(res).isObject() && JSC::JSValue::decode(res).getObject()->isErrorInstance())) { +// throwScope.throwException(lexicalGlobalObject, JSC::JSValue::decode(res)); +// return { encodedJSValue() }; +// } + +// RELEASE_AND_RETURN(throwScope, { res }); +// } + +// JSC_DEFINE_JIT_OPERATION(jsTextEncoderPrototypeFunction_encodeIntoWithoutTypeCheck, JSC::EncodedJSValue, (JSC::JSGlobalObject * lexicalGlobalObject, JSTextEncoder* castedThis, DOMJIT::IDLJSArgumentType sourceStr, DOMJIT::IDLJSArgumentType destination)) +// { +// VM& vm = JSC::getVM(lexicalGlobalObject); +// IGNORE_WARNINGS_BEGIN("frame-address") +// CallFrame* callFrame = DECLARE_CALL_FRAME(vm); +// IGNORE_WARNINGS_END +// JSC::JITOperationPrologueCallFrameTracer tracer(vm, callFrame); +// String source = 
sourceStr->value(lexicalGlobalObject); +// size_t res = 0; +// if (!source.is8Bit()) { +// res = TextEncoder__encodeInto16(source.span16().data(), source.length(), destination->vector(), destination->byteLength()); +// } else { +// res = TextEncoder__encodeInto8(source.span8().data(), source.length(), destination->vector(), destination->byteLength()); +// } + +// Zig::GlobalObject* globalObject = reinterpret_cast(lexicalGlobalObject); +// auto* result = JSC::constructEmptyObject(vm, globalObject->encodeIntoObjectStructure()); +// result->putDirectOffset(vm, 0, JSC::jsNumber(static_cast(res))); +// result->putDirectOffset(vm, 1, JSC::jsNumber(static_cast(res >> 32))); + +// return { JSValue::encode(result) }; +// } const ClassInfo JSTextEncoderPrototype::s_info = { "TextEncoder"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSTextEncoderPrototype) }; From 720448ccbbf11e07896935b554f2129ce7708739 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 28 Jul 2024 08:30:32 -0700 Subject: [PATCH 28/46] Fix memory leak in RuntimeTranspilerStore (#12900) --- src/bun.js/module_loader.zig | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 11465357f28d19..c205a1a0ec4b61 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -411,12 +411,10 @@ pub const RuntimeTranspilerStore = struct { }; const parse_error = this.parse_error; - if (!vm.transpiler_store.store.hive.in(this)) { - this.promise.deinit(); - } + this.promise.deinit(); this.deinit(); - _ = vm.transpiler_store.store.hive.put(this); + _ = vm.transpiler_store.store.put(this); ModuleLoader.AsyncModule.fulfill(globalThis, promise, resolved_source, parse_error, specifier, referrer, &log); } From 5d67e7b2b75257efef99d672142e5e37703acbd4 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 28 Jul 2024 18:37:35 -0700 Subject: [PATCH 29/46] Use typed allocators in more places (#12899) --- src/async/posix_event_loop.zig | 6 +- src/async/windows_event_loop.zig | 4 +- src/bun.js/api/JSTranspiler.zig | 3 + src/bun.js/api/server.zig | 39 +++---- src/bun.js/bindings/bindings.zig | 187 +++++++++++++------------------ src/bun.js/event_loop.zig | 2 +- src/bun.js/javascript.zig | 19 +++- src/bun.js/module_loader.zig | 6 +- src/bun.js/rare_data.zig | 2 +- src/bun.js/webcore/body.zig | 2 + src/bun.js/webcore/request.zig | 33 ++---- src/bun.zig | 24 ++-- src/hive_array.zig | 36 ++++-- src/resolver/package_json.zig | 2 + src/resolver/resolver.zig | 6 +- src/resolver/tsconfig_json.zig | 9 +- 16 files changed, 189 insertions(+), 191 deletions(-) diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig index aa42d1848cb316..491e09ce7b2fcc 100644 --- a/src/async/posix_event_loop.zig +++ b/src/async/posix_event_loop.zig @@ -537,7 +537,7 @@ pub const FilePoll = struct { } }; - const HiveArray = bun.HiveArray(FilePoll, 128).Fallback; + const HiveArray = bun.HiveArray(FilePoll, if (bun.heap_breakdown.enabled) 0 else 128).Fallback; // We defer freeing FilePoll until the end of the next event loop iteration // This ensures that we don't free a FilePoll before the next callback is called @@ -548,9 +548,9 @@ pub const FilePoll = struct { const log = Output.scoped(.FilePoll, false); - pub fn init(allocator: std.mem.Allocator) Store { + pub fn init() Store { return .{ - .hive = HiveArray.init(allocator), + .hive = HiveArray.init(bun.typedAllocator(FilePoll)), }; } diff --git a/src/async/windows_event_loop.zig 
b/src/async/windows_event_loop.zig index 24a2d7647c4a29..0943d1959b1938 100644 --- a/src/async/windows_event_loop.zig +++ b/src/async/windows_event_loop.zig @@ -316,9 +316,9 @@ pub const FilePoll = struct { const log = Output.scoped(.FilePoll, false); - pub fn init(allocator: std.mem.Allocator) Store { + pub fn init() Store { return .{ - .hive = HiveArray.init(allocator), + .hive = HiveArray.init(bun.typedAllocator(FilePoll)), }; } diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index 966372a0d74964..56390b6eb93453 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -247,6 +247,9 @@ pub const TransformTask = struct { this.log.deinit(); this.input_code.deinitAndUnprotect(); this.output_code.deref(); + if (this.tsconfig) |tsconfig| { + tsconfig.destroy(); + } this.destroy(); } diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 29df596df68be3..19fd82e2fc38bc 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -1400,7 +1400,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp // This pre-allocates up to 2,048 RequestContext structs. // It costs about 655,632 bytes. - pub const RequestContextStackAllocator = bun.HiveArray(RequestContext, 2048).Fallback; + pub const RequestContextStackAllocator = bun.HiveArray(RequestContext, if (bun.heap_breakdown.enabled) 0 else 2048).Fallback; pub const name = "HTTPRequestContext" ++ (if (debug_mode) "Debug" else "") ++ (if (ThisServer.ssl_enabled) "TLS" else ""); pub const shim = JSC.Shimmer("Bun", name, @This()); @@ -1427,7 +1427,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp blob: JSC.WebCore.AnyBlob = JSC.WebCore.AnyBlob{ .Blob = .{} }, sendfile: SendfileContext = undefined, - request_body: ?*JSC.WebCore.BodyValueRef = null, + request_body: ?*JSC.BodyValueRef = null, request_body_buf: std.ArrayListUnmanaged(u8) = .{}, request_body_content_len: usize = 0, @@ -5300,6 +5300,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp pub const doRequestIP = JSC.wrapInstanceMethod(ThisServer, "requestIP", false); pub usingnamespace NamespaceType; + pub usingnamespace bun.New(@This()); pub fn constructor(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) ?*ThisServer { globalThis.throw("Server() is not a constructor", .{}); @@ -5667,7 +5668,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp existing_request = Request.init( bun.String.createUTF8(url.href), headers, - JSC.WebCore.InitRequestBodyValue(body) catch bun.outOfMemory(), + this.vm.initRequestBodyValue(body) catch bun.outOfMemory(), method, ); } else if (first_arg.as(Request)) |request_| { @@ -5684,8 +5685,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp return JSPromise.rejectedPromiseValue(ctx, err); } - var request = bun.default_allocator.create(Request) catch unreachable; - request.* = existing_request; + var request = Request.new(existing_request); const response_value = this.config.onRequest.call( this.globalThis, @@ -5977,23 +5977,26 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp this.config.deinit(); this.app.destroy(); - const allocator = this.allocator; - allocator.destroy(this); + this.destroy(); } pub fn init(config: ServerConfig, globalThis: *JSGlobalObject) *ThisServer { - var server = bun.default_allocator.create(ThisServer) catch bun.outOfMemory(); - server.* = .{ + var server 
= ThisServer.new(.{ .globalThis = globalThis, .config = config, .base_url_string_for_joining = bun.default_allocator.dupe(u8, strings.trim(config.base_url.href, "/")) catch unreachable, .vm = JSC.VirtualMachine.get(), .allocator = Arena.getThreadlocalDefault(), - }; + }); if (RequestContext.pool == null) { RequestContext.pool = server.allocator.create(RequestContext.RequestContextStackAllocator) catch bun.outOfMemory(); - RequestContext.pool.?.* = RequestContext.RequestContextStackAllocator.init(server.allocator); + RequestContext.pool.?.* = RequestContext.RequestContextStackAllocator.init( + if (comptime bun.heap_breakdown.enabled) + bun.typedAllocator(RequestContext) + else + bun.default_allocator, + ); } server.request_pool_allocator = RequestContext.pool.?; @@ -6236,20 +6239,19 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp var ctx = this.request_pool_allocator.tryGet() catch bun.outOfMemory(); ctx.create(this, req, resp); this.vm.jsc.reportExtraMemory(@sizeOf(RequestContext)); - var request_object = this.allocator.create(JSC.WebCore.Request) catch bun.outOfMemory(); - var body = JSC.WebCore.InitRequestBodyValue(.{ .Null = {} }) catch unreachable; + var body = this.vm.initRequestBodyValue(.{ .Null = {} }) catch unreachable; ctx.request_body = body; var signal = JSC.WebCore.AbortSignal.new(this.globalThis); ctx.signal = signal; - request_object.* = .{ + const request_object = Request.new(.{ .method = ctx.method, .request_context = AnyRequestContext.init(ctx), .https = ssl_enabled, .signal = signal.ref(), .body = body.ref(), - }; + }); if (comptime debug_mode) { ctx.flags.is_web_browser_navigation = brk: { @@ -6354,21 +6356,20 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp req.setYield(false); var ctx = this.request_pool_allocator.tryGet() catch @panic("ran out of memory"); ctx.create(this, req, resp); - var request_object = this.allocator.create(JSC.WebCore.Request) catch unreachable; - var body = JSC.WebCore.InitRequestBodyValue(.{ .Null = {} }) catch unreachable; + var body = this.vm.initRequestBodyValue(.{ .Null = {} }) catch unreachable; ctx.request_body = body; var signal = JSC.WebCore.AbortSignal.new(this.globalThis); ctx.signal = signal; - request_object.* = .{ + var request_object = Request.new(.{ .method = ctx.method, .request_context = AnyRequestContext.init(ctx), .upgrader = ctx, .https = ssl_enabled, .signal = signal.ref(), .body = body.ref(), - }; + }); ctx.upgrade_context = upgrade_ctx; // We keep the Request object alive for the duration of the request so that we can remove the pointer to the UWS request object. diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index e0cf20903af8d1..d1aa20bf0ef3e8 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -3355,115 +3355,84 @@ pub const JSValue = enum(JSValueReprInt) { pub const name = "JSC::JSValue"; pub const namespace = "JSC"; pub const JSType = enum(u8) { - // The Cell value must come before any JS that is a JSCell. 
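
A hypothetical TypeScript sketch of the allocation pattern this patch leans on: bun.HiveArray(T, capacity).Fallback behaves like a fixed-capacity free list that hands out pooled objects while it can and falls back to ordinary allocation once the pool is exhausted. The class and names below are invented for illustration; they are not Bun APIs.

    class FallbackPool<T> {
      private free: T[] = [];
      constructor(
        private readonly capacity: number,
        private readonly create: () => T,
      ) {}

      get(): T {
        // Reuse a pooled object when one is available, otherwise allocate a fresh one.
        return this.free.pop() ?? this.create();
      }

      put(value: T): void {
        // Keep at most `capacity` objects around for reuse; extras are simply dropped.
        if (this.free.length < this.capacity) this.free.push(value);
      }
    }

    // Sized like the RequestContext pool above:
    const pool = new FallbackPool(2048, () => ({ inUse: false }));
    const ctx = pool.get();
    pool.put(ctx);

Note how the diffs pass a capacity of 0 when bun.heap_breakdown is enabled, which presumably disables pooling so every object goes through the per-type allocator and remains visible in heap breakdowns.
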
- Cell, - Structure, - String, - HeapBigInt, - Symbol, - - GetterSetter, - CustomGetterSetter, - /// For 32-bit architectures, this wraps a 64-bit JSValue - APIValueWrapper, - - NativeExecutable, - - ProgramExecutable, - ModuleProgramExecutable, - EvalExecutable, - FunctionExecutable, - - UnlinkedFunctionExecutable, - - UnlinkedProgramCodeBlock, - UnlinkedModuleProgramCodeBlock, - UnlinkedEvalCodeBlock, - UnlinkedFunctionCodeBlock, - - CodeBlock, - - JSImmutableButterfly, - JSSourceCode, - JSScriptFetcher, - JSScriptFetchParameters, - - // The Object value must come before any JS that is a subclass of JSObject. - Object, - FinalObject, - JSCallee, - JSFunction, - InternalFunction, - NullSetterFunction, - BooleanObject, - NumberObject, - ErrorInstance, - GlobalProxy, - DirectArguments, - ScopedArguments, - ClonedArguments, - - // Start JSArray s. - Array, - DerivedArray, - // End JSArray s. - - ArrayBuffer, - - // Start JSArrayBufferView s. Keep in sync with the order of FOR_EACH_D_ARRAY__EXCLUDING_DATA_VIEW. - Int8Array, - Uint8Array, - Uint8ClampedArray, - Int16Array, - Uint16Array, - Int32Array, - Uint32Array, - Float32Array, - Float64Array, - BigInt64Array, - BigUint64Array, - DataView, - // End JSArrayBufferView s. - - // JSScope <- JSWithScope - // <- StrictEvalActivation - // <- JSSymbolTableObject <- JSLexicalEnvironment <- JSModuleEnvironment - // <- JSSegmentedVariableObject <- JSGlobalLexicalEnvironment - // <- JSGlobalObject - // Start JSScope s. - // Start environment record s. - GlobalObject, - GlobalLexicalEnvironment, - LexicalEnvironment, - ModuleEnvironment, - StrictEvalActivation, - // End environment record s. - WithScope, - // End JSScope s. - - ModuleNamespaceObject, - ShadowRealm, - RegExpObject, - JSDate, - ProxyObject, - JSGenerator, - JSAsyncGenerator, - JSArrayIterator, - JSMapIterator, - JSSetIterator, - JSStringIterator, - JSPromise, - JSMap, - JSSet, - JSWeakMap, - JSWeakSet, - WebAssemblyModule, - WebAssemblyInstance, - WebAssemblyGCObject, - // Start StringObject s. - StringObject, - DerivedStringObject, - // End StringObject s. 
+ Cell = 0, + Structure = 1, + String = 2, + HeapBigInt = 3, + Symbol = 4, + GetterSetter = 5, + CustomGetterSetter = 6, + APIValueWrapper = 7, + NativeExecutable = 8, + ProgramExecutable = 9, + ModuleProgramExecutable = 10, + EvalExecutable = 11, + FunctionExecutable = 12, + UnlinkedFunctionExecutable = 13, + UnlinkedProgramCodeBlock = 14, + UnlinkedModuleProgramCodeBlock = 15, + UnlinkedEvalCodeBlock = 16, + UnlinkedFunctionCodeBlock = 17, + CodeBlock = 18, + JSImmutableButterfly = 19, + JSSourceCode = 20, + JSScriptFetcher = 21, + JSScriptFetchParameters = 22, + Object = 23, + FinalObject = 24, + JSCallee = 25, + JSFunction = 26, + InternalFunction = 27, + NullSetterFunction = 28, + BooleanObject = 29, + NumberObject = 30, + ErrorInstance = 31, + GlobalProxy = 32, + DirectArguments = 33, + ScopedArguments = 34, + ClonedArguments = 35, + Array = 36, + DerivedArray = 37, + ArrayBuffer = 38, + Int8Array = 39, + Uint8Array = 40, + Uint8ClampedArray = 41, + Int16Array = 42, + Uint16Array = 43, + Int32Array = 44, + Uint32Array = 45, + Float32Array = 46, + Float64Array = 47, + BigInt64Array = 48, + BigUint64Array = 49, + DataView = 50, + GlobalObject = 51, + GlobalLexicalEnvironment = 52, + LexicalEnvironment = 53, + ModuleEnvironment = 54, + StrictEvalActivation = 55, + WithScope = 56, + ModuleNamespaceObject = 57, + ShadowRealm = 58, + RegExpObject = 59, + JSDate = 60, + ProxyObject = 61, + JSGenerator = 62, + JSAsyncGenerator = 63, + JSArrayIterator = 64, + JSMapIterator = 65, + JSSetIterator = 66, + JSStringIterator = 67, + JSPromise = 68, + JSMap = 69, + JSSet = 70, + JSWeakMap = 71, + JSWeakSet = 72, + WebAssemblyModule = 73, + WebAssemblyInstance = 74, + WebAssemblyGCObject = 75, + StringObject = 76, + DerivedStringObject = 77, InternalFieldTuple, diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index d9f8bc20ad9a3c..c280085ca2e75b 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -1802,7 +1802,7 @@ pub const MiniEventLoop = struct { pub fn filePolls(this: *MiniEventLoop) *Async.FilePoll.Store { return this.file_polls_ orelse { this.file_polls_ = this.allocator.create(Async.FilePoll.Store) catch bun.outOfMemory(); - this.file_polls_.?.* = Async.FilePoll.Store.init(this.allocator); + this.file_polls_.?.* = Async.FilePoll.Store.init(); return this.file_polls_.?; }; } diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 2b6ab6c36d1fd1..75a99bdbe43fa8 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -646,6 +646,10 @@ export fn Bun__getVerboseFetchValue() i32 { }; } +const body_value_pool_size = if (bun.heap_breakdown.enabled) 0 else 256; +pub const BodyValueRef = bun.HiveRef(JSC.WebCore.Body.Value, body_value_pool_size); +const BodyValueHiveAllocator = bun.HiveArray(BodyValueRef, body_value_pool_size).Fallback; + /// TODO: rename this to ScriptExecutionContext /// This is the shared global state for a single JS instance execution /// Today, Bun is one VM per thread, so the name "VirtualMachine" sort of makes sense @@ -781,10 +785,16 @@ pub const VirtualMachine = struct { debug_thread_id: if (Environment.allow_assert) std.Thread.Id else void, + body_value_hive_allocator: BodyValueHiveAllocator = undefined, + pub const OnUnhandledRejection = fn (*VirtualMachine, globalObject: *JSC.JSGlobalObject, JSC.JSValue) void; pub const OnException = fn (*ZigException) void; + pub fn initRequestBodyValue(this: *VirtualMachine, body: JSC.WebCore.Body.Value) !*BodyValueRef { + return BodyValueRef.init(body, 
&this.body_value_hive_allocator); + } + pub fn uwsLoop(this: *const VirtualMachine) *uws.Loop { if (comptime Environment.isPosix) { if (Environment.allow_assert) { @@ -1485,7 +1495,7 @@ pub const VirtualMachine = struct { vm.* = VirtualMachine{ .global = undefined, - .transpiler_store = RuntimeTranspilerStore.init(allocator), + .transpiler_store = RuntimeTranspilerStore.init(), .allocator = allocator, .entry_point = ServerEntryPoint{}, .bundler = bundler, @@ -1555,6 +1565,7 @@ pub const VirtualMachine = struct { } vm.configureDebugger(opts.debugger); + vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); return vm; } @@ -1600,7 +1611,7 @@ pub const VirtualMachine = struct { vm.* = VirtualMachine{ .global = undefined, - .transpiler_store = RuntimeTranspilerStore.init(allocator), + .transpiler_store = RuntimeTranspilerStore.init(), .allocator = allocator, .entry_point = ServerEntryPoint{}, .bundler = bundler, @@ -1674,6 +1685,7 @@ pub const VirtualMachine = struct { } vm.configureDebugger(opts.debugger); + vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); return vm; } @@ -1748,7 +1760,7 @@ pub const VirtualMachine = struct { vm.* = VirtualMachine{ .global = undefined, .allocator = allocator, - .transpiler_store = RuntimeTranspilerStore.init(allocator), + .transpiler_store = RuntimeTranspilerStore.init(), .entry_point = ServerEntryPoint{}, .bundler = bundler, .console = console, @@ -1816,6 +1828,7 @@ pub const VirtualMachine = struct { source_code_printer.?.* = js_printer.BufferPrinter.init(writer); source_code_printer.?.ctx.append_null_byte = false; } + vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); return vm; } diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index c205a1a0ec4b61..cb0fd1d4ca293a 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -266,9 +266,9 @@ pub const RuntimeTranspilerStore = struct { pub const Queue = bun.UnboundedQueue(TranspilerJob, .next); - pub fn init(allocator: std.mem.Allocator) RuntimeTranspilerStore { + pub fn init() RuntimeTranspilerStore { return RuntimeTranspilerStore{ - .store = TranspilerJob.Store.init(allocator), + .store = TranspilerJob.Store.init(bun.typedAllocator(TranspilerJob)), }; } @@ -340,7 +340,7 @@ pub const RuntimeTranspilerStore = struct { work_task: JSC.WorkPoolTask = .{ .callback = runFromWorkerThread }, next: ?*TranspilerJob = null, - pub const Store = bun.HiveArray(TranspilerJob, 64).Fallback; + pub const Store = bun.HiveArray(TranspilerJob, if (bun.heap_breakdown.enabled) 0 else 64).Fallback; pub const Fetcher = union(enum) { virtual_module: bun.String, diff --git a/src/bun.js/rare_data.zig b/src/bun.js/rare_data.zig index 1a65e3e287639f..b0c929ea36abc9 100644 --- a/src/bun.js/rare_data.zig +++ b/src/bun.js/rare_data.zig @@ -156,7 +156,7 @@ pub const HotMap = struct { pub fn filePolls(this: *RareData, vm: *JSC.VirtualMachine) *Async.FilePoll.Store { return this.file_polls_ orelse { this.file_polls_ = vm.allocator.create(Async.FilePoll.Store) catch unreachable; - this.file_polls_.?.* = Async.FilePoll.Store.init(vm.allocator); + this.file_polls_.?.* = Async.FilePoll.Store.init(); return this.file_polls_.?; }; } diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index f526110ab4c843..ead8eb7bad3872 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -310,6 +310,8 @@ pub const Body = 
struct { Error: JSValue, Null: void, + pub const heap_breakdown_label = "BodyValue"; + pub fn toBlobIfPossible(this: *Value) void { if (this.* == .WTFStringImpl) { if (this.WTFStringImpl.toUTF8IfNeeded(bun.default_allocator)) |bytes| { diff --git a/src/bun.js/webcore/request.zig b/src/bun.js/webcore/request.zig index 64e72373d192fa..6d5d3a40dd4037 100644 --- a/src/bun.js/webcore/request.zig +++ b/src/bun.js/webcore/request.zig @@ -49,22 +49,13 @@ const Body = JSC.WebCore.Body; const Blob = JSC.WebCore.Blob; const Response = JSC.WebCore.Response; -const body_value_pool_size: u16 = 256; -pub const BodyValueRef = bun.HiveRef(Body.Value, body_value_pool_size); -const BodyValueHiveAllocator = bun.HiveArray(BodyValueRef, body_value_pool_size).Fallback; - -var body_value_hive_allocator = BodyValueHiveAllocator.init(bun.default_allocator); - -pub fn InitRequestBodyValue(value: Body.Value) !*BodyValueRef { - return try BodyValueRef.init(value, &body_value_hive_allocator); -} // https://developer.mozilla.org/en-US/docs/Web/API/Request pub const Request = struct { url: bun.String = bun.String.empty, // NOTE(@cirospaciari): renamed to _headers to avoid direct manipulation, use getFetchHeaders, setFetchHeaders, ensureFetchHeaders and hasFetchHeaders instead _headers: ?*FetchHeaders = null, signal: ?*AbortSignal = null, - body: *BodyValueRef, + body: *JSC.BodyValueRef, method: Method = Method.GET, request_context: JSC.API.AnyRequestContext = JSC.API.AnyRequestContext.Null, https: bool = false, @@ -75,6 +66,7 @@ pub const Request = struct { const RequestMixin = BodyMixin(@This()); pub usingnamespace JSC.Codegen.JSRequest; + pub usingnamespace bun.New(@This()); pub const getText = RequestMixin.getText; pub const getBytes = RequestMixin.getBytes; @@ -101,7 +93,7 @@ pub const Request = struct { pub fn init( url: bun.String, headers: ?*FetchHeaders, - body: *BodyValueRef, + body: *JSC.BodyValueRef, method: Method, ) Request { return Request{ @@ -299,10 +291,10 @@ pub const Request = struct { } } - pub fn finalize(this: *Request) callconv(.C) void { + pub fn finalize(this: *Request) void { this.finalizeWithoutDeinit(); _ = this.body.unref(); - bun.default_allocator.destroy(this); + this.destroy(); } pub fn getRedirect( @@ -477,7 +469,8 @@ pub const Request = struct { arguments: []const JSC.JSValue, ) ?Request { var success = false; - const body = InitRequestBodyValue(.{ .Null = {} }) catch { + const vm = globalThis.bunVM(); + const body = vm.initRequestBodyValue(.{ .Null = {} }) catch { return null; }; var req = Request{ @@ -709,11 +702,7 @@ pub const Request = struct { const request = constructInto(globalThis, arguments) orelse { return null; }; - const request_ = getAllocator(globalThis).create(Request) catch { - return null; - }; - request_.* = request; - return request_; + return Request.new(request); } pub fn getBodyValue( @@ -821,8 +810,8 @@ pub const Request = struct { ) void { _ = allocator; this.ensureURL() catch {}; - - const body = InitRequestBodyValue(this.body.value.clone(globalThis)) catch { + const vm = globalThis.bunVM(); + const body = vm.initRequestBodyValue(this.body.value.clone(globalThis)) catch { globalThis.throw("Failed to clone request", .{}); return; }; @@ -841,7 +830,7 @@ pub const Request = struct { } pub fn clone(this: *Request, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) *Request { - const req = allocator.create(Request) catch unreachable; + const req = Request.new(undefined); this.cloneInto(req, allocator, globalThis, false); return req; } diff --git a/src/bun.zig 
b/src/bun.zig index 3d156f85420427..3c26115796d5a8 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -43,6 +43,13 @@ pub const callconv_inline: std.builtin.CallingConvention = if (builtin.mode == . /// FileSystem is a singleton. pub const fs_allocator = default_allocator; +pub fn typedAllocator(comptime T: type) std.mem.Allocator { + if (heap_breakdown.enabled) + return heap_breakdown.allocator(comptime T); + + return default_allocator; +} + pub const C = @import("root").C; pub const sha = @import("./sha.zig"); pub const FeatureFlags = @import("feature_flags.zig"); @@ -1910,15 +1917,17 @@ pub fn Ref(comptime T: type) type { pub fn HiveRef(comptime T: type, comptime capacity: u16) type { return struct { const HiveAllocator = HiveArray(@This(), capacity).Fallback; - ref_count: u32, allocator: *HiveAllocator, value: T, + pub fn init(value: T, allocator: *HiveAllocator) !*@This() { - var this = try allocator.tryGet(); - this.allocator = allocator; - this.ref_count = 1; - this.value = value; + const this = try allocator.tryGet(); + this.* = .{ + .ref_count = 1, + .allocator = allocator, + .value = value, + }; return this; } @@ -1928,8 +1937,9 @@ pub fn HiveRef(comptime T: type, comptime capacity: u16) type { } pub fn unref(this: *@This()) ?*@This() { - this.ref_count -= 1; - if (this.ref_count == 0) { + const ref_count = this.ref_count; + this.ref_count = ref_count - 1; + if (ref_count == 1) { if (@hasDecl(T, "deinit")) { this.value.deinit(); } diff --git a/src/hive_array.zig b/src/hive_array.zig index 0f1ee8d8e57736..c3479da816bdb4 100644 --- a/src/hive_array.zig +++ b/src/hive_array.zig @@ -67,7 +67,7 @@ pub fn HiveArray(comptime T: type, comptime capacity: u16) type { } pub const Fallback = struct { - hive: HiveArray(T, capacity), + hive: if (capacity > 0) HiveArray(T, capacity) else void, allocator: std.mem.Allocator, pub const This = @This(); @@ -75,37 +75,53 @@ pub fn HiveArray(comptime T: type, comptime capacity: u16) type { pub fn init(allocator: std.mem.Allocator) This { return .{ .allocator = allocator, - .hive = HiveArray(T, capacity).init(), + .hive = if (capacity > 0) HiveArray(T, capacity).init() else {}, }; } pub fn get(self: *This) *T { - if (self.hive.get()) |value| { - return value; + if (comptime capacity > 0) { + if (self.hive.get()) |value| { + return value; + } } return self.allocator.create(T) catch unreachable; } pub fn getAndSeeIfNew(self: *This, new: *bool) *T { - if (self.hive.get()) |value| { - new.* = false; - return value; + if (comptime capacity > 0) { + if (self.hive.get()) |value| { + new.* = false; + return value; + } } return self.allocator.create(T) catch unreachable; } pub fn tryGet(self: *This) !*T { - if (self.hive.get()) |value| { - return value; + if (comptime capacity > 0) { + if (self.hive.get()) |value| { + return value; + } } return try self.allocator.create(T); } + pub fn in(self: *const This, value: *const T) bool { + if (comptime capacity > 0) { + if (self.hive.in(value)) return true; + } + + return false; + } + pub fn put(self: *This, value: *T) void { - if (self.hive.put(value)) return; + if (comptime capacity > 0) { + if (self.hive.put(value)) return; + } self.allocator.destroy(value); } diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index 50fdd810521b38..a5b539403f7cc7 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -56,6 +56,8 @@ pub const PackageJSON = struct { production, }; + pub usingnamespace bun.New(@This()); + pub fn generateHash(package_json: *PackageJSON) void { var hashy: 
[1024]u8 = undefined; @memset(&hashy, 0); diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index f8f027e9e9aaf5..9e1a2bdec53f9a 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -2499,7 +2499,7 @@ pub const Resolver = struct { const source = logger.Source.initPathString(key_path.text, entry.contents); const file_dir = source.path.sourceDir(); - var result = (try TSConfigJSON.parse(bun.fs_allocator, r.log, source, &r.caches.json)) orelse return null; + var result = (try TSConfigJSON.parse(bun.default_allocator, r.log, source, &r.caches.json)) orelse return null; if (result.hasBaseURL()) { @@ -2559,9 +2559,7 @@ pub const Resolver = struct { ) orelse return null; } - const _pkg = try bun.default_allocator.create(PackageJSON); - _pkg.* = pkg; - return _pkg; + return PackageJSON.new(pkg); } fn dirInfoCached( diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig index f5a61a0dee30a9..22dd62cdc6659a 100644 --- a/src/resolver/tsconfig_json.zig +++ b/src/resolver/tsconfig_json.zig @@ -57,6 +57,7 @@ pub const TSConfigJSON = struct { emit_decorator_metadata: bool = false, + pub usingnamespace bun.New(@This()); pub fn hasBaseURL(tsconfig: *const TSConfigJSON) bool { return tsconfig.base_url.len > 0; } @@ -323,13 +324,7 @@ pub const TSConfigJSON = struct { assert(result.base_url.len > 0); } - const _result = allocator.create(TSConfigJSON) catch unreachable; - _result.* = result; - - if (Environment.isDebug and has_base_url) { - assert(_result.base_url.len > 0); - } - return _result; + return TSConfigJSON.new(result); } pub fn isValidTSConfigPathPattern(text: string, log: *logger.Log, source: *const logger.Source, loc: logger.Loc, allocator: std.mem.Allocator) bool { From 22e080b79f5ea4b0171a9e7d4a35f798d3dc99b4 Mon Sep 17 00:00:00 2001 From: Andrew Johnston Date: Sun, 28 Jul 2024 18:38:01 -0700 Subject: [PATCH 30/46] fix(build): use specific version of lld for link on unix (#12907) --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index fd42fe427c2728..f85c2c72a3b172 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1223,7 +1223,7 @@ endif() if(UNIX AND NOT APPLE) target_link_options(${bun} PUBLIC - -fuse-ld=lld + -fuse-ld=lld-${LLVM_VERSION} -fno-pic -static-libstdc++ -static-libgcc From cd0306a02dee8a2cc041978142f17647c168bf0e Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 28 Jul 2024 19:38:03 -0700 Subject: [PATCH 31/46] Fix debug build issue --- src/heap_breakdown.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/heap_breakdown.zig b/src/heap_breakdown.zig index fc934545ab6b8b..1f2c41f8b64ac7 100644 --- a/src/heap_breakdown.zig +++ b/src/heap_breakdown.zig @@ -78,8 +78,8 @@ pub const Zone = opaque { return false; } - fn rawFree(zone: *anyopaque, buf: [*]u8, _: u8, _: usize) void { - malloc_zone_free(@ptrCast(zone), @ptrCast(buf)); + fn rawFree(zone: *anyopaque, buf: []u8, _: u8, _: usize) void { + malloc_zone_free(@ptrCast(zone), @ptrCast(buf.ptr)); } pub const vtable = std.mem.Allocator.VTable{ From 9316a7b8afd192b43eab199473455eb7390a994a Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 28 Jul 2024 21:27:08 -0700 Subject: [PATCH 32/46] Add named allocator --- src/brotli.zig | 4 ++-- src/bun.zig | 11 ++++++++-- src/heap_breakdown.zig | 50 ++++++++++++++++++++++-------------------- src/zlib.zig | 4 ++-- 4 files changed, 39 insertions(+), 30 deletions(-) diff --git a/src/brotli.zig b/src/brotli.zig index 
082fedeb8170f7..7dcfe457da83a8 100644 --- a/src/brotli.zig +++ b/src/brotli.zig @@ -12,7 +12,7 @@ const mimalloc = bun.Mimalloc; const BrotliAllocator = struct { pub fn alloc(_: ?*anyopaque, len: usize) callconv(.C) *anyopaque { if (bun.heap_breakdown.enabled) { - const zone = bun.heap_breakdown.getZone(BrotliAllocator); + const zone = bun.heap_breakdown.getZone("brotli"); return zone.malloc_zone_malloc(len) orelse bun.outOfMemory(); } @@ -21,7 +21,7 @@ const BrotliAllocator = struct { pub fn free(_: ?*anyopaque, data: ?*anyopaque) callconv(.C) void { if (bun.heap_breakdown.enabled) { - const zone = bun.heap_breakdown.getZone(BrotliAllocator); + const zone = bun.heap_breakdown.getZone("brotli"); zone.malloc_zone_free(data); return; } diff --git a/src/bun.zig b/src/bun.zig index 3c26115796d5a8..2d92b4b15c7e5b 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -50,6 +50,13 @@ pub fn typedAllocator(comptime T: type) std.mem.Allocator { return default_allocator; } +pub inline fn namedAllocator(comptime name: [:0]const u8) std.mem.Allocator { + if (heap_breakdown.enabled) + return heap_breakdown.namedAllocator(name); + + return default_allocator; +} + pub const C = @import("root").C; pub const sha = @import("./sha.zig"); pub const FeatureFlags = @import("feature_flags.zig"); @@ -2945,7 +2952,7 @@ pub const heap_breakdown = @import("./heap_breakdown.zig"); /// to dump the heap. pub inline fn new(comptime T: type, init: T) *T { const ptr = if (heap_breakdown.enabled) - heap_breakdown.getZone(T).create(T, init) + heap_breakdown.getZoneT(T).create(T, init) else ptr: { const ptr = default_allocator.create(T) catch outOfMemory(); ptr.* = init; @@ -2971,7 +2978,7 @@ pub inline fn destroy(ptr: anytype) void { } if (comptime heap_breakdown.enabled) { - heap_breakdown.getZone(T).destroy(T, ptr); + heap_breakdown.getZoneT(T).destroy(T, ptr); } else { default_allocator.destroy(ptr); } diff --git a/src/heap_breakdown.zig b/src/heap_breakdown.zig index 1f2c41f8b64ac7..d77b12917adefa 100644 --- a/src/heap_breakdown.zig +++ b/src/heap_breakdown.zig @@ -6,44 +6,46 @@ const vm_size_t = usize; pub const enabled = Environment.allow_assert and Environment.isMac; +fn heapLabel(comptime T: type) [:0]const u8 { + const base_name = if (@hasDecl(T, "heap_label")) + T.heap_label + else + bun.meta.typeBaseName(@typeName(T)); + return "Bun__" ++ base_name; +} + pub fn allocator(comptime T: type) std.mem.Allocator { - return getZone(T).allocator(); + return namedAllocator(comptime heapLabel(T)); +} +pub fn namedAllocator(comptime name: [:0]const u8) std.mem.Allocator { + return getZone(name).allocator(); +} + +pub fn getZoneT(comptime T: type) *Zone { + return getZone(comptime heapLabel(T)); } -pub fn getZone(comptime T: type) *Zone { +pub fn getZone(comptime name: [:0]const u8) *Zone { comptime bun.assert(enabled); const static = struct { - pub var zone: std.atomic.Value(?*Zone) = .{ .raw = null }; - pub var lock: bun.Lock = bun.Lock.init(); - }; - - return static.zone.load(.monotonic) orelse brk: { - static.lock.lock(); - defer static.lock.unlock(); - - if (static.zone.load(.monotonic)) |z| { - break :brk z; + pub var zone: *Zone = undefined; + pub fn initOnce() void { + zone = Zone.init(name); } - const z = Zone.init(T); - static.zone.store(z, .monotonic); - break :brk z; + pub var once = std.once(initOnce); }; + + static.once.call(); + return static.zone; } pub const Zone = opaque { - pub fn init(comptime T: type) *Zone { + pub fn init(comptime name: [:0]const u8) *Zone { const zone = malloc_create_zone(0, 0); - const title: 
[:0]const u8 = comptime title: { - const base_name = if (@hasDecl(T, "heap_label")) - T.heap_label - else - bun.meta.typeBaseName(@typeName(T)); - break :title "Bun__" ++ base_name; - }; - malloc_set_zone_name(zone, title.ptr); + malloc_set_zone_name(zone, name.ptr); return zone; } diff --git a/src/zlib.zig b/src/zlib.zig index 7ab99ee6df26ef..b337c9b507361f 100644 --- a/src/zlib.zig +++ b/src/zlib.zig @@ -291,7 +291,7 @@ pub const ZlibError = error{ const ZlibAllocator = struct { pub fn alloc(_: *anyopaque, items: uInt, len: uInt) callconv(.C) *anyopaque { if (bun.heap_breakdown.enabled) { - const zone = bun.heap_breakdown.getZone(ZlibAllocator); + const zone = bun.heap_breakdown.getZone("zlib"); return zone.malloc_zone_calloc(items, len) orelse bun.outOfMemory(); } @@ -300,7 +300,7 @@ const ZlibAllocator = struct { pub fn free(_: *anyopaque, data: *anyopaque) callconv(.C) void { if (bun.heap_breakdown.enabled) { - const zone = bun.heap_breakdown.getZone(ZlibAllocator); + const zone = bun.heap_breakdown.getZone("zlib"); zone.malloc_zone_free(data); return; } From 263a4b70eab9a34c694fdc813a24f24ac7897245 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 29 Jul 2024 01:37:59 -0700 Subject: [PATCH 33/46] Add `BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER` feature flag --- src/bun.js/javascript.zig | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 75a99bdbe43fa8..eff41e2c3f19dd 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -985,6 +985,10 @@ pub const VirtualMachine = struct { this.hide_bun_stackframes = false; } + if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER")) { + this.transpiler_store.enabled = false; + } + if (map.map.fetchSwapRemove("NODE_CHANNEL_FD")) |kv| { const mode = if (map.map.fetchSwapRemove("NODE_CHANNEL_SERIALIZATION_MODE")) |mode_kv| IPC.Mode.fromString(mode_kv.value.value) orelse .json From 22238a4eb94aca2215dcb094380e840edaf17651 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 29 Jul 2024 04:31:39 -0700 Subject: [PATCH 34/46] Fix various Windows build issues --- .vscode/launch.json | 1 + scripts/build-boringssl.ps1 | 1 + scripts/build-mimalloc.ps1 | 1 + scripts/env.ps1 | 11 +++++------ 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 06fd6e26e21613..5326a6f15f723b 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -17,6 +17,7 @@ "cwd": "${workspaceFolder}/test", "env": { "FORCE_COLOR": "1", + "BUN_DEBUG_QUIET_LOGS": "1", "BUN_GARBAGE_COLLECTOR_LEVEL": "1", }, "console": "internalConsole", diff --git a/scripts/build-boringssl.ps1 b/scripts/build-boringssl.ps1 index 596f0b3058bfec..5a48215f4b4bbb 100755 --- a/scripts/build-boringssl.ps1 +++ b/scripts/build-boringssl.ps1 @@ -3,6 +3,7 @@ $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pip Push-Location (Join-Path $BUN_DEPS_DIR 'boringssl') try { + Remove-Item -ErrorAction SilentlyContinue -Recurse -Force build Set-Location (mkdir -Force build) Run cmake @CMAKE_FLAGS .. 
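Both build-boringssl.ps1 and the build-mimalloc.ps1 change that follows apply the same fix: delete any stale build directory before recreating it and re-running CMake, so previously cached generator and flag choices cannot leak into the new configuration. A minimal standalone sketch of that clean-reconfigure pattern follows; the path and flag values below are placeholders only, since the real scripts take $BUN_DEPS_DIR, $CMAKE_FLAGS, and the Run helper from elsewhere in the repo's scripts.

    $ErrorActionPreference = 'Stop'
    $CMAKE_FLAGS = @('-GNinja', '-DCMAKE_BUILD_TYPE=Release')   # placeholder flags

    Push-Location 'vendor/some-dependency'                      # placeholder path
    try {
        # Drop the old build tree (silently skip if it does not exist yet),
        # recreate it, and configure from a clean CMake cache.
        Remove-Item -ErrorAction SilentlyContinue -Recurse -Force build
        Set-Location (mkdir -Force build)
        cmake .. @CMAKE_FLAGS
    }
    finally {
        Pop-Location
    }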
diff --git a/scripts/build-mimalloc.ps1 b/scripts/build-mimalloc.ps1 index a62bc9bcafeab6..478c12f4b80dbe 100755 --- a/scripts/build-mimalloc.ps1 +++ b/scripts/build-mimalloc.ps1 @@ -3,6 +3,7 @@ $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pip Push-Location (Join-Path $BUN_DEPS_DIR 'mimalloc') try { + Remove-Item -ErrorAction SilentlyContinue -Recurse -Force build Set-Location (mkdir -Force build) Run cmake .. @CMAKE_FLAGS ` diff --git a/scripts/env.ps1 b/scripts/env.ps1 index cf92d5c312014f..e9492abee448e6 100755 --- a/scripts/env.ps1 +++ b/scripts/env.ps1 @@ -75,12 +75,11 @@ $CMAKE_FLAGS = @( "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded" ) -if ($env:USE_LTO -eq "1") { - if (Get-Command lld-lib -ErrorAction SilentlyContinue) { - $AR = Get-Command lld-lib -ErrorAction SilentlyContinue - $env:AR = $AR - $CMAKE_FLAGS += "-DCMAKE_AR=$AR" - } +if (Get-Command llvm-lib -ErrorAction SilentlyContinue) { + $AR_CMD = Get-Command llvm-lib -ErrorAction SilentlyContinue + $AR = $AR_CMD.Path + $env:AR = $AR + $CMAKE_FLAGS += "-DCMAKE_AR=$AR" } $env:CC = "clang-cl" From 4e5f579eeb6302cb85415c28199105d3cb0c4095 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 29 Jul 2024 06:25:38 -0700 Subject: [PATCH 35/46] Enable concurrent transpiler on Windows (#12915) --- src/feature_flags.zig | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/feature_flags.zig b/src/feature_flags.zig index 76b843e61c54a7..8bd8141608bf98 100644 --- a/src/feature_flags.zig +++ b/src/feature_flags.zig @@ -155,8 +155,7 @@ pub const export_star_redirect = false; pub const streaming_file_uploads_for_http_client = true; -// TODO: fix concurrent transpiler on Windows -pub const concurrent_transpiler = !env.isWindows; +pub const concurrent_transpiler = true; // https://github.com/oven-sh/bun/issues/5426#issuecomment-1813865316 pub const disable_auto_js_to_ts_in_node_modules = true; From a652645c1d8169b1715e675b9286764d0db31aa0 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 29 Jul 2024 11:10:55 -0700 Subject: [PATCH 36/46] Remove some dynamic memory allocations in uWebSockets (#12897) --- packages/bun-uws/capi/Makefile | 38 - packages/bun-uws/capi/examples/Broadcast.c | 157 -- .../capi/examples/BroadcastEchoServer.c | 175 --- packages/bun-uws/capi/examples/EchoServer.c | 81 - packages/bun-uws/capi/examples/HelloWorld.c | 33 - .../bun-uws/capi/examples/HelloWorldAsync.c | 123 -- .../bun-uws/capi/examples/RustHelloWorld.rs | 309 ---- packages/bun-uws/capi/examples/ServerName.c | 59 - packages/bun-uws/capi/examples/UpgradeAsync.c | 255 ---- packages/bun-uws/capi/examples/UpgradeSync.c | 117 -- packages/bun-uws/capi/libuwebsockets.cpp | 1349 ----------------- packages/bun-uws/capi/libuwebsockets.h | 260 ---- packages/bun-uws/src/HttpContext.h | 6 +- packages/bun-uws/src/HttpResponse.h | 17 +- packages/bun-uws/src/HttpResponseData.h | 21 +- src/deps/libuwsockets.cpp | 30 +- 16 files changed, 41 insertions(+), 2989 deletions(-) delete mode 100644 packages/bun-uws/capi/Makefile delete mode 100644 packages/bun-uws/capi/examples/Broadcast.c delete mode 100644 packages/bun-uws/capi/examples/BroadcastEchoServer.c delete mode 100644 packages/bun-uws/capi/examples/EchoServer.c delete mode 100644 packages/bun-uws/capi/examples/HelloWorld.c delete mode 100644 packages/bun-uws/capi/examples/HelloWorldAsync.c delete mode 100644 packages/bun-uws/capi/examples/RustHelloWorld.rs delete mode 100644 packages/bun-uws/capi/examples/ServerName.c delete mode 100644 
packages/bun-uws/capi/examples/UpgradeAsync.c delete mode 100644 packages/bun-uws/capi/examples/UpgradeSync.c delete mode 100644 packages/bun-uws/capi/libuwebsockets.cpp delete mode 100644 packages/bun-uws/capi/libuwebsockets.h diff --git a/packages/bun-uws/capi/Makefile b/packages/bun-uws/capi/Makefile deleted file mode 100644 index 583eceba0d68c6..00000000000000 --- a/packages/bun-uws/capi/Makefile +++ /dev/null @@ -1,38 +0,0 @@ -CAPI_EXAMPLE_FILES := HelloWorld HelloWorldAsync ServerName UpgradeSync UpgradeAsync EchoServer Broadcast BroadcastEchoServer -RUST_EXAMPLE_FILES := RustHelloWorld -LIBRARY_NAME := libuwebsockets - -default: - $(MAKE) capi - $(CXX) -O3 -flto -I ../src -I ../uSockets/src examples/HelloWorld.c *.o -lz -luv -lssl -lcrypto -lstdc++ ../uSockets/uSockets.a -o HelloWorld - -capi: - $(MAKE) clean - cd ../uSockets && $(CC) -pthread -DUWS_WITH_PROXY -DLIBUS_USE_OPENSSL -DLIBUS_USE_LIBUV -std=c11 -Isrc -flto -fPIC -O3 -c src/*.c src/eventing/*.c src/crypto/*.c - cd ../uSockets && $(CXX) -std=c++17 -flto -fPIC -O3 -c src/crypto/*.cpp - cd ../uSockets && $(AR) rvs uSockets.a *.o - - $(CXX) -DUWS_WITH_PROXY -c -O3 -std=c++17 -lz -luv -flto -fPIC -I ../src -I ../uSockets/src $(LIBRARY_NAME).cpp - $(AR) rvs $(LIBRARY_NAME).a $(LIBRARY_NAME).o ../uSockets/uSockets.a -shared: - $(MAKE) clean - - cd ../uSockets && $(CC) -pthread -DUWS_WITH_PROXY -DLIBUS_USE_OPENSSL -DLIBUS_USE_LIBUV -std=c11 -Isrc -flto -fPIC -O3 -c src/*.c src/eventing/*.c src/crypto/*.c - cd ../uSockets && $(CXX) -std=c++17 -flto -fPIC -O3 -c src/crypto/*.cpp - cd ../uSockets && $(AR) rvs uSockets.a *.o - - $(CXX) -DUWS_WITH_PROXY -c -O3 -std=c++17 -lz -luv -flto -fPIC -I ../src -I ../uSockets/src $(LIBRARY_NAME).cpp - $(CXX) -shared -o $(LIBRARY_NAME).so $(LIBRARY_NAME).o ../uSockets/uSockets.a -fPIC -lz -luv -lssl -lcrypto -misc: - mkdir -p ../misc && openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -passout pass:1234 -keyout ../misc/key.pem -out ../misc/cert.pem -rust: - $(MAKE) capi - rustc -C link-arg=$(LIBRARY_NAME).a -C link-args="-lstdc++ -luv" -C opt-level=3 -C lto -L all=. 
examples/RustHelloWorld.rs -o RustHelloWorld - -clean: - rm -f *.o $(CAPI_EXAMPLE_FILES) $(RUST_EXAMPLE_FILES) $(LIBRARY_NAME).a $(LIBRARY_NAME).so - -all: - for FILE in $(CAPI_EXAMPLE_FILES); do $(CXX) -O3 -flto -I ../src -I ../uSockets/src examples/$$FILE.c *.o -luv -lstdc++ ../uSockets/uSockets.a -o $$FILE & done; \ - wait - diff --git a/packages/bun-uws/capi/examples/Broadcast.c b/packages/bun-uws/capi/examples/Broadcast.c deleted file mode 100644 index ef1b091018479a..00000000000000 --- a/packages/bun-uws/capi/examples/Broadcast.c +++ /dev/null @@ -1,157 +0,0 @@ -#include "../libuwebsockets.h" -#include -#include -#include -#include -#include -#define SSL 1 - - -//Timer close helper -void uws_timer_close(struct us_timer_t *timer) -{ - struct us_timer_t *t = (struct us_timer_t *)timer; - struct timer_handler_data *data; - memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *)); - free(data); - us_timer_close(t, 0); -} -//Timer create helper -struct us_timer_t *uws_create_timer(int ms, int repeat_ms, void (*handler)(void *data), void *data) -{ - struct us_loop_t *loop = uws_get_loop(); - struct us_timer_t *delayTimer = us_create_timer(loop, 0, sizeof(void *)); - - struct timer_handler_data - { - void *data; - void (*handler)(void *data); - bool repeat; - }; - - struct timer_handler_data *timer_data = (struct timer_handler_data *)malloc(sizeof(timer_handler_data)); - timer_data->data = data; - timer_data->handler = handler; - timer_data->repeat = repeat_ms > 0; - memcpy(us_timer_ext(delayTimer), &timer_data, sizeof(struct timer_handler_data *)); - - us_timer_set( - delayTimer, [](struct us_timer_t *t) - { - /* We wrote the pointer to the timer's extension */ - struct timer_handler_data *data; - memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *)); - - data->handler(data->data); - - if (!data->repeat) - { - free(data); - us_timer_close(t, 0); - } - }, - ms, repeat_ms); - - return (struct us_timer_t *)delayTimer; -} - -/* This is a simple WebSocket "sync" upgrade example. - * You may compile it with "WITH_OPENSSL=1 make" or with "make" */ - -/* ws->getUserData returns one of these */ -struct PerSocketData { - /* Fill with user data */ -}; - -int buffer_size(const char* format, ...) { - va_list args; - va_start(args, format); - int result = vsnprintf(NULL, 0, format, args); - va_end(args); - return result + 1; // safe byte for \0 -} - -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data) -{ - if (listen_socket){ - printf("Listening on port wss://localhost:%d\n", config.port); - } -} - -void open_handler(uws_websocket_t* ws){ - - /* Open event here, you may access uws_ws_get_user_data(WS) which points to a PerSocketData struct */ - uws_ws_subscribe(SSL, ws, "broadcast", 9); -} - -void message_handler(uws_websocket_t* ws, const char* message, size_t length, uws_opcode_t opcode){ -} - -void close_handler(uws_websocket_t* ws, int code, const char* message, size_t length){ - /* You may access uws_ws_get_user_data(ws) here, but sending or - * doing any kind of I/O with the socket is not valid. 
*/ -} - -void drain_handler(uws_websocket_t* ws){ - /* Check uws_ws_get_buffered_amount(ws) here */ -} - -void ping_handler(uws_websocket_t* ws, const char* message, size_t length){ - /* You don't need to handle this one, we automatically respond to pings as per standard */ -} - -void pong_handler(uws_websocket_t* ws, const char* message, size_t length){ - - /* You don't need to handle this one either */ -} - -void on_timer_interval(void* data){ - - // broadcast the unix time as millis - - uws_app_t * app = (uws_app_t *)data; - struct timespec ts; - timespec_get(&ts, TIME_UTC); - - int64_t millis = ts.tv_sec * 1000 + ts.tv_nsec / 1000000; - - - char* message = (char*)malloc((size_t)buffer_size("%ld", millis)); - size_t message_length = sprintf(message, "%ld", millis); - - uws_publish(SSL, app, "broadcast", 9, message, message_length, uws_opcode_t::TEXT, false); - free(message); -} - -int main() -{ - - - uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - - uws_ws(SSL, app, "/*", (uws_socket_behavior_t){ - .compression = uws_compress_options_t::SHARED_COMPRESSOR, - .maxPayloadLength = 16 * 1024, - .idleTimeout = 12, - .maxBackpressure = 1 * 1024 * 1024, - .upgrade = NULL, - .open = open_handler, - .message = message_handler, - .drain = drain_handler, - .ping = ping_handler, - .pong = pong_handler, - .close = close_handler, - }); - - uws_app_listen(SSL, app, 9001, listen_handler, NULL); - - // broadcast the unix time as millis every 8 millis - uws_create_timer(8, 8, on_timer_interval, app); - - uws_app_run(SSL, app); -} \ No newline at end of file diff --git a/packages/bun-uws/capi/examples/BroadcastEchoServer.c b/packages/bun-uws/capi/examples/BroadcastEchoServer.c deleted file mode 100644 index 65e04449bbf30f..00000000000000 --- a/packages/bun-uws/capi/examples/BroadcastEchoServer.c +++ /dev/null @@ -1,175 +0,0 @@ -#include "../libuwebsockets.h" -#include -#include -#include -#include -#include - -#define SSL 1 - - -/* This is a simple WebSocket "sync" upgrade example. - * You may compile it with "WITH_OPENSSL=1 make" or with "make" */ - -typedef struct -{ - size_t length; - char *name; -} topic_t; - -/* ws->getUserData returns one of these */ -struct PerSocketData -{ - /* Fill with user data */ - topic_t **topics; - int topics_quantity; - int nr; -}; - -uws_app_t *app; - -int buffer_size(const char *format, ...) -{ - va_list args; - va_start(args, format); - int result = vsnprintf(NULL, 0, format, args); - va_end(args); - return result + 1; // safe byte for \0 -} - -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data) -{ - if (listen_socket) - { - printf("Listening on port wss://localhost:%d\n", config.port); - } -} - -void upgrade_handler(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context) -{ - - /* You may read from req only here, and COPY whatever you need into your PerSocketData. - * PerSocketData is valid from .open to .close event, accessed with uws_ws_get_user_data(ws). - * HttpRequest (req) is ONLY valid in this very callback, so any data you will need later - * has to be COPIED into PerSocketData here. 
*/ - - /* Immediately upgrading without doing anything "async" before, is simple */ - - struct PerSocketData *data = (struct PerSocketData *)malloc(sizeof(struct PerSocketData)); - data->topics = (topic_t **)calloc(32, sizeof(topic_t *)); - data->topics_quantity = 32; - data->nr = 0; - - const char *ws_key = NULL; - const char *ws_protocol = NULL; - const char *ws_extensions = NULL; - - size_t ws_key_length = uws_req_get_header(request, "sec-websocket-key", 17, &ws_key); - size_t ws_protocol_length = uws_req_get_header(request, "sec-websocket-protocol", 22, &ws_protocol); - size_t ws_extensions_length = uws_req_get_header(request, "sec-websocket-extensions", 24, &ws_extensions); - - uws_res_upgrade(SSL, - response, - (void *)data, - ws_key, - ws_key_length, - ws_protocol, - ws_protocol_length, - ws_extensions, - ws_extensions_length, - context); -} - -void open_handler(uws_websocket_t *ws) -{ - - /* Open event here, you may access uws_ws_get_user_data(ws) which points to a PerSocketData struct */ - struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws); - for (int i = 0; i < data->topics_quantity; i++) - { - - char *topic = (char *)malloc((size_t)buffer_size("%ld-%d", (uintptr_t)ws, i)); - size_t topic_length = sprintf(topic, "%ld-%d", (uintptr_t)ws, i); - - topic_t *new_topic = (topic_t*) malloc(sizeof(topic_t)); - new_topic->length = topic_length; - new_topic->name = topic; - data->topics[i] = new_topic; - uws_ws_subscribe(SSL, ws, topic, topic_length); - } -} - -void message_handler(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode) -{ - struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws); - topic_t *topic = data->topics[(size_t)(++data->nr % data->topics_quantity)]; - uws_publish(SSL, app, topic->name, topic->length, message, length, opcode, false); - - topic = data->topics[(size_t)(++data->nr % data->topics_quantity)]; - uws_ws_publish(SSL, ws, topic->name, topic->length, message, length); -} - -void close_handler(uws_websocket_t *ws, int code, const char *message, size_t length) -{ - /* You may access uws_ws_get_user_data(ws) here, but sending or - * doing any kind of I/O with the socket is not valid. 
*/ - struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws); - if (data) - { - for (int i = 0; i < data->topics_quantity; i++) - { - - topic_t* topic = data->topics[i]; - free(topic->name); - free(topic); - } - free(data->topics); - free(data); - } -} - -void drain_handler(uws_websocket_t *ws) -{ - /* Check uws_ws_get_buffered_amount(ws) here */ -} - -void ping_handler(uws_websocket_t *ws, const char *message, size_t length) -{ - /* You don't need to handle this one, we automatically respond to pings as per standard */ -} - -void pong_handler(uws_websocket_t *ws, const char *message, size_t length) -{ - - /* You don't need to handle this one either */ -} - -int main() -{ - - - uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - - uws_ws(SSL, app, "/*", (uws_socket_behavior_t){ - .compression = uws_compress_options_t::SHARED_COMPRESSOR, - .maxPayloadLength = 16 * 1024, - .idleTimeout = 12, - .maxBackpressure = 1 * 1024 * 1024, - .upgrade = upgrade_handler, - .open = open_handler, - .message = message_handler, - .drain = drain_handler, - .ping = ping_handler, - .pong = pong_handler, - .close = close_handler, - }); - - uws_app_listen(SSL, app, 9001, listen_handler, NULL); - - uws_app_run(SSL, app); -} \ No newline at end of file diff --git a/packages/bun-uws/capi/examples/EchoServer.c b/packages/bun-uws/capi/examples/EchoServer.c deleted file mode 100644 index adcf1edb5a2853..00000000000000 --- a/packages/bun-uws/capi/examples/EchoServer.c +++ /dev/null @@ -1,81 +0,0 @@ -#include "../libuwebsockets.h" -#include -#include - -#define SSL 1 - - -/* This is a simple WebSocket "sync" upgrade example. - * You may compile it with "WITH_OPENSSL=1 make" or with "make" */ - -/* ws->getUserData returns one of these */ -struct PerSocketData { - /* Fill with user data */ -}; - -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data) -{ - if (listen_socket){ - printf("Listening on port wss://localhost:%d\n", config.port); - } -} - -void open_handler(uws_websocket_t* ws){ - - /* Open event here, you may access uws_ws_get_user_data(WS) which points to a PerSocketData struct */ -} - -void message_handler(uws_websocket_t* ws, const char* message, size_t length, uws_opcode_t opcode){ - uws_ws_send(SSL, ws, message, length, opcode); -} - -void close_handler(uws_websocket_t* ws, int code, const char* message, size_t length){ - - /* You may access uws_ws_get_user_data(ws) here, but sending or - * doing any kind of I/O with the socket is not valid. 
*/ -} - -void drain_handler(uws_websocket_t* ws){ - /* Check uws_ws_get_buffered_amount(ws) here */ -} - -void ping_handler(uws_websocket_t* ws, const char* message, size_t length){ - /* You don't need to handle this one, we automatically respond to pings as per standard */ -} - -void pong_handler(uws_websocket_t* ws, const char* message, size_t length){ - - /* You don't need to handle this one either */ -} - - -int main() -{ - - - uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - - uws_ws(SSL, app, "/*", (uws_socket_behavior_t){ - .compression = uws_compress_options_t::SHARED_COMPRESSOR, - .maxPayloadLength = 16 * 1024, - .idleTimeout = 12, - .maxBackpressure = 1 * 1024 * 1024, - .upgrade = NULL, - .open = open_handler, - .message = message_handler, - .drain = drain_handler, - .ping = ping_handler, - .pong = pong_handler, - .close = close_handler, - }); - - uws_app_listen(SSL,app, 9001, listen_handler, NULL); - - - uws_app_run(SSL, app); -} \ No newline at end of file diff --git a/packages/bun-uws/capi/examples/HelloWorld.c b/packages/bun-uws/capi/examples/HelloWorld.c deleted file mode 100644 index 613ba593475ea4..00000000000000 --- a/packages/bun-uws/capi/examples/HelloWorld.c +++ /dev/null @@ -1,33 +0,0 @@ -#include "../libuwebsockets.h" -#include "libusockets.h" -#include - -#define SSL 1 - -void get_handler(uws_res_t *res, uws_req_t *req, void *user_data) -{ - uws_res_end(SSL, res, "Hello CAPI!", 11, false); -} - -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data) -{ - if (listen_socket) - { - printf("Listening on port https://localhost:%d now\n", config.port); - } -} - -int main() -{ - /* Overly simple hello world app */ - - uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - uws_app_get(SSL, app, "/*", get_handler, NULL); - uws_app_listen(SSL, app, 3000, listen_handler, NULL); - uws_app_run(SSL, app); -} diff --git a/packages/bun-uws/capi/examples/HelloWorldAsync.c b/packages/bun-uws/capi/examples/HelloWorldAsync.c deleted file mode 100644 index e22dd44c1be1f5..00000000000000 --- a/packages/bun-uws/capi/examples/HelloWorldAsync.c +++ /dev/null @@ -1,123 +0,0 @@ -#include "../libuwebsockets.h" -#include "libusockets.h" - -#include -#include -#include - -#define SSL 0 - -typedef struct { - uws_res_t* res; - bool aborted; -} async_request_t; - -//Timer close helper -void uws_timer_close(struct us_timer_t *timer) -{ - struct us_timer_t *t = (struct us_timer_t *)timer; - struct timer_handler_data *data; - memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *)); - free(data); - us_timer_close(t, 0); -} -//Timer create helper -struct us_timer_t *uws_create_timer(int ms, int repeat_ms, void (*handler)(void *data), void *data) -{ - struct us_loop_t *loop = uws_get_loop(); - struct us_timer_t *delayTimer = us_create_timer(loop, 0, sizeof(void *)); - - struct timer_handler_data - { - void *data; - void (*handler)(void *data); - bool repeat; - }; - - struct timer_handler_data *timer_data = (struct timer_handler_data *)malloc(sizeof(timer_handler_data)); - timer_data->data = data; - timer_data->handler = handler; - timer_data->repeat = 
repeat_ms > 0; - memcpy(us_timer_ext(delayTimer), &timer_data, sizeof(struct timer_handler_data *)); - - us_timer_set( - delayTimer, [](struct us_timer_t *t) - { - /* We wrote the pointer to the timer's extension */ - struct timer_handler_data *data; - memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *)); - - data->handler(data->data); - - if (!data->repeat) - { - free(data); - us_timer_close(t, 0); - } - }, - ms, repeat_ms); - - return (struct us_timer_t *)delayTimer; -} - -void on_res_aborted(uws_res_t *response, void* data){ - async_request_t* request_data = (async_request_t*)data; - /* We don't implement any kind of cancellation here, - * so simply flag us as aborted */ - request_data->aborted = true; -} - -void on_res_corked(uws_res_t *response, void* data){ - uws_res_end(SSL, response, "Hello CAPI!", 11, false); -} -void on_timer_done(void *data){ - async_request_t* request_data = (async_request_t*)data; - /* Were'nt we aborted before our async task finished? Okay, send a message! */ - if(!request_data->aborted){ - - uws_res_cork(SSL, request_data->res,on_res_corked, request_data); - } -} - -void get_handler(uws_res_t *res, uws_req_t *req, void* user_data) -{ - - /* We have to attach an abort handler for us to be aware - * of disconnections while we perform async tasks */ - async_request_t* request_data = (async_request_t*) malloc(sizeof(async_request_t)); - request_data->res = res; - request_data->aborted = false; - - uws_res_on_aborted(SSL, res, on_res_aborted, request_data); - - /* Simulate checking auth for 5 seconds. This looks like crap, never write - * code that utilize us_timer_t like this; they are high-cost and should - * not be created and destroyed more than rarely! - * Either way, here we go!*/ - uws_create_timer(1, 0, on_timer_done, request_data); -} - - -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data) -{ - if (listen_socket) - { - printf("Listening on port https://localhost:%d now\n", config.port); - } -} - -int main() -{ - /* Overly simple hello world app with async response */ - - - uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - uws_app_get(SSL, app, "/*", get_handler, NULL); - uws_app_listen(SSL, app, 3000, listen_handler, NULL); - uws_app_run(SSL, app); -} diff --git a/packages/bun-uws/capi/examples/RustHelloWorld.rs b/packages/bun-uws/capi/examples/RustHelloWorld.rs deleted file mode 100644 index b122d810ba5974..00000000000000 --- a/packages/bun-uws/capi/examples/RustHelloWorld.rs +++ /dev/null @@ -1,309 +0,0 @@ -/* automatically generated by rust-bindgen 0.59.2 */ -use std::convert::TryInto; -use std::ffi::CString; - -pub type SizeT = ::std::os::raw::c_ulong; -pub type WcharT = ::std::os::raw::c_uint; -#[repr(C)] -#[repr(align(16))] -#[derive(Debug, Copy, Clone)] -pub struct max_align_t { - pub __clang_max_align_nonce1: ::std::os::raw::c_longlong, - pub __bindgen_padding_0: u64, - pub __clang_max_align_nonce2: u128, -} -#[test] -fn bindgen_test_layout_max_align_t() { - assert_eq!( - ::std::mem::size_of::(), - 32usize, - concat!("Size of: ", stringify!(max_align_t)) - ); - assert_eq!( - ::std::mem::align_of::(), - 16usize, - concat!("Alignment of ", stringify!(max_align_t)) - ); - assert_eq!( - unsafe { - &(*(::std::ptr::null::())).__clang_max_align_nonce1 as *const _ as usize - }, - 0usize, 
- concat!( - "Offset of field: ", - stringify!(max_align_t), - "::", - stringify!(__clang_max_align_nonce1) - ) - ); - assert_eq!( - unsafe { - &(*(::std::ptr::null::())).__clang_max_align_nonce2 as *const _ as usize - }, - 16usize, - concat!( - "Offset of field: ", - stringify!(max_align_t), - "::", - stringify!(__clang_max_align_nonce2) - ) - ); -} -#[repr(C)] -#[derive(Debug, Copy, Clone)] -pub struct uws_app_s { - _unused: [u8; 0], -} -#[repr(C)] -#[derive(Debug, Copy, Clone)] -pub struct uws_req_s { - _unused: [u8; 0], -} -#[repr(C)] -#[derive(Debug, Copy, Clone)] -pub struct uws_res_s { - _unused: [u8; 0], -} - -#[repr(C)] -#[derive(Debug, Copy, Clone)] -pub struct uws_app_listen_config_s { - port: ::std::os::raw::c_int, - host: *const ::std::os::raw::c_char, - options: ::std::os::raw::c_int, -} -#[repr(C)] -#[derive(Debug, Copy, Clone)] -pub struct us_socket_context_options_s { - key_file_name: *const ::std::os::raw::c_char, - cert_file_name: *const ::std::os::raw::c_char, - passphrase: *const ::std::os::raw::c_char, - dh_params_file_name: *const ::std::os::raw::c_char, - ca_file_name: *const ::std::os::raw::c_char, - ssl_prefer_low_memory_usage: ::std::os::raw::c_int, -} - -pub type UwsAppListenConfigT = uws_app_listen_config_s; -pub type UsSocketContextOptionsT = us_socket_context_options_s; -pub struct UsSocketContextOptions<'a> { - key_file_name: &'a str, - cert_file_name: &'a str, - passphrase: &'a str, - dh_params_file_name: &'a str, - ca_file_name: &'a str, - ssl_prefer_low_memory_usage: i32, -} -pub type UwsAppT = uws_app_s; -pub type UwsReqT = uws_req_s; -pub type UwsResT = uws_res_s; -extern "C" { - pub fn uws_create_app( - ssl: ::std::os::raw::c_int, - options: UsSocketContextOptionsT, - ) -> *mut UwsAppT; - pub fn uws_app_get( - ssl: ::std::os::raw::c_int, - app: *mut UwsAppT, - pattern: *const ::std::os::raw::c_char, - handler: ::std::option::Option< - unsafe extern "C" fn( - res: *mut UwsResT, - req: *mut UwsReqT, - user_data: *mut ::std::os::raw::c_void, - ), - >, - user_data: *mut ::std::os::raw::c_void, - ); - pub fn uws_app_run(ssl: ::std::os::raw::c_int, app: *mut UwsAppT); - - pub fn uws_app_listen( - ssl: ::std::os::raw::c_int, - app: *mut UwsAppT, - port: ::std::os::raw::c_int, - handler: ::std::option::Option< - unsafe extern "C" fn( - listen_socket: *mut ::std::os::raw::c_void, - config: UwsAppListenConfigT, - user_data: *mut ::std::os::raw::c_void, - ), - >, - user_data: *mut ::std::os::raw::c_void, - ); - pub fn uws_res_end( - ssl: ::std::os::raw::c_int, - res: *mut UwsResT, - data: *const ::std::os::raw::c_char, - length: SizeT, - close_connection: bool, - ); -} - -pub struct AppResponse { - native: *mut UwsResT, -} -pub struct AppRequest { - native: *mut UwsReqT, -} -impl AppRequest { - pub fn new(native: *mut UwsReqT) -> AppRequest { - AppRequest { native: native } - } -} -impl AppResponse { - pub fn new(native: *mut UwsResT) -> AppResponse { - AppResponse:: { native: native } - } - fn end(self, message: &str) -> AppResponse { - unsafe { - let c_message = - ::std::ffi::CString::new(message).expect("Failed to create message CString"); - //This will now const fold :/ performance impact needs refactor - uws_res_end( - SSL, - self.native, - c_message.as_ptr(), - message.len().try_into().unwrap(), - false, - ); - } - self - } -} - -pub type UwsMethodHandler = fn(res: AppResponse, req: AppRequest); -pub type UwsListenHandler = - fn(listen_socket: *mut ::std::os::raw::c_void, config: UwsAppListenConfigT); - -pub struct TemplateApp { - native: *mut UwsAppT, -} - 
-extern "C" fn uws_generic_listen_handler( - listen_socket: *mut ::std::os::raw::c_void, - config: UwsAppListenConfigT, - user_data: *mut ::std::os::raw::c_void, -) { - unsafe { - let callback = &mut *(user_data as *mut UwsListenHandler); - callback(listen_socket, config); - } -} - -extern "C" fn uws_generic_method_handler( - res: *mut UwsResT, - req: *mut UwsReqT, - user_data: *mut ::std::os::raw::c_void, -) { - unsafe { - let response = AppResponse::<0>::new(res); - let request = AppRequest::new(req); - let callback = &mut *(user_data as *mut UwsMethodHandler<0>); - callback(response, request); - } -} -extern "C" fn uws_ssl_generic_method_handler( - res: *mut UwsResT, - req: *mut UwsReqT, - user_data: *mut ::std::os::raw::c_void, -) { - unsafe { - let response = AppResponse::<1>::new(res); - let request = AppRequest::new(req); - let callback = &mut *(user_data as *mut UwsMethodHandler<1>); - callback(response, request); - } -} - -impl TemplateApp { - pub fn new(config: UsSocketContextOptions) -> TemplateApp { - unsafe { - let key_file_name_s = - CString::new(config.key_file_name).expect("Failed to create key_file_name CString"); - let cert_file_name_s = CString::new(config.cert_file_name) - .expect("Failed to create cert_file_name CString"); - let passphrase_s = - CString::new(config.passphrase).expect("Failed to create passphrase CString"); - let dh_params_file_name_s = CString::new(config.dh_params_file_name) - .expect("Failed to create dh_params_file_name CString"); - let ca_file_name_s = - CString::new(config.ca_file_name).expect("Failed to create ca_file_name CString"); - - let native_options = UsSocketContextOptionsT { - key_file_name: key_file_name_s.as_ptr(), - cert_file_name: cert_file_name_s.as_ptr(), - passphrase: passphrase_s.as_ptr(), - dh_params_file_name: dh_params_file_name_s.as_ptr(), - ca_file_name: ca_file_name_s.as_ptr(), - ssl_prefer_low_memory_usage: config.ssl_prefer_low_memory_usage, - }; - TemplateApp:: { - native: uws_create_app(SSL, native_options), - } - } - } - pub fn get(self, route: &str, mut handler: UwsMethodHandler) -> TemplateApp { - unsafe { - let c_route = ::std::ffi::CString::new(route).expect("Failed to create route CString"); - if SSL == 1 { - uws_app_get( - SSL, - self.native, - c_route.as_ptr(), - std::option::Option::Some(uws_ssl_generic_method_handler), - &mut handler as *mut _ as *mut ::std::os::raw::c_void, - ); - } else { - uws_app_get( - SSL, - self.native, - c_route.as_ptr(), - std::option::Option::Some(uws_generic_method_handler), - &mut handler as *mut _ as *mut ::std::os::raw::c_void, - ); - } - } - self - } - - pub fn listen(self, port: i32, mut handler: UwsListenHandler) -> TemplateApp { - unsafe { - uws_app_listen( - SSL, - self.native, - port, - ::std::option::Option::Some(uws_generic_listen_handler), - &mut handler as *mut _ as *mut ::std::os::raw::c_void, - ); - } - self - } - - pub fn run(self) -> TemplateApp { - unsafe { - uws_app_run(SSL, self.native); - } - self - } -} -pub type App = TemplateApp<0>; -pub type SSLApp = TemplateApp<1>; - -fn main() { - let config = UsSocketContextOptions { - key_file_name: "../misc/key.pem", - cert_file_name: "../misc/cert.pem", - passphrase: "1234", - ca_file_name: "", - dh_params_file_name: "", - ssl_prefer_low_memory_usage: 0, - }; - - SSLApp::new(config) - .get("/", |res, _req| { - res.end("Hello Rust!"); - }) - .listen(3000, |_listen_socket, config| { - println!("Listening on port https://127.0.0.1:{}", config.port); - }) - .run(); -} diff --git 
a/packages/bun-uws/capi/examples/ServerName.c b/packages/bun-uws/capi/examples/ServerName.c deleted file mode 100644 index 1415823b293d14..00000000000000 --- a/packages/bun-uws/capi/examples/ServerName.c +++ /dev/null @@ -1,59 +0,0 @@ -#include "../libuwebsockets.h" -#include -#include -#define SSL 1 - - -struct us_listen_socket_t *globalListenSocket; -uws_app_t *app; -void get_handler(uws_res_t *res, uws_req_t *req, void* user_data) -{ - - uws_res_end(SSL, res, "Hello CAPI!", 11, false); -} - -void exit_handler(uws_res_t *res, uws_req_t *req, void* user_data) -{ - uws_res_end(SSL, res, "Shutting down!",14, false); - /* We use this to check graceful closedown */ - us_listen_socket_close(false, globalListenSocket); -} - -void missing_server_name_handler(const char *hostname, void* user_data){ - printf("We are missing server name: <%s>\n", hostname); - - /* Assume it is localhost, so add it */ - uws_add_server_name(SSL, app, "localhost"); -} - -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data) -{ - if (listen_socket){ - printf("Listening on port https://localhost:%d\n", config.port); - globalListenSocket = listen_socket; - }else{ - printf("Failed to listen on port https://localhost:%d\n", config.port); - } - -} - -int main() -{ - /* Overly simple hello world app (SNI)*/ - - app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - uws_missing_server_name(SSL, app, missing_server_name_handler, NULL); - uws_app_get(SSL, app, "/*", get_handler, NULL); - uws_app_get(SSL, app, "/exit", exit_handler, NULL); - uws_app_listen(SSL, app, 3000, listen_handler, NULL); - - /* Let's add a wildcard SNI to begin with */ - uws_add_server_name(SSL, app, "*.google.*"); - - uws_app_run(SSL, app); -} \ No newline at end of file diff --git a/packages/bun-uws/capi/examples/UpgradeAsync.c b/packages/bun-uws/capi/examples/UpgradeAsync.c deleted file mode 100644 index 8c6e735420df75..00000000000000 --- a/packages/bun-uws/capi/examples/UpgradeAsync.c +++ /dev/null @@ -1,255 +0,0 @@ -#include "../libuwebsockets.h" -#include "libusockets.h" -#include -#include -#include -/* This is a simple WebSocket "sync" upgrade example. 
- * You may compile it with "WITH_OPENSSL=1 make" or with "make" */ - -#define SSL 1 - -typedef struct -{ - char *value; - size_t length; -} header_t; -struct PerSocketData -{ - /* Define your user data */ - int something; -}; - -struct UpgradeData -{ - header_t *secWebSocketKey; - header_t *secWebSocketProtocol; - header_t *secWebSocketExtensions; - uws_socket_context_t *context; - uws_res_t *response; - bool aborted; -}; - -header_t *create_header(size_t length, const char* value) -{ - header_t *header = (header_t *)malloc(sizeof(header_t)); - if(length > 0){ - header->value = (char *)calloc(sizeof(char), length); - header->length = length; - memcpy(header->value, value, length); - }else{ - header->value = NULL; - header->length = 0; - } - return header; -} -void free_header(header_t *header) -{ - - free(header->value); - free(header); -} -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data) -{ - if (listen_socket) - { - printf("Listening on port wss://localhost:%d\n", config.port); - } -} -//Timer close helper -void uws_timer_close(struct us_timer_t *timer) -{ - struct us_timer_t *t = (struct us_timer_t *)timer; - struct timer_handler_data *data; - memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *)); - free(data); - us_timer_close(t, 0); -} -//Timer create helper -struct us_timer_t *uws_create_timer(int ms, int repeat_ms, void (*handler)(void *data), void *data) -{ - struct us_loop_t *loop = uws_get_loop(); - struct us_timer_t *delayTimer = us_create_timer(loop, 0, sizeof(void *)); - - struct timer_handler_data - { - void *data; - void (*handler)(void *data); - bool repeat; - }; - - struct timer_handler_data *timer_data = (struct timer_handler_data *)malloc(sizeof(timer_handler_data)); - timer_data->data = data; - timer_data->handler = handler; - timer_data->repeat = repeat_ms > 0; - memcpy(us_timer_ext(delayTimer), &timer_data, sizeof(struct timer_handler_data *)); - - us_timer_set( - delayTimer, [](struct us_timer_t *t) - { - /* We wrote the pointer to the timer's extension */ - struct timer_handler_data *data; - memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *)); - - data->handler(data->data); - - if (!data->repeat) - { - free(data); - us_timer_close(t, 0); - } - }, - ms, repeat_ms); - - return (struct us_timer_t *)delayTimer; -} -void on_timer_done(void *data) -{ - - struct UpgradeData *upgrade_data = (struct UpgradeData *)data; - - /* Were'nt we aborted before our async task finished? Okay, upgrade then! */ - if (!upgrade_data->aborted) - { - struct PerSocketData *socket_data = (struct PerSocketData *)malloc(sizeof(struct PerSocketData)); - socket_data->something = 15; - printf("Async task done, upgrading to WebSocket now!\n"); - - uws_res_upgrade(SSL, - upgrade_data->response, - (void *)socket_data, - upgrade_data->secWebSocketKey->value, - upgrade_data->secWebSocketKey->length, - upgrade_data->secWebSocketProtocol->value, - upgrade_data->secWebSocketProtocol->length, - upgrade_data->secWebSocketExtensions->value, - upgrade_data->secWebSocketExtensions->length, - upgrade_data->context); - } - else - { - printf("Async task done, but the HTTP socket was closed. 
Skipping upgrade to WebSocket!\n"); - } - free_header(upgrade_data->secWebSocketKey); - free_header(upgrade_data->secWebSocketProtocol); - free_header(upgrade_data->secWebSocketExtensions); - free(upgrade_data); -} - -void on_res_aborted(uws_res_t *response, void *data) -{ - struct UpgradeData *upgrade_data = (struct UpgradeData *)data; - /* We don't implement any kind of cancellation here, - * so simply flag us as aborted */ - upgrade_data->aborted = true; -} -void upgrade_handler(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context) -{ - - /* HttpRequest (req) is only valid in this very callback, so we must COPY the headers - * we need later on while upgrading to WebSocket. You must not access req after first return. - * Here we create a heap allocated struct holding everything we will need later on. */ - - struct UpgradeData *data = (struct UpgradeData *)malloc(sizeof(struct UpgradeData)); - data->aborted = false; - data->context = context; - data->response = response; - - const char *ws_key = NULL; - const char *ws_protocol = NULL; - const char *ws_extensions = NULL; - - size_t ws_key_length = uws_req_get_header(request, "sec-websocket-key", 17, &ws_key); - size_t ws_protocol_length = uws_req_get_header(request, "sec-websocket-protocol", 22, &ws_protocol); - size_t ws_extensions_length = uws_req_get_header(request, "sec-websocket-extensions", 24, &ws_extensions); - - - data->secWebSocketKey = create_header(ws_key_length, ws_key); - data->secWebSocketProtocol = create_header(ws_protocol_length, ws_protocol); - data->secWebSocketExtensions = create_header(ws_extensions_length, ws_extensions); - - /* We have to attach an abort handler for us to be aware - * of disconnections while we perform async tasks */ - - uws_res_on_aborted(SSL, response, on_res_aborted, data); - - /* Simulate checking auth for 5 seconds. This looks like crap, never write - * code that utilize us_timer_t like this; they are high-cost and should - * not be created and destroyed more than rarely! - * Either way, here we go!*/ - uws_create_timer(5000, 0, on_timer_done, data); -} - -void open_handler(uws_websocket_t *ws) -{ - - /* Open event here, you may access uws_ws_get_user_data(ws) which points to a PerSocketData struct. - * Here we simply validate that indeed, something == 15 as set in upgrade handler. */ - - struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws); - data->something = 15; - printf("Something is: %d\n", data->something); -} - -void message_handler(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode) -{ - - /* We simply echo whatever data we get */ - uws_ws_send(SSL, ws, message, length, opcode); -} - -void close_handler(uws_websocket_t *ws, int code, const char *message, size_t length) -{ - - /* You may access uws_ws_get_user_data(ws) here, but sending or - * doing any kind of I/O with the socket is not valid. 
*/ - struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws); - if (data) - { - free(data); - } -} - -void drain_handler(uws_websocket_t *ws) -{ - /* Check uws_ws_get_buffered_amount(ws) here */ -} - -void ping_handler(uws_websocket_t *ws, const char *message, size_t length) -{ - /* You don't need to handle this one, we automatically respond to pings as per standard */ -} - -void pong_handler(uws_websocket_t *ws, const char *message, size_t length) -{ - - /* You don't need to handle this one either */ -} - -int main() -{ - - uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - - uws_ws(SSL, app, "/*", (uws_socket_behavior_t){ - .compression = uws_compress_options_t::SHARED_COMPRESSOR, - .maxPayloadLength = 16 * 1024, - .idleTimeout = 12, - .maxBackpressure = 1 * 1024 * 1024, - .upgrade = upgrade_handler, - .open = open_handler, - .message = message_handler, - .drain = drain_handler, - .ping = ping_handler, - .pong = pong_handler, - .close = close_handler, - }); - - uws_app_listen(SSL, app, 9001, listen_handler, NULL); - - uws_app_run(SSL, app); -} \ No newline at end of file diff --git a/packages/bun-uws/capi/examples/UpgradeSync.c b/packages/bun-uws/capi/examples/UpgradeSync.c deleted file mode 100644 index 83252b5e8c18f2..00000000000000 --- a/packages/bun-uws/capi/examples/UpgradeSync.c +++ /dev/null @@ -1,117 +0,0 @@ -#include "../libuwebsockets.h" -#include -#include - -#define SSL 1 - -/* This is a simple WebSocket "sync" upgrade example. - * You may compile it with "WITH_OPENSSL=1 make" or with "make" */ - -/* uws_ws_get_user_data(ws) returns one of these */ - -struct PerSocketData -{ - /* Define your user data */ - int something; -}; - -void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data) -{ - if (listen_socket) - { - printf("Listening on port wss://localhost:%d\n", config.port); - } -} - -void upgrade_handler(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context) -{ - - /* You may read from req only here, and COPY whatever you need into your PerSocketData. - * PerSocketData is valid from .open to .close event, accessed with uws_ws_get_user_data(ws). - * HttpRequest (req) is ONLY valid in this very callback, so any data you will need later - * has to be COPIED into PerSocketData here. */ - - /* Immediately upgrading without doing anything "async" before, is simple */ - - struct PerSocketData *data = (struct PerSocketData *)malloc(sizeof(struct PerSocketData)); - data->something = 15; - - const char *ws_key = NULL; - const char *ws_protocol = NULL; - const char *ws_extensions = NULL; - - size_t ws_key_length = uws_req_get_header(request, "sec-websocket-key", 17, &ws_key); - size_t ws_protocol_length = uws_req_get_header(request, "sec-websocket-protocol", 22, &ws_protocol); - size_t ws_extensions_length = uws_req_get_header(request, "sec-websocket-extensions", 24, &ws_extensions); - - uws_res_upgrade(SSL, - response, - (void *)data, - ws_key, - ws_key_length, - ws_protocol, - ws_protocol_length, - ws_extensions, - ws_extensions_length, - context); -} - -void open_handler(uws_websocket_t *ws) -{ - - /* Open event here, you may access uws_ws_get_user_data(ws) which points to a PerSocketData struct. - * Here we simply validate that indeed, something == 15 as set in upgrade handler. 
*/ - - struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws); - data->something = 15; - printf("Something is: %d\n", data->something); -} - -void message_handler(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode) -{ - /* We simply echo whatever data we get */ - uws_ws_send(SSL, ws, message, length, opcode); -} - -void close_handler(uws_websocket_t *ws, int code, const char *message, size_t length) -{ - - /* You may access uws_ws_get_user_data(ws) here, but sending or - * doing any kind of I/O with the socket is not valid. */ - struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws); - if (data) - free(data); -} - -void drain_handler(uws_websocket_t *ws) -{ - /* Check uws_ws_get_buffered_amount(ws) here */ -} - -void ping_handler(uws_websocket_t *ws, const char *message, size_t length) -{ - /* You don't need to handle this one, we automatically respond to pings as per standard */ -} - -void pong_handler(uws_websocket_t *ws, const char *message, size_t length) -{ - - /* You don't need to handle this one either */ -} - -int main() -{ - - uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){ - /* There are example certificates in uWebSockets.js repo */ - .key_file_name = "../misc/key.pem", - .cert_file_name = "../misc/cert.pem", - .passphrase = "1234" - }); - - uws_ws(SSL, app, "/*", (uws_socket_behavior_t){.compression = uws_compress_options_t::SHARED_COMPRESSOR, .maxPayloadLength = 16 * 1024, .idleTimeout = 12, .maxBackpressure = 1 * 1024 * 1024, .upgrade = upgrade_handler, .open = open_handler, .message = message_handler, .drain = drain_handler, .ping = ping_handler, .pong = pong_handler, .close = close_handler}); - - uws_app_listen(SSL, app, 9001, listen_handler, NULL); - - uws_app_run(SSL, app); -} \ No newline at end of file diff --git a/packages/bun-uws/capi/libuwebsockets.cpp b/packages/bun-uws/capi/libuwebsockets.cpp deleted file mode 100644 index 5ca58a5d0b416f..00000000000000 --- a/packages/bun-uws/capi/libuwebsockets.cpp +++ /dev/null @@ -1,1349 +0,0 @@ -/* - * Copyright 2022 Ciro Spaciari - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ -// clang-format off -#include "libuwebsockets.h" -#include -#include "App.h" -#include "ClientApp.h" -#include -extern "C" -{ - - uws_app_t *uws_create_app(int ssl, struct us_bun_socket_context_options_t options) - { - if (ssl) - { - uWS::SocketContextOptions sco; - memcpy(&sco, &options, sizeof(uWS::SocketContextOptions)); - - return (uws_app_t *)new uWS::SSLApp(sco); - } - - return (uws_app_t *)new uWS::App(); - } - - void uws_app_get(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->get(pattern, nullptr); - return; - } - uwsApp->get(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->get(pattern, nullptr); - return; - } - uwsApp->get(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_post(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->post(pattern, nullptr); - return; - } - uwsApp->post(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->post(pattern, nullptr); - return; - } - uwsApp->post(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_options(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->options(pattern, nullptr); - return; - } - uwsApp->options(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->options(pattern, nullptr); - return; - } - uwsApp->options(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_delete(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->del(pattern, nullptr); - return; - } - uwsApp->del(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->del(pattern, nullptr); - return; - } - uwsApp->del(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_patch(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->patch(pattern, nullptr); - return; - } - uwsApp->patch(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->patch(pattern, nullptr); 
- return; - } - uwsApp->patch(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_put(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->put(pattern, nullptr); - return; - } - uwsApp->put(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->put(pattern, nullptr); - return; - } - uwsApp->put(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_head(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->head(pattern, nullptr); - return; - } - uwsApp->head(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->head(pattern, nullptr); - return; - } - uwsApp->head(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_connect(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->connect(pattern, nullptr); - return; - } - uwsApp->connect(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->connect(pattern, nullptr); - return; - } - uwsApp->connect(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - - void uws_app_trace(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->trace(pattern, nullptr); - return; - } - uwsApp->trace(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->trace(pattern, nullptr); - return; - } - uwsApp->trace(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - void uws_app_any(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (handler == nullptr) - { - uwsApp->any(pattern, nullptr); - return; - } - uwsApp->any(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - if (handler == nullptr) - { - uwsApp->any(pattern, nullptr); - return; - } - uwsApp->any(pattern, [handler, user_data](auto *res, auto *req) - { handler((uws_res_t *)res, (uws_req_t *)req, user_data); }); - } - } - - void uws_app_run(int ssl, uws_app_t *app) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - 
uwsApp->run(); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->run(); - } - } - - void uws_app_listen(int ssl, uws_app_t *app, int port, uws_listen_handler handler, void *user_data) - { - uws_app_listen_config_t config; - config.port = port; - config.host = nullptr; - config.options = 0; - - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->listen(port, [handler, config, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, config, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - - uwsApp->listen(port, [handler, config, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, config, user_data); }); - } - } - - void uws_app_listen_with_config(int ssl, uws_app_t *app, uws_app_listen_config_t config, uws_listen_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->listen(config.host, config.port, config.options, [handler, config, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, config, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->listen(config.host, config.port, config.options, [handler, config, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, config, user_data); }); - } - } - - /* callback, path to unix domain socket */ - void uws_app_listen_domain(int ssl, uws_app_t *app, const char *domain, size_t domain_length, uws_listen_domain_handler handler, void *user_data) - { - - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->listen([handler, domain, domain_length, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, domain, domain_length, 0, user_data); }, - std::string(domain, domain_length)); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - - uwsApp->listen([handler, domain, domain_length, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, domain, domain_length, 0, user_data); }, - std::string(domain, domain_length)); - } - } - - /* callback, path to unix domain socket */ - void uws_app_listen_domain_with_options(int ssl, uws_app_t *app, const char *domain, size_t domain_length, int options, uws_listen_domain_handler handler, void *user_data) - { - - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->listen( - options, [handler, domain, domain_length, options, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, domain, domain_length, options, user_data); }, - std::string(domain, domain_length)); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - - uwsApp->listen( - options, [handler, domain, domain_length, options, user_data](struct us_listen_socket_t *listen_socket) - { handler((struct us_listen_socket_t *)listen_socket, domain, domain_length, options, user_data); }, - std::string(domain, domain_length)); - } - } - void uws_app_domain(int ssl, uws_app_t *app, const char *server_name) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->domain(server_name); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->domain(server_name); - } - } - void uws_app_destroy(int ssl, uws_app_t *app) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - delete uwsApp; - } - else - { - - uWS::App 
*uwsApp = (uWS::App *)app; - delete uwsApp; - } - } - - bool uws_constructor_failed(int ssl, uws_app_t *app) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - if (!uwsApp) - return true; - return uwsApp->constructorFailed(); - } - uWS::App *uwsApp = (uWS::App *)app; - if (!uwsApp) - return true; - return uwsApp->constructorFailed(); - } - - unsigned int uws_num_subscribers(int ssl, uws_app_t *app, const char *topic, size_t topic_length) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - return uwsApp->numSubscribers(std::string_view(topic, topic_length)); - } - uWS::App *uwsApp = (uWS::App *)app; - return uwsApp->numSubscribers(std::string_view(topic, topic_length)); - } - bool uws_publish(int ssl, uws_app_t *app, const char *topic, size_t topic_length, const char *message, size_t message_length, uws_opcode_t opcode, bool compress) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - return uwsApp->publish(std::string_view(topic, topic_length), std::string_view(message, message_length), (uWS::OpCode)(unsigned char)opcode, compress); - } - uWS::App *uwsApp = (uWS::App *)app; - return uwsApp->publish(std::string_view(topic, topic_length), std::string_view(message, message_length), (uWS::OpCode)(unsigned char)opcode, compress); - } - void *uws_get_native_handle(int ssl, uws_app_t *app) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - return uwsApp->getNativeHandle(); - } - uWS::App *uwsApp = (uWS::App *)app; - return uwsApp->getNativeHandle(); - } - void uws_remove_server_name(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->removeServerName(std::string(hostname_pattern, hostname_pattern_length)); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->removeServerName(std::string(hostname_pattern, hostname_pattern_length)); - } - } - void uws_add_server_name(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->addServerName(std::string(hostname_pattern, hostname_pattern_length)); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->addServerName(std::string(hostname_pattern, hostname_pattern_length)); - } - } - void uws_add_server_name_with_options(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length, struct us_bun_socket_context_options_t options) - { - uWS::SocketContextOptions sco; - memcpy(&sco, &options, sizeof(uWS::SocketContextOptions)); - - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->addServerName(std::string(hostname_pattern, hostname_pattern_length), sco); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->addServerName(std::string(hostname_pattern, hostname_pattern_length), sco); - } - } - - void uws_missing_server_name(int ssl, uws_app_t *app, uws_missing_server_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->missingServerName([handler, user_data](auto hostname) - { handler(hostname, strlen(hostname), user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->missingServerName([handler, user_data](auto hostname) - { handler(hostname, strlen(hostname), user_data); }); - } - } - void uws_filter(int ssl, uws_app_t *app, uws_filter_handler handler, void *user_data) - { - if (ssl) - { - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - uwsApp->filter([handler, 
user_data](auto res, auto i) - { handler((uws_res_t *)res, i, user_data); }); - } - else - { - uWS::App *uwsApp = (uWS::App *)app; - - uwsApp->filter([handler, user_data](auto res, auto i) - { handler((uws_res_t *)res, i, user_data); }); - } - } - - void uws_ws(int ssl, uws_app_t *app, const char *pattern, uws_socket_behavior_t behavior, void *user_data) - { - if (ssl) - { - auto generic_handler = uWS::SSLApp::WebSocketBehavior{ - .compression = (uWS::CompressOptions)(uint64_t)behavior.compression, - .maxPayloadLength = behavior.maxPayloadLength, - .idleTimeout = behavior.idleTimeout, - .maxBackpressure = behavior.maxBackpressure, - .closeOnBackpressureLimit = behavior.closeOnBackpressureLimit, - .resetIdleTimeoutOnSend = behavior.resetIdleTimeoutOnSend, - .sendPingsAutomatically = behavior.sendPingsAutomatically, - .maxLifetime = behavior.maxLifetime, - }; - - if (behavior.upgrade) - generic_handler.upgrade = [behavior, user_data](auto *res, auto *req, auto *context) - { - behavior.upgrade((uws_res_t *)res, (uws_req_t *)req, (uws_socket_context_t *)context, user_data); - }; - if (behavior.open) - generic_handler.open = [behavior, user_data](auto *ws) - { - behavior.open((uws_websocket_t *)ws, user_data); - }; - if (behavior.message) - generic_handler.message = [behavior, user_data](auto *ws, auto message, auto opcode) - { - behavior.message((uws_websocket_t *)ws, message.data(), message.length(), (uws_opcode_t)opcode, user_data); - }; - if (behavior.drain) - generic_handler.drain = [behavior, user_data](auto *ws) - { - behavior.drain((uws_websocket_t *)ws, user_data); - }; - if (behavior.ping) - generic_handler.ping = [behavior, user_data](auto *ws, auto message) - { - behavior.ping((uws_websocket_t *)ws, message.data(), message.length(), user_data); - }; - if (behavior.pong) - generic_handler.pong = [behavior, user_data](auto *ws, auto message) - { - behavior.pong((uws_websocket_t *)ws, message.data(), message.length(), user_data); - }; - if (behavior.close) - generic_handler.close = [behavior, user_data](auto *ws, int code, auto message) - { - behavior.close((uws_websocket_t *)ws, code, message.data(), message.length(), user_data); - }; - if (behavior.subscription) - generic_handler.subscription = [behavior, user_data](auto *ws, auto topic, int subscribers, int old_subscribers) - { - behavior.subscription((uws_websocket_t *)ws, topic.data(), topic.length(), subscribers, old_subscribers, user_data); - }; - uWS::SSLApp *uwsApp = (uWS::SSLApp *)app; - - uwsApp->ws(pattern, std::move(generic_handler)); - } - else - { - auto generic_handler = uWS::App::WebSocketBehavior{ - .compression = (uWS::CompressOptions)(uint64_t)behavior.compression, - .maxPayloadLength = behavior.maxPayloadLength, - .idleTimeout = behavior.idleTimeout, - .maxBackpressure = behavior.maxBackpressure, - .closeOnBackpressureLimit = behavior.closeOnBackpressureLimit, - .resetIdleTimeoutOnSend = behavior.resetIdleTimeoutOnSend, - .sendPingsAutomatically = behavior.sendPingsAutomatically, - .maxLifetime = behavior.maxLifetime, - }; - - if (behavior.upgrade) - generic_handler.upgrade = [behavior, user_data](auto *res, auto *req, auto *context) - { - behavior.upgrade((uws_res_t *)res, (uws_req_t *)req, (uws_socket_context_t *)context, user_data); - }; - if (behavior.open) - generic_handler.open = [behavior, user_data](auto *ws) - { - behavior.open((uws_websocket_t *)ws, user_data); - }; - if (behavior.message) - generic_handler.message = [behavior, user_data](auto *ws, auto message, auto opcode) - { - 
behavior.message((uws_websocket_t *)ws, message.data(), message.length(), (uws_opcode_t)opcode, user_data); - }; - if (behavior.drain) - generic_handler.drain = [behavior, user_data](auto *ws) - { - behavior.drain((uws_websocket_t *)ws, user_data); - }; - if (behavior.ping) - generic_handler.ping = [behavior, user_data](auto *ws, auto message) - { - behavior.ping((uws_websocket_t *)ws, message.data(), message.length(), user_data); - }; - if (behavior.pong) - generic_handler.pong = [behavior, user_data](auto *ws, auto message) - { - behavior.pong((uws_websocket_t *)ws, message.data(), message.length(), user_data); - }; - if (behavior.close) - generic_handler.close = [behavior, user_data](auto *ws, int code, auto message) - { - behavior.close((uws_websocket_t *)ws, code, message.data(), message.length(), user_data); - }; - if (behavior.subscription) - generic_handler.subscription = [behavior, user_data](auto *ws, auto topic, int subscribers, int old_subscribers) - { - behavior.subscription((uws_websocket_t *)ws, topic.data(), topic.length(), subscribers, old_subscribers, user_data); - }; - uWS::App *uwsApp = (uWS::App *)app; - uwsApp->ws(pattern, std::move(generic_handler)); - } - } - - void *uws_ws_get_user_data(int ssl, uws_websocket_t *ws) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return *uws->getUserData(); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return *uws->getUserData(); - } - - void uws_ws_close(int ssl, uws_websocket_t *ws) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - uws->close(); - } - else - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - uws->close(); - } - } - - uws_sendstatus_t uws_ws_send(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->send(std::string_view(message, length), (uWS::OpCode)(unsigned char)opcode); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->send(std::string_view(message, length), (uWS::OpCode)(unsigned char)opcode); - } - - uws_sendstatus_t uws_ws_send_with_options(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, bool compress, bool fin) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->send(std::string_view(message, length), (uWS::OpCode)(unsigned char)opcode, compress, fin); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->send(std::string_view(message, length), (uWS::OpCode)(unsigned char)opcode, compress, fin); - } - - uws_sendstatus_t uws_ws_send_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendFragment(std::string_view(message, length), compress); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendFragment(std::string_view(message, length), compress); - } - uws_sendstatus_t uws_ws_send_first_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendFirstFragment(std::string_view(message, length), uWS::OpCode::BINARY, compress); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendFirstFragment(std::string_view(message, length), uWS::OpCode::BINARY, compress); - } - 
uws_sendstatus_t uws_ws_send_first_fragment_with_opcode(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, bool compress) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendFirstFragment(std::string_view(message, length), (uWS::OpCode)(unsigned char)opcode, compress); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendFirstFragment(std::string_view(message, length), (uWS::OpCode)(unsigned char)opcode, compress); - } - uws_sendstatus_t uws_ws_send_last_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendLastFragment(std::string_view(message, length), compress); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return (uws_sendstatus_t)uws->sendLastFragment(std::string_view(message, length), compress); - } - - void uws_ws_end(int ssl, uws_websocket_t *ws, int code, const char *message, size_t length) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - uws->end(code, std::string_view(message, length)); - } - else - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - uws->end(code, std::string_view(message, length)); - } - } - - void uws_ws_cork(int ssl, uws_websocket_t *ws, void (*handler)(void *user_data), void *user_data) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - uws->cork([handler, user_data]() - { handler(user_data); }); - } - else - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - - uws->cork([handler, user_data]() - { handler(user_data); }); - } - } - bool uws_ws_subscribe(int ssl, uws_websocket_t *ws, const char *topic, size_t length) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->subscribe(std::string_view(topic, length)); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->subscribe(std::string_view(topic, length)); - } - bool uws_ws_unsubscribe(int ssl, uws_websocket_t *ws, const char *topic, size_t length) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->unsubscribe(std::string_view(topic, length)); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->unsubscribe(std::string_view(topic, length)); - } - - bool uws_ws_is_subscribed(int ssl, uws_websocket_t *ws, const char *topic, size_t length) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->isSubscribed(std::string_view(topic, length)); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->isSubscribed(std::string_view(topic, length)); - } - void uws_ws_iterate_topics(int ssl, uws_websocket_t *ws, void (*callback)(const char *topic, size_t length, void *user_data), void *user_data) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - uws->iterateTopics([callback, user_data](auto topic) - { callback(topic.data(), topic.length(), user_data); }); - } - else - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - - uws->iterateTopics([callback, user_data](auto topic) - { callback(topic.data(), topic.length(), user_data); }); - } - } - - bool uws_ws_publish(int ssl, uws_websocket_t *ws, const char *topic, size_t topic_length, const char *message, size_t message_length) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->publish(std::string_view(topic, topic_length), std::string_view(message, message_length)); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return 
uws->publish(std::string_view(topic, topic_length), std::string_view(message, message_length)); - } - - bool uws_ws_publish_with_options(int ssl, uws_websocket_t *ws, const char *topic, size_t topic_length, const char *message, size_t message_length, uws_opcode_t opcode, bool compress) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->publish(std::string_view(topic, topic_length), std::string_view(message, message_length), (uWS::OpCode)(unsigned char)opcode, compress); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->publish(std::string_view(topic, topic_length), std::string_view(message, message_length), (uWS::OpCode)(unsigned char)opcode, compress); - } - - unsigned int uws_ws_get_buffered_amount(int ssl, uws_websocket_t *ws) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->getBufferedAmount(); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - return uws->getBufferedAmount(); - } - - size_t uws_ws_get_remote_address(int ssl, uws_websocket_t *ws, const char **dest) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - std::string_view value = uws->getRemoteAddress(); - *dest = value.data(); - return value.length(); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - - std::string_view value = uws->getRemoteAddress(); - *dest = value.data(); - return value.length(); - } - - size_t uws_ws_get_remote_address_as_text(int ssl, uws_websocket_t *ws, const char **dest) - { - if (ssl) - { - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - - std::string_view value = uws->getRemoteAddressAsText(); - *dest = value.data(); - return value.length(); - } - uWS::WebSocket *uws = (uWS::WebSocket *)ws; - - std::string_view value = uws->getRemoteAddressAsText(); - *dest = value.data(); - return value.length(); - } - void uws_res_close(int ssl, uws_res_t *res) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->close(); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->close(); - } - } - void uws_res_end(int ssl, uws_res_t *res, const char *data, size_t length, bool close_connection) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->end(std::string_view(data, length), close_connection); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->end(std::string_view(data, length), close_connection); - } - } - - size_t uws_res_get_remote_address(int ssl, uws_res_t *res, const char **dest) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - std::string_view value = uwsRes->getRemoteAddress(); - *dest = value.data(); - return value.length(); - } - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - - std::string_view value = uwsRes->getRemoteAddress(); - *dest = value.data(); - return value.length(); - } - - size_t uws_res_get_remote_address_as_text(int ssl, uws_res_t *res, const char **dest) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - std::string_view value = uwsRes->getRemoteAddressAsText(); - *dest = value.data(); - return value.length(); - } - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - - std::string_view value = uwsRes->getRemoteAddressAsText(); - *dest = value.data(); - return value.length(); - } -#ifdef UWS_WITH_PROXY - size_t uws_res_get_proxied_remote_address(int ssl, uws_res_t *res, const char **dest) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - std::string_view value = 
uwsRes->getProxiedRemoteAddress(); - *dest = value.data(); - return value.length(); - } - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - - std::string_view value = uwsRes->getProxiedRemoteAddress(); - *dest = value.data(); - return value.length(); - } - - size_t uws_res_get_proxied_remote_address_as_text(int ssl, uws_res_t *res, const char **dest) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - std::string_view value = uwsRes->getProxiedRemoteAddressAsText(); - *dest = value.data(); - return value.length(); - } - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - - std::string_view value = uwsRes->getProxiedRemoteAddressAsText(); - *dest = value.data(); - return value.length(); - } -#endif - uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uint64_t total_size, bool close_connection) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - // uwsRes->end(std::string_view(data, length), close_connection); - std::pair result = uwsRes->tryEnd(std::string_view(data, length), total_size, close_connection); - return uws_try_end_result_t{ - .ok = result.first, - .has_responded = result.second, - }; - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - std::pair result = uwsRes->tryEnd(std::string_view(data, length), total_size); - return uws_try_end_result_t{ - .ok = result.first, - .has_responded = result.second, - }; - } - } - - void uws_res_cork(int ssl, uws_res_t *res, void (*callback)(uws_res_t *res, void *user_data), void *user_data) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->cork([=]() - { callback(res, user_data); }); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->cork([=]() - { callback(res, user_data); }); - } - } - - void uws_res_pause(int ssl, uws_res_t *res) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->pause(); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->pause(); - } - } - - void uws_res_resume(int ssl, uws_res_t *res) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->pause(); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->pause(); - } - } - - void uws_res_write_continue(int ssl, uws_res_t *res) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeContinue(); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeContinue(); - } - } - - void uws_res_write_status(int ssl, uws_res_t *res, const char *status, size_t length) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeStatus(std::string_view(status, length)); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeStatus(std::string_view(status, length)); - } - } - - void uws_res_write_header(int ssl, uws_res_t *res, const char *key, size_t key_length, const char *value, size_t value_length) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeHeader(std::string_view(key, key_length), std::string_view(value, value_length)); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeHeader(std::string_view(key, key_length), std::string_view(value, value_length)); - } - } - void uws_res_write_header_int(int ssl, uws_res_t *res, const char *key, size_t key_length, uint64_t value) - { - if (ssl) 
- { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeHeader(std::string_view(key, key_length), value); - } - else - { - - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->writeHeader(std::string_view(key, key_length), value); - } - } - - void uws_res_end_without_body(int ssl, uws_res_t *res, bool close_connection) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->endWithoutBody(std::nullopt, close_connection); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->endWithoutBody(std::nullopt, close_connection); - } - } - - bool uws_res_write(int ssl, uws_res_t *res, const char *data, size_t length) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->write(std::string_view(data, length)); - } - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->write(std::string_view(data, length)); - } - uint64_t uws_res_get_write_offset(int ssl, uws_res_t *res) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->getWriteOffset(); - } - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->getWriteOffset(); - } - void uws_res_override_write_offset(int ssl, uws_res_t *res, uint64_t offset) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->overrideWriteOffset(offset); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->overrideWriteOffset(offset); - } - } - bool uws_res_has_responded(int ssl, uws_res_t *res) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->hasResponded(); - } - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->hasResponded(); - } - - void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uint64_t, void *optional_data), void *optional_data) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->onWritable([handler, res, optional_data](uint64_t a) - { return handler(res, a, optional_data); }); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->onWritable([handler, res, optional_data](uint64_t a) - { return handler(res, a, optional_data); }); - } - } - - void uws_res_on_aborted(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, void *optional_data), void *optional_data) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->onAborted([handler, res, optional_data] - { handler(res, optional_data); }); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->onAborted([handler, res, optional_data] - { handler(res, optional_data); }); - } - } - - void uws_res_on_data(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, const char *chunk, size_t chunk_length, bool is_end, void *optional_data), void *optional_data) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->onData([handler, res, optional_data](auto chunk, bool is_end) - { handler(res, chunk.data(), chunk.length(), is_end, optional_data); }); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - uwsRes->onData([handler, res, optional_data](auto chunk, bool is_end) - { handler(res, chunk.data(), chunk.length(), is_end, optional_data); }); - } - } - - bool uws_req_is_ancient(uws_req_t *res) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - return uwsReq->isAncient(); - } - - bool 
uws_req_get_yield(uws_req_t *res) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - return uwsReq->getYield(); - } - - void uws_req_set_yield(uws_req_t *res, bool yield) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - return uwsReq->setYield(yield); - } - - size_t uws_req_get_url(uws_req_t *res, const char **dest) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - std::string_view value = uwsReq->getUrl(); - *dest = value.data(); - return value.length(); - } - - size_t uws_req_get_full_url(uws_req_t *res, const char **dest) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - std::string_view value = uwsReq->getFullUrl(); - *dest = value.data(); - return value.length(); - } - - size_t uws_req_get_method(uws_req_t *res, const char **dest) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - std::string_view value = uwsReq->getMethod(); - *dest = value.data(); - return value.length(); - } - - size_t uws_req_get_case_sensitive_method(uws_req_t *res, const char **dest) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - std::string_view value = uwsReq->getCaseSensitiveMethod(); - *dest = value.data(); - return value.length(); - } - - void uws_req_for_each_header(uws_req_t *res, uws_get_headers_server_handler handler, void *user_data) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - for (auto header : *uwsReq) - { - handler(header.first.data(), header.first.length(), header.second.data(), header.second.length(), user_data); - } - } - - size_t uws_req_get_header(uws_req_t *res, const char *lower_case_header, size_t lower_case_header_length, const char **dest) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - - std::string_view value = uwsReq->getHeader(std::string_view(lower_case_header, lower_case_header_length)); - *dest = value.data(); - return value.length(); - } - - size_t uws_req_get_query(uws_req_t *res, const char *key, size_t key_length, const char **dest) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - - std::string_view value = uwsReq->getQuery(std::string_view(key, key_length)); - *dest = value.data(); - return value.length(); - } - - size_t uws_req_get_parameter(uws_req_t *res, unsigned short index, const char **dest) - { - uWS::HttpRequest *uwsReq = (uWS::HttpRequest *)res; - std::string_view value = uwsReq->getParameter(index); - *dest = value.data(); - return value.length(); - } - - void uws_res_upgrade(int ssl, uws_res_t *res, void *data, const char *sec_web_socket_key, size_t sec_web_socket_key_length, const char *sec_web_socket_protocol, size_t sec_web_socket_protocol_length, const char *sec_web_socket_extensions, size_t sec_web_socket_extensions_length, uws_socket_context_t *ws) - { - - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - - uwsRes->template upgrade(data ? std::move(data) : NULL, - std::string_view(sec_web_socket_key, sec_web_socket_key_length), - std::string_view(sec_web_socket_protocol, sec_web_socket_protocol_length), - std::string_view(sec_web_socket_extensions, sec_web_socket_extensions_length), - (struct us_socket_context_t *)ws); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - - uwsRes->template upgrade(data ? 
std::move(data) : NULL, - std::string_view(sec_web_socket_key, sec_web_socket_key_length), - std::string_view(sec_web_socket_protocol, sec_web_socket_protocol_length), - std::string_view(sec_web_socket_extensions, sec_web_socket_extensions_length), - (struct us_socket_context_t *)ws); - } - } - - void *uws_res_get_native_handle(int ssl, uws_res_t *res) - { - if (ssl) - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->getNativeHandle(); - } - else - { - uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; - return uwsRes->getNativeHandle(); - } - } - - struct us_loop_t *uws_get_loop() - { - return (struct us_loop_t *)uWS::Loop::get(); - } - - struct us_loop_t *uws_get_loop_with_native(void *existing_native_loop) - { - return (struct us_loop_t *)uWS::Loop::get(existing_native_loop); - } - void uws_loop_defer(us_loop_t *loop, void( cb(void *user_data) ), void *user_data) - { - uWS::Loop *loop_instance = (uWS::Loop *)loop; - loop_instance->defer([cb, user_data](){ - cb(user_data); - }); - - } -} diff --git a/packages/bun-uws/capi/libuwebsockets.h b/packages/bun-uws/capi/libuwebsockets.h deleted file mode 100644 index 14de7fb6519249..00000000000000 --- a/packages/bun-uws/capi/libuwebsockets.h +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Copyright 2022 Ciro Spaciari - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
- */ -// clang-format off -#ifndef LIBUWS_CAPI_HEADER -#define LIBUWS_CAPI_HEADER - -#include -#include -#include -#include "libusockets.h" - -#ifdef __cplusplus -extern "C" -{ -#endif -#ifdef _WIN32 -# define DLL_EXPORT __declspec( dllexport ) -#else -# define DLL_EXPORT -#endif - - DLL_EXPORT typedef enum - { - /* These are not actual compression options */ - _COMPRESSOR_MASK = 0x00FF, - _DECOMPRESSOR_MASK = 0x0F00, - /* Disabled, shared, shared are "special" values */ - DISABLED = 0, - SHARED_COMPRESSOR = 1, - SHARED_DECOMPRESSOR = 1 << 8, - /* Highest 4 bits describe decompressor */ - DEDICATED_DECOMPRESSOR_32KB = 15 << 8, - DEDICATED_DECOMPRESSOR_16KB = 14 << 8, - DEDICATED_DECOMPRESSOR_8KB = 13 << 8, - DEDICATED_DECOMPRESSOR_4KB = 12 << 8, - DEDICATED_DECOMPRESSOR_2KB = 11 << 8, - DEDICATED_DECOMPRESSOR_1KB = 10 << 8, - DEDICATED_DECOMPRESSOR_512B = 9 << 8, - /* Same as 32kb */ - DEDICATED_DECOMPRESSOR = 15 << 8, - - /* Lowest 8 bit describe compressor */ - DEDICATED_COMPRESSOR_3KB = 9 << 4 | 1, - DEDICATED_COMPRESSOR_4KB = 9 << 4 | 2, - DEDICATED_COMPRESSOR_8KB = 10 << 4 | 3, - DEDICATED_COMPRESSOR_16KB = 11 << 4 | 4, - DEDICATED_COMPRESSOR_32KB = 12 << 4 | 5, - DEDICATED_COMPRESSOR_64KB = 13 << 4 | 6, - DEDICATED_COMPRESSOR_128KB = 14 << 4 | 7, - DEDICATED_COMPRESSOR_256KB = 15 << 4 | 8, - /* Same as 256kb */ - DEDICATED_COMPRESSOR = 15 << 4 | 8 - } uws_compress_options_t; - - DLL_EXPORT typedef enum - { - CONTINUATION = 0, - TEXT = 1, - BINARY = 2, - CLOSE = 8, - PING = 9, - PONG = 10 - } uws_opcode_t; - - DLL_EXPORT typedef enum - { - BACKPRESSURE, - SUCCESS, - DROPPED - } uws_sendstatus_t; - - DLL_EXPORT typedef struct - { - - int port; - const char *host; - int options; - } uws_app_listen_config_t; - - DLL_EXPORT typedef struct { - bool ok; - bool has_responded; - } uws_try_end_result_t; - - DLL_EXPORT struct uws_app_s; - DLL_EXPORT struct uws_req_s; - DLL_EXPORT struct uws_res_s; - DLL_EXPORT struct uws_websocket_s; - DLL_EXPORT struct uws_header_iterator_s; - DLL_EXPORT typedef struct uws_app_s uws_app_t; - DLL_EXPORT typedef struct uws_req_s uws_req_t; - DLL_EXPORT typedef struct uws_res_s uws_res_t; - DLL_EXPORT typedef struct uws_socket_context_s uws_socket_context_t; - DLL_EXPORT typedef struct uws_websocket_s uws_websocket_t; - - DLL_EXPORT typedef void (*uws_websocket_handler)(uws_websocket_t *ws, void* user_data); - DLL_EXPORT typedef void (*uws_websocket_message_handler)(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, void* user_data); - DLL_EXPORT typedef void (*uws_websocket_ping_pong_handler)(uws_websocket_t *ws, const char *message, size_t length, void* user_data); - DLL_EXPORT typedef void (*uws_websocket_close_handler)(uws_websocket_t *ws, int code, const char *message, size_t length, void* user_data); - DLL_EXPORT typedef void (*uws_websocket_upgrade_handler)(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context, void* user_data); - DLL_EXPORT typedef void (*uws_websocket_subscription_handler)(uws_websocket_t *ws, const char *topic_name, size_t topic_name_length, int new_number_of_subscriber, int old_number_of_subscriber, void* user_data); - - DLL_EXPORT typedef struct - { - uws_compress_options_t compression; - /* Maximum message size we can receive */ - unsigned int maxPayloadLength; - /* 2 minutes timeout is good */ - unsigned short idleTimeout; - /* 64kb backpressure is probably good */ - unsigned int maxBackpressure; - bool closeOnBackpressureLimit; - /* This one depends on kernel timeouts and is a bad default 
*/ - bool resetIdleTimeoutOnSend; - /* A good default, esp. for newcomers */ - bool sendPingsAutomatically; - /* Maximum socket lifetime in seconds before forced closure (defaults to disabled) */ - unsigned short maxLifetime; - uws_websocket_upgrade_handler upgrade; - uws_websocket_handler open; - uws_websocket_message_handler message; - uws_websocket_handler drain; - uws_websocket_ping_pong_handler ping; - uws_websocket_ping_pong_handler pong; - uws_websocket_close_handler close; - uws_websocket_subscription_handler subscription; - } uws_socket_behavior_t; - - DLL_EXPORT typedef void (*uws_listen_handler)(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data); - DLL_EXPORT typedef void (*uws_listen_domain_handler)(struct us_listen_socket_t *listen_socket, const char* domain, size_t domain_length, int options, void *user_data); - DLL_EXPORT typedef void (*uws_method_handler)(uws_res_t *response, uws_req_t *request, void *user_data); - DLL_EXPORT typedef void (*uws_filter_handler)(uws_res_t *response, int, void *user_data); - DLL_EXPORT typedef void (*uws_missing_server_handler)(const char *hostname, size_t hostname_length, void *user_data); - DLL_EXPORT typedef void (*uws_get_headers_server_handler)(const char *header_name, size_t header_name_size, const char *header_value, size_t header_value_size, void *user_data); - //Basic HTTP - DLL_EXPORT uws_app_t *uws_create_app(int ssl, struct us_bun_socket_context_options_t options); - DLL_EXPORT void uws_app_destroy(int ssl, uws_app_t *app); - DLL_EXPORT void uws_app_get(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_post(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_options(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_delete(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_patch(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_put(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_head(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_connect(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_trace(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - DLL_EXPORT void uws_app_any(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data); - - DLL_EXPORT void uws_app_run(int ssl, uws_app_t *); - - DLL_EXPORT void uws_app_listen(int ssl, uws_app_t *app, int port, uws_listen_handler handler, void *user_data); - DLL_EXPORT void uws_app_listen_with_config(int ssl, uws_app_t *app, uws_app_listen_config_t config, uws_listen_handler handler, void *user_data); - DLL_EXPORT void uws_app_listen_domain(int ssl, uws_app_t *app, const char *domain, size_t domain_length, uws_listen_domain_handler handler, void *user_data); - DLL_EXPORT void uws_app_listen_domain_with_options(int ssl, uws_app_t *app, const char *domain,size_t domain_length, int options, uws_listen_domain_handler handler, void *user_data); - DLL_EXPORT void uws_app_domain(int ssl, uws_app_t *app, const char* server_name, size_t 
server_name_length); - - DLL_EXPORT bool uws_constructor_failed(int ssl, uws_app_t *app); - DLL_EXPORT unsigned int uws_num_subscribers(int ssl, uws_app_t *app, const char *topic, size_t topic_length); - DLL_EXPORT bool uws_publish(int ssl, uws_app_t *app, const char *topic, size_t topic_length, const char *message, size_t message_length, uws_opcode_t opcode, bool compress); - DLL_EXPORT void *uws_get_native_handle(int ssl, uws_app_t *app); - DLL_EXPORT void uws_remove_server_name(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length); - DLL_EXPORT void uws_add_server_name(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length); - DLL_EXPORT void uws_add_server_name_with_options(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length, struct us_bun_socket_context_options_t options); - DLL_EXPORT void uws_missing_server_name(int ssl, uws_app_t *app, uws_missing_server_handler handler, void *user_data); - DLL_EXPORT void uws_filter(int ssl, uws_app_t *app, uws_filter_handler handler, void *user_data); - - //WebSocket - DLL_EXPORT void uws_ws(int ssl, uws_app_t *app, const char *pattern, uws_socket_behavior_t behavior, void* user_data); - DLL_EXPORT void *uws_ws_get_user_data(int ssl, uws_websocket_t *ws); - DLL_EXPORT void uws_ws_close(int ssl, uws_websocket_t *ws); - DLL_EXPORT uws_sendstatus_t uws_ws_send(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode); - DLL_EXPORT uws_sendstatus_t uws_ws_send_with_options(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, bool compress, bool fin); - DLL_EXPORT uws_sendstatus_t uws_ws_send_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress); - DLL_EXPORT uws_sendstatus_t uws_ws_send_first_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress); - DLL_EXPORT uws_sendstatus_t uws_ws_send_first_fragment_with_opcode(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, bool compress); - DLL_EXPORT uws_sendstatus_t uws_ws_send_last_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress); - DLL_EXPORT void uws_ws_end(int ssl, uws_websocket_t *ws, int code, const char *message, size_t length); - DLL_EXPORT void uws_ws_cork(int ssl, uws_websocket_t *ws, void (*handler)(void *user_data), void *user_data); - - DLL_EXPORT bool uws_ws_subscribe(int ssl, uws_websocket_t *ws, const char *topic, size_t length); - DLL_EXPORT bool uws_ws_unsubscribe(int ssl, uws_websocket_t *ws, const char *topic, size_t length); - DLL_EXPORT bool uws_ws_is_subscribed(int ssl, uws_websocket_t *ws, const char *topic, size_t length); - DLL_EXPORT void uws_ws_iterate_topics(int ssl, uws_websocket_t *ws, void (*callback)(const char *topic, size_t length, void *user_data), void *user_data); - DLL_EXPORT bool uws_ws_publish(int ssl, uws_websocket_t *ws, const char *topic, size_t topic_length, const char *message, size_t message_length); - DLL_EXPORT bool uws_ws_publish_with_options(int ssl, uws_websocket_t *ws, const char *topic, size_t topic_length, const char *message, size_t message_length, uws_opcode_t opcode, bool compress); - DLL_EXPORT unsigned int uws_ws_get_buffered_amount(int ssl, uws_websocket_t *ws); - DLL_EXPORT size_t uws_ws_get_remote_address(int ssl, uws_websocket_t *ws, const char **dest); - DLL_EXPORT size_t uws_ws_get_remote_address_as_text(int ssl, uws_websocket_t *ws, 
const char **dest); - DLL_EXPORT void uws_res_get_remote_address_info(uws_res_t *res, const char **dest, size_t *length, unsigned int *port); - - //Response - DLL_EXPORT void uws_res_end(int ssl, uws_res_t *res, const char *data, size_t length, bool close_connection); - DLL_EXPORT uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uint64_t total_size, bool close_connection); - DLL_EXPORT void uws_res_cork(int ssl, uws_res_t *res, void(*callback)(uws_res_t *res, void* user_data) ,void* user_data); - DLL_EXPORT void uws_res_pause(int ssl, uws_res_t *res); - DLL_EXPORT void uws_res_resume(int ssl, uws_res_t *res); - DLL_EXPORT void uws_res_write_continue(int ssl, uws_res_t *res); - DLL_EXPORT void uws_res_write_status(int ssl, uws_res_t *res, const char *status, size_t length); - DLL_EXPORT void uws_res_write_header(int ssl, uws_res_t *res, const char *key, size_t key_length, const char *value, size_t value_length); - - DLL_EXPORT void uws_res_write_header_int(int ssl, uws_res_t *res, const char *key, size_t key_length, uint64_t value); - DLL_EXPORT void uws_res_end_without_body(int ssl, uws_res_t *res, bool close_connection); - DLL_EXPORT bool uws_res_write(int ssl, uws_res_t *res, const char *data, size_t length); - DLL_EXPORT uint64_t uws_res_get_write_offset(int ssl, uws_res_t *res); - DLL_EXPORT void uws_res_override_write_offset(int ssl, uws_res_t *res, uint64_t offset); - DLL_EXPORT bool uws_res_has_responded(int ssl, uws_res_t *res); - DLL_EXPORT void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uint64_t, void *optional_data), void *user_data); - DLL_EXPORT void uws_res_on_aborted(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, void *optional_data), void *optional_data); - DLL_EXPORT void uws_res_on_data(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, const char *chunk, size_t chunk_length, bool is_end, void *optional_data), void *optional_data); - DLL_EXPORT void uws_res_upgrade(int ssl, uws_res_t *res, void *data, const char *sec_web_socket_key, size_t sec_web_socket_key_length, const char *sec_web_socket_protocol, size_t sec_web_socket_protocol_length, const char *sec_web_socket_extensions, size_t sec_web_socket_extensions_length, uws_socket_context_t *ws); - DLL_EXPORT size_t uws_res_get_remote_address(int ssl, uws_res_t *res, const char **dest); - DLL_EXPORT size_t uws_res_get_remote_address_as_text(int ssl, uws_res_t *res, const char **dest); -#ifdef UWS_WITH_PROXY - DLL_EXPORT size_t uws_res_get_proxied_remote_address(int ssl, uws_res_t *res, const char **dest); - DLL_EXPORT size_t uws_res_get_proxied_remote_address_as_text(int ssl, uws_res_t *res, const char **dest); -#endif - DLL_EXPORT void *uws_res_get_native_handle(int ssl, uws_res_t *res); - - //Request - DLL_EXPORT bool uws_req_is_ancient(uws_req_t *res); - DLL_EXPORT bool uws_req_get_yield(uws_req_t *res); - DLL_EXPORT void uws_req_set_yield(uws_req_t *res, bool yield); - DLL_EXPORT size_t uws_req_get_url(uws_req_t *res, const char **dest); - DLL_EXPORT size_t uws_req_get_full_url(uws_req_t *res, const char **dest); - DLL_EXPORT size_t uws_req_get_method(uws_req_t *res, const char **dest); - DLL_EXPORT size_t uws_req_get_case_sensitive_method(uws_req_t *res, const char **dest); - - DLL_EXPORT size_t uws_req_get_header(uws_req_t *res, const char *lower_case_header, size_t lower_case_header_length, const char **dest); - DLL_EXPORT void uws_req_for_each_header(uws_req_t *res, uws_get_headers_server_handler handler, void 
*user_data);
- DLL_EXPORT size_t uws_req_get_query(uws_req_t *res, const char *key, size_t key_length, const char **dest);
- DLL_EXPORT size_t uws_req_get_parameter(uws_req_t *res, unsigned short index, const char **dest);
-
- DLL_EXPORT struct us_loop_t *uws_get_loop();
- DLL_EXPORT struct us_loop_t *uws_get_loop_with_native(void* existing_native_loop);
- DLL_EXPORT void uws_loop_defer(struct us_loop_t *loop, void( cb(void *user_data) ), void *user_data);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/packages/bun-uws/src/HttpContext.h b/packages/bun-uws/src/HttpContext.h
index 96373612047f5a..43c68fd90f9e42 100644
--- a/packages/bun-uws/src/HttpContext.h
+++ b/packages/bun-uws/src/HttpContext.h
@@ -124,7 +124,7 @@ struct HttpContext {
 /* Signal broken HTTP request only if we have a pending request */
 if (httpResponseData->onAborted) {
- httpResponseData->onAborted();
+ httpResponseData->onAborted((HttpResponse<SSL> *)s, httpResponseData->userData);
 }
 /* Destruct socket ext */
@@ -258,7 +258,7 @@ struct HttpContext {
 }
 /* We might respond in the handler, so do not change timeout after this */
- httpResponseData->inStream(data, fin);
+ httpResponseData->inStream(static_cast<HttpResponse<SSL>*>(user), data.data(), data.length(), fin, httpResponseData->userData);
 /* Was the socket closed? */
 if (us_socket_is_closed(SSL, (struct us_socket_t *) user)) {
@@ -366,7 +366,7 @@ struct HttpContext {
 /* We expect the developer to return whether or not write was successful (true).
 * If write was never called, the developer should still return true so that we may drain. */
- bool success = httpResponseData->callOnWritable(httpResponseData->offset);
+ bool success = httpResponseData->callOnWritable((HttpResponse<SSL> *)asyncSocket, httpResponseData->offset);
 /* The developer indicated that their onWritable failed. */
 if (!success) {
diff --git a/packages/bun-uws/src/HttpResponse.h b/packages/bun-uws/src/HttpResponse.h
index e4f325113c0191..15e7057a11f667 100644
--- a/packages/bun-uws/src/HttpResponse.h
+++ b/packages/bun-uws/src/HttpResponse.h
@@ -558,10 +558,11 @@ struct HttpResponse : public AsyncSocket<SSL> {
 }
 /* Attach handler for writable HTTP response */
- HttpResponse *onWritable(MoveOnlyFunction<bool(uint64_t)> &&handler) {
+ HttpResponse *onWritable(void* userData, HttpResponseData<SSL>::OnWritableCallback handler) {
 HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
- httpResponseData->onWritable = std::move(handler);
+ httpResponseData->userData = userData;
+ httpResponseData->onWritable = handler;
 return this;
 }
@@ -574,10 +575,11 @@ struct HttpResponse : public AsyncSocket<SSL> {
 }
 /* Attach handler for aborted HTTP request */
- HttpResponse *onAborted(MoveOnlyFunction<void()> &&handler) {
+ HttpResponse *onAborted(void* userData, HttpResponseData<SSL>::OnAbortedCallback handler) {
 HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
-
- httpResponseData->onAborted = std::move(handler);
+
+ httpResponseData->userData = userData;
+ httpResponseData->onAborted = handler;
 return this;
 }
 HttpResponse* clearOnWritableAndAborted() {
@@ -594,9 +596,10 @@ struct HttpResponse : public AsyncSocket<SSL> {
 return this;
 }
 /* Attach a read handler for data sent. Will be called with FIN set true if last segment. */
- void onData(MoveOnlyFunction<void(std::string_view, bool)> &&handler) {
+ void onData(void* userData, HttpResponseData<SSL>::OnDataCallback handler) {
 HttpResponseData<SSL> *data = getHttpResponseData();
- data->inStream = std::move(handler);
+ data->userData = userData;
+ data->inStream = handler;
 /* Always reset this counter here */
 data->received_bytes_per_timeout = 0;
diff --git a/packages/bun-uws/src/HttpResponseData.h b/packages/bun-uws/src/HttpResponseData.h
index 6be7b993037924..00320e4ab9e1f6 100644
--- a/packages/bun-uws/src/HttpResponseData.h
+++ b/packages/bun-uws/src/HttpResponseData.h
@@ -33,6 +33,10 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
 template <bool> friend struct HttpResponse;
 template <bool> friend struct HttpContext;
 public:
+ using OnWritableCallback = bool (*)(uWS::HttpResponse<SSL>*, uint64_t, void*);
+ using OnAbortedCallback = void (*)(uWS::HttpResponse<SSL>*, void*);
+ using OnDataCallback = void (*)(uWS::HttpResponse<SSL>* response, const char* chunk, size_t chunk_length, bool, void*);
+
 /* When we are done with a response we mark it like so */
 void markDone() {
 onAborted = nullptr;
@@ -46,15 +50,15 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
 }
 /* Caller of onWritable. It is possible onWritable calls markDone so we need to borrow it. */
- bool callOnWritable(uint64_t offset) {
+ bool callOnWritable( uWS::HttpResponse<SSL>* response, uint64_t offset) {
 /* Borrow real onWritable */
- MoveOnlyFunction<bool(uint64_t)> borrowedOnWritable = std::move(onWritable);
+ auto* borrowedOnWritable = std::move(onWritable);
 /* Set onWritable to placeholder */
- onWritable = [](uint64_t) {return true;};
+ onWritable = [](uWS::HttpResponse<SSL>*, uint64_t, void*) {return true;};
 /* Run borrowed onWritable */
- bool ret = borrowedOnWritable(offset);
+ bool ret = borrowedOnWritable(response, offset, userData);
 /* If we still have onWritable (the placeholder) then move back the real one */
 if (onWritable) {
@@ -74,10 +78,13 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
 HTTP_CONNECTION_CLOSE = 16 // used
 };
+ /* Shared context pointer */
+ void* userData = nullptr;
+
 /* Per socket event handlers */
- MoveOnlyFunction<bool(uint64_t)> onWritable;
- MoveOnlyFunction<void()> onAborted;
- MoveOnlyFunction<void(std::string_view, bool)> inStream; // onData
+ OnWritableCallback onWritable = nullptr;
+ OnAbortedCallback onAborted = nullptr;
+ OnDataCallback inStream = nullptr;
 /* Outgoing offset */
 uint64_t offset = 0;
diff --git a/src/deps/libuwsockets.cpp b/src/deps/libuwsockets.cpp
index 2741bfca8d6e61..26da8228b220e3 100644
--- a/src/deps/libuwsockets.cpp
+++ b/src/deps/libuwsockets.cpp
@@ -1224,14 +1224,14 @@ extern "C"
 if (ssl)
 {
 uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res;
- uwsRes->onWritable([handler, res, opcional_data](uint64_t a)
- { return handler(res, a, opcional_data); });
+ auto onWritable = reinterpret_cast<bool (*)(uWS::HttpResponse<true>*, uint64_t, void*)>(handler);
+ uwsRes->onWritable(opcional_data, onWritable);
 }
 else
 {
 uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
- uwsRes->onWritable([handler, res, opcional_data](uint64_t a)
- { return handler(res, a, opcional_data); });
+ auto onWritable = reinterpret_cast<bool (*)(uWS::HttpResponse<false>*, uint64_t, void*)>(handler);
+ uwsRes->onWritable(opcional_data, onWritable);
 }
 }
@@ -1252,11 +1252,10 @@ extern "C"
 if (ssl)
 {
 uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res;
+ auto* onAborted = reinterpret_cast<void (*)(uWS::HttpResponse<true>*, void*)>(handler);
 if (handler)
 {
- uwsRes->onAborted(
- [handler, res, opcional_data]
- { handler(res, opcional_data); });
+ uwsRes->onAborted(opcional_data, onAborted);
 }
 else
 {
@@ -1266,11 +1265,10 @@ extern "C"
 else
 {
 uWS::HttpResponse *uwsRes = 
(uWS::HttpResponse *)res; + auto* onAborted = reinterpret_cast*, void*)>(handler); if (handler) { - uwsRes->onAborted( - [handler, res, opcional_data] - { handler(res, opcional_data); }); + uwsRes->onAborted(opcional_data, onAborted); } else { @@ -1288,21 +1286,21 @@ extern "C" if (ssl) { uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; + auto onData = reinterpret_cast* response, const char* chunk, size_t chunk_length, bool, void*)>(handler); if (handler) { - uwsRes->onData([handler, res, opcional_data](auto chunk, bool is_end) - { handler(res, chunk.data(), chunk.length(), is_end, opcional_data); }); + uwsRes->onData(opcional_data, onData); } else { - uwsRes->onData(nullptr); + uwsRes->onData(opcional_data, nullptr); } } else { uWS::HttpResponse *uwsRes = (uWS::HttpResponse *)res; + auto onData = reinterpret_cast* response, const char* chunk, size_t chunk_length, bool, void*)>(handler); if (handler) { - uwsRes->onData([handler, res, opcional_data](auto chunk, bool is_end) - { handler(res, chunk.data(), chunk.length(), is_end, opcional_data); }); + uwsRes->onData(opcional_data, onData); } else { - uwsRes->onData(nullptr); + uwsRes->onData(opcional_data, nullptr); } } } From 4dd70619c98a1a4d2b9db5adf3f3da84c2841877 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Mon, 29 Jul 2024 14:39:50 -0700 Subject: [PATCH 37/46] Enable buildkite (#12653) --- .buildkite/bootstrap.yml | 5 +- .buildkite/ci.yml | 859 ++------------------- .buildkite/scripts/build-bun.sh | 55 ++ .buildkite/scripts/build-cpp.sh | 34 + .buildkite/scripts/build-deps.ps1 | 0 .buildkite/scripts/build-deps.sh | 22 + .buildkite/scripts/build-old-js.sh | 40 + .buildkite/scripts/build-zig.sh | 80 ++ .buildkite/scripts/download-artifact.ps1 | 47 ++ .buildkite/scripts/download-artifact.sh | 46 ++ .buildkite/scripts/env.sh | 118 +++ .buildkite/scripts/prepare-build.sh | 90 +++ .buildkite/scripts/upload-artifact.ps1 | 47 ++ .buildkite/scripts/upload-artifact.sh | 54 ++ .buildkite/scripts/upload-release.sh | 190 +++-- .github/workflows/build-darwin.yml | 286 ------- .github/workflows/build-linux.yml | 64 -- .github/workflows/build-windows.yml | 348 --------- .github/workflows/build-zig.yml | 122 --- .github/workflows/ci.yml | 245 ------ .github/workflows/comment.yml | 55 -- .github/workflows/create-release-build.yml | 183 ----- .github/workflows/release.yml | 3 + .github/workflows/run-test.yml | 224 ------ .github/workflows/upload.yml | 94 --- scripts/all-dependencies.sh | 6 +- scripts/build-bun-cpp.ps1 | 40 +- scripts/build-bun-cpp.sh | 48 -- scripts/build-bun-zig.sh | 95 --- scripts/build-tinycc.ps1 | 2 - scripts/buildkite-link-bun.ps1 | 33 +- scripts/buildkite-link-bun.sh | 80 -- scripts/env.ps1 | 35 +- scripts/env.sh | 2 +- scripts/experimental-build.mjs | 320 ++++---- scripts/runner.node.mjs | 36 +- test/cli/run/run-crash-handler.test.ts | 16 +- test/js/bun/dns/resolve-dns.test.ts | 2 +- 38 files changed, 1099 insertions(+), 2927 deletions(-) create mode 100755 .buildkite/scripts/build-bun.sh create mode 100755 .buildkite/scripts/build-cpp.sh create mode 100644 .buildkite/scripts/build-deps.ps1 create mode 100755 .buildkite/scripts/build-deps.sh create mode 100755 .buildkite/scripts/build-old-js.sh create mode 100755 .buildkite/scripts/build-zig.sh create mode 100755 .buildkite/scripts/download-artifact.ps1 create mode 100755 .buildkite/scripts/download-artifact.sh create mode 100755 .buildkite/scripts/env.sh create mode 100755 .buildkite/scripts/prepare-build.sh create mode 100755 .buildkite/scripts/upload-artifact.ps1 
create mode 100755 .buildkite/scripts/upload-artifact.sh delete mode 100644 .github/workflows/build-darwin.yml delete mode 100644 .github/workflows/build-linux.yml delete mode 100644 .github/workflows/build-windows.yml delete mode 100644 .github/workflows/build-zig.yml delete mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/comment.yml delete mode 100644 .github/workflows/create-release-build.yml delete mode 100644 .github/workflows/run-test.yml delete mode 100644 .github/workflows/upload.yml delete mode 100755 scripts/build-bun-cpp.sh delete mode 100755 scripts/build-bun-zig.sh delete mode 100755 scripts/buildkite-link-bun.sh diff --git a/.buildkite/bootstrap.yml b/.buildkite/bootstrap.yml index 1fef8b6cc0992b..b0b84616b3eb50 100644 --- a/.buildkite/bootstrap.yml +++ b/.buildkite/bootstrap.yml @@ -10,9 +10,10 @@ steps: blocked_state: "running" - label: ":pipeline:" - command: "buildkite-agent pipeline upload .buildkite/ci.yml" agents: - queue: "build-linux" + queue: "build-darwin" + command: + - ".buildkite/scripts/prepare-build.sh" - if: "build.branch == 'main' && !build.pull_request.repository.fork" label: ":github:" diff --git a/.buildkite/ci.yml b/.buildkite/ci.yml index e586d3ec538a27..b7aa6f29b23cb2 100644 --- a/.buildkite/ci.yml +++ b/.buildkite/ci.yml @@ -25,16 +25,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "darwin-aarch64-build-zig" label: ":darwin: aarch64 - build-zig" @@ -42,15 +34,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh darwin aarch64" + - "./.buildkite/scripts/build-zig.sh darwin aarch64" - key: "darwin-aarch64-build-cpp" label: ":darwin: aarch64 - build-cpp" @@ -58,37 +43,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/build-bun-cpp.sh" - - - key: "darwin-aarch64-build-bun-nolto" - label: ":darwin: aarch64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "darwin-aarch64-build-deps" - - "darwin-aarch64-build-zig" - - "darwin-aarch64-build-cpp" - agents: - queue: "build-darwin" - os: "darwin" - arch: "aarch64" - artifact_paths: - - "bun-darwin-aarch64-nolto.zip" - - "bun-darwin-aarch64-nolto-profile.zip" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag darwin-aarch64 --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "darwin-aarch64-build-bun" label: ":darwin: aarch64 - build-bun" @@ -100,16 +56,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "bun-darwin-aarch64.zip" - - "bun-darwin-aarch64-profile.zip" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: 
"$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag darwin-aarch64" + - "./.buildkite/scripts/build-bun.sh" - key: "darwin-aarch64-test-macos-14" label: ":darwin: 14 aarch64 - test-bun" @@ -129,77 +77,20 @@ steps: limit: 3 - signal: SIGTERM limit: 3 - depends_on: - - "darwin-aarch64-build-bun-nolto" + - "darwin-aarch64-build-bun" agents: queue: "test-darwin" os: "darwin" arch: "aarch64" release: "14" command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun-nolto" + - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun" - key: "darwin-aarch64-test-macos-13" label: ":darwin: 13 aarch64 - test-bun" if: "build.branch != 'main'" parallelism: 3 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-aarch64-build-bun-nolto" - agents: - queue: "test-darwin" - os: "darwin" - arch: "aarch64" - release: "13" - command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun-nolto" - - - key: "darwin-aarch64-test-macos-14-smoke" - label: ":darwin: 14 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-aarch64-build-bun" - agents: - queue: "test-darwin" - os: "darwin" - arch: "aarch64" - release: "14" - command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun --smoke 0.05" - - - key: "darwin-aarch64-test-macos-13-smoke" - label: ":darwin: 13 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -222,7 +113,7 @@ steps: arch: "aarch64" release: "13" command: - - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step darwin-aarch64-build-bun" # macOS x64 - key: "darwin-x64" @@ -234,16 +125,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "x64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "darwin-x64-build-zig" label: ":darwin: x64 - build-zig" @@ -251,15 +134,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh darwin x64" + - "./.buildkite/scripts/build-zig.sh darwin x64" - key: "darwin-x64-build-cpp" label: ":darwin: x64 - build-cpp" @@ -267,37 +143,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "x64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/build-bun-cpp.sh" - - - key: "darwin-x64-build-bun-nolto" - label: ":darwin: x64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - 
"darwin-x64-build-deps" - - "darwin-x64-build-zig" - - "darwin-x64-build-cpp" - agents: - queue: "build-darwin" - os: "darwin" - arch: "x64" - artifact_paths: - - "bun-darwin-x64-nolto.zip" - - "bun-darwin-x64-nolto-profile.zip" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag darwin-x64 --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "darwin-x64-build-bun" label: ":darwin: x64 - build-bun" @@ -309,21 +156,13 @@ steps: queue: "build-darwin" os: "darwin" arch: "x64" - artifact_paths: - - "bun-darwin-x64.zip" - - "bun-darwin-x64-profile.zip" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag darwin-x64" + - "./.buildkite/scripts/build-bun.sh" - key: "darwin-x64-test-macos-14" label: ":darwin: 14 x64 - test-bun" if: "build.branch != 'main'" - parallelism: 3 + parallelism: 2 soft_fail: - exit_status: 2 retry: @@ -339,47 +178,19 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "darwin-x64-build-bun-nolto" + - "darwin-x64-build-bun" agents: queue: "test-darwin" os: "darwin" arch: "x64" release: "14" command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun-nolto" + - "./scripts/runner.node.mjs --step darwin-x64-build-bun" - key: "darwin-x64-test-macos-13" label: ":darwin: 13 x64 - test-bun" if: "build.branch != 'main'" - parallelism: 3 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-x64-build-bun-nolto" - agents: - queue: "test-darwin" - os: "darwin" - arch: "x64" - release: "13" - command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun-nolto" - - - key: "darwin-x64-test-macos-14-smoke" - label: ":darwin: 14 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 + parallelism: 2 soft_fail: - exit_status: 2 retry: @@ -396,41 +207,13 @@ steps: limit: 3 depends_on: - "darwin-x64-build-bun" - agents: - queue: "test-darwin" - os: "darwin" - arch: "x64" - release: "14" - command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun --smoke 0.05" - - - key: "darwin-x64-test-macos-13-smoke" - label: ":darwin: 13 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "darwin-x64-build-bun-nolto" agents: queue: "test-darwin" os: "darwin" arch: "x64" release: "13" command: - - "./scripts/runner.node.mjs --step darwin-x64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step darwin-x64-build-bun" # Linux aarch64 - key: "linux-aarch64" @@ -442,16 +225,8 @@ steps: queue: "build-linux" os: "linux" arch: "aarch64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "linux-aarch64-build-zig" label: ":linux: aarch64 
- build-zig" @@ -459,15 +234,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh linux aarch64" + - "./.buildkite/scripts/build-zig.sh linux aarch64" - key: "linux-aarch64-build-cpp" label: ":linux: aarch64 - build-cpp" @@ -475,37 +243,8 @@ steps: queue: "build-linux" os: "linux" arch: "aarch64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/build-bun-cpp.sh" - - - key: "linux-aarch64-build-bun-nolto" - label: ":linux: aarch64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "linux-aarch64-build-deps" - - "linux-aarch64-build-zig" - - "linux-aarch64-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "aarch64" - artifact_paths: - - "bun-linux-aarch64-nolto.zip" - - "bun-linux-aarch64-nolto-profile.zip" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-aarch64 --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "linux-aarch64-build-bun" label: ":linux: aarch64 - build-bun" @@ -517,16 +256,8 @@ steps: queue: "build-linux" os: "linux" arch: "aarch64" - artifact_paths: - - "bun-linux-aarch64.zip" - - "bun-linux-aarch64-profile.zip" - env: - CPU_TARGET: "native" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-aarch64" + - "./.buildkite/scripts/build-bun.sh" - key: "linux-aarch64-test-debian-12" label: ":debian: 12 aarch64 - test-bun" @@ -547,7 +278,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-aarch64-build-bun-nolto" + - "linux-aarch64-build-bun" agents: robobun: "true" os: "linux" @@ -555,7 +286,7 @@ steps: distro: "debian" release: "12" command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" - key: "linux-aarch64-test-ubuntu-2204" label: ":ubuntu: 22.04 aarch64 - test-bun" @@ -576,7 +307,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-aarch64-build-bun-nolto" + - "linux-aarch64-build-bun" agents: robobun: "true" os: "linux" @@ -584,99 +315,12 @@ steps: distro: "ubuntu" release: "22.04" command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" - key: "linux-aarch64-test-ubuntu-2004" label: ":ubuntu: 20.04 aarch64 - test-bun" if: "build.branch != 'main'" parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun-nolto" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun-nolto" - - - key: "linux-aarch64-test-debian-12-smoke" - label: ":debian: 12 aarch64 - test-bun-smoke" - if: "build.branch != 'main'" - parallelism: 1 
- soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun --smoke 0.05" - - - key: "linux-aarch64-test-ubuntu-2204-smoke" - label: ":ubuntu: 22.04 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-aarch64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "aarch64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun --smoke 0.05" - - - key: "linux-aarch64-test-ubuntu-2004-smoke" - label: ":ubuntu: 20.04 aarch64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -700,7 +344,7 @@ steps: distro: "ubuntu" release: "20.04" command: - - "./scripts/runner.node.mjs --step linux-aarch64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step linux-aarch64-build-bun" # Linux x64 - key: "linux-x64" @@ -712,16 +356,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - "./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "linux-x64-build-zig" label: ":linux: x64 - build-zig" @@ -729,15 +365,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh linux x64" + - "./.buildkite/scripts/build-zig.sh linux x64" - key: "linux-x64-build-cpp" label: ":linux: x64 - build-cpp" @@ -745,37 +374,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/build-bun-cpp.sh" - - - key: "linux-x64-build-bun-nolto" - label: ":linux: x64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "linux-x64-build-deps" - - "linux-x64-build-zig" - - "linux-x64-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - artifact_paths: - - "bun-linux-x64-nolto.zip" - - "bun-linux-x64-nolto-profile.zip" - env: - CPU_TARGET: "haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64 --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "linux-x64-build-bun" label: ":linux: x64 - build-bun" @@ -787,16 +387,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "bun-linux-x64.zip" - - "bun-linux-x64-profile.zip" - env: - CPU_TARGET: 
"haswell" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64" + - "./.buildkite/scripts/build-bun.sh" - key: "linux-x64-test-debian-12" label: ":debian: 12 x64 - test-bun" @@ -817,7 +409,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-build-bun-nolto" + - "linux-x64-build-bun" agents: robobun: "true" os: "linux" @@ -825,7 +417,7 @@ steps: distro: "debian" release: "12" command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-build-bun" - key: "linux-x64-test-ubuntu-2204" label: ":ubuntu: 22.04 x64 - test-bun" @@ -846,7 +438,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-build-bun-nolto" + - "linux-x64-build-bun" agents: robobun: "true" os: "linux" @@ -854,99 +446,12 @@ steps: distro: "ubuntu" release: "22.04" command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-build-bun" - key: "linux-x64-test-ubuntu-2004" label: ":ubuntu: 20.04 x64 - test-bun" if: "build.branch != 'main'" parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun-nolto" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun-nolto" - - - key: "linux-x64-test-debian-12-smoke" - label: ":debian: 12 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun --smoke 0.05" - - - key: "linux-x64-test-ubuntu-2204-smoke" - label: ":ubuntu: 22.04 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun --smoke 0.05" - - - key: "linux-x64-test-ubuntu-2004-smoke" - label: ":ubuntu: 20.04 x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -970,7 +475,7 @@ steps: distro: "ubuntu" release: "20.04" command: - - "./scripts/runner.node.mjs --step linux-x64-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step linux-x64-build-bun" # Linux x64-baseline - key: "linux-x64-baseline" @@ -982,16 +487,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "build/bun-deps/**/*" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - BUN_DEPS_CACHE_DIR: "$$HOME/.cache/bun-deps" command: - - 
"./scripts/all-dependencies.sh" + - "./.buildkite/scripts/build-deps.sh" - key: "linux-x64-baseline-build-zig" label: ":linux: x64-baseline - build-zig" @@ -999,15 +496,8 @@ steps: queue: "build-darwin" os: "darwin" arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/build-bun-zig.sh linux x64" + - "./.buildkite/scripts/build-zig.sh linux x64" - key: "linux-x64-baseline-build-cpp" label: ":linux: x64-baseline - build-cpp" @@ -1015,37 +505,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "build/bun-cpp-objects.a" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" - command: - - "./scripts/build-bun-cpp.sh" - - - key: "linux-x64-baseline-build-bun-nolto" - label: ":linux: x64-baseline - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "linux-x64-baseline-build-deps" - - "linux-x64-baseline-build-zig" - - "linux-x64-baseline-build-cpp" - agents: - queue: "build-linux" - os: "linux" - arch: "x64" - artifact_paths: - - "bun-linux-x64-baseline-nolto.zip" - - "bun-linux-x64-baseline-nolto-profile.zip" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64-baseline --fast" + - "./.buildkite/scripts/build-cpp.sh" - key: "linux-x64-baseline-build-bun" label: ":linux: x64-baseline - build-bun" @@ -1057,16 +518,8 @@ steps: queue: "build-linux" os: "linux" arch: "x64" - artifact_paths: - - "bun-linux-x64-baseline.zip" - - "bun-linux-x64-baseline-profile.zip" - env: - CPU_TARGET: "nehalem" - CCACHE_DIR: "$$HOME/.cache/ccache" - SCCACHE_DIR: "$$HOME/.cache/sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME/.cache/zig-cache" command: - - "./scripts/buildkite-link-bun.sh --tag linux-x64-baseline" + - "./.buildkite/scripts/build-bun.sh" - key: "linux-x64-baseline-test-debian-12" label: ":debian: 12 x64-baseline - test-bun" @@ -1087,7 +540,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-baseline-build-bun-nolto" + - "linux-x64-baseline-build-bun" agents: robobun: "true" os: "linux" @@ -1095,7 +548,7 @@ steps: distro: "debian" release: "12" command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" - key: "linux-x64-baseline-test-ubuntu-2204" label: ":ubuntu: 22.04 x64-baseline - test-bun" @@ -1116,7 +569,7 @@ steps: - signal: SIGTERM limit: 3 depends_on: - - "linux-x64-baseline-build-bun-nolto" + - "linux-x64-baseline-build-bun" agents: robobun: "true" os: "linux" @@ -1124,99 +577,12 @@ steps: distro: "ubuntu" release: "22.04" command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun-nolto" + - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" - key: "linux-x64-baseline-test-ubuntu-2004" label: ":ubuntu: 20.04 x64-baseline - test-bun" if: "build.branch != 'main'" parallelism: 5 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun-nolto" - agents: - robobun: "true" - os: "linux" 
- arch: "x64" - distro: "ubuntu" - release: "20.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun-nolto" - - - key: "linux-x64-baseline-test-debian-12-smoke" - label: ":debian: 12 x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "debian" - release: "12" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun --smoke 0.05" - - - key: "linux-x64-baseline-test-ubuntu-2204-smoke" - label: ":ubuntu: 22.04 x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 - soft_fail: - - exit_status: 2 - retry: - automatic: - - exit_status: 1 - limit: 1 - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "linux-x64-baseline-build-bun" - agents: - robobun: "true" - os: "linux" - arch: "x64" - distro: "ubuntu" - release: "22.04" - command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun --smoke 0.05" - - - key: "linux-x64-baseline-test-ubuntu-2004-smoke" - label: ":ubuntu: 20.04 x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 2 retry: @@ -1240,7 +606,7 @@ steps: distro: "ubuntu" release: "20.04" command: - - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun --smoke 0.05" + - "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun" # Windows x64 - key: "windows-x64" @@ -1255,9 +621,9 @@ steps: artifact_paths: - "build\\bun-deps\\*.lib" env: - CPU_TARGET: "haswell" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" command: - ".\\scripts\\all-dependencies.ps1" @@ -1267,14 +633,8 @@ steps: queue: "build-darwin" os: "darwin" # cross-compile on Linux or Darwin arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "haswell" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" command: - - "./scripts/build-bun-zig.sh windows x64" + - "./.buildkite/scripts/build-zig.sh windows x64" - key: "windows-x64-build-cpp" label: ":windows: x64 - build-cpp" @@ -1283,31 +643,11 @@ steps: os: "windows" arch: "x64" artifact_paths: - - "build\\bun-cpp-objects.a" + # HACK: See scripts/build-bun-cpp.ps1 + # - "build\\bun-cpp-objects.a" + - "build\\bun-cpp-objects.a.*" command: - - ".\\scripts\\build-bun-cpp.ps1 -Fast $$True" - - - key: "windows-x64-build-bun-nolto" - label: ":windows: x64 - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "windows-x64-build-deps" - - "windows-x64-build-zig" - - "windows-x64-build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "bun-windows-x64-nolto.zip" - - "bun-windows-x64-nolto-profile.zip" - - "features.json" - env: - CPU_TARGET: "haswell" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" - command: - - ".\\scripts\\buildkite-link-bun.ps1 -Fast $$True" + - ".\\scripts\\build-bun-cpp.ps1" - key: "windows-x64-build-bun" label: ":windows: x64 - build-bun" @@ -1324,9 +664,9 @@ steps: - "bun-windows-x64-profile.zip" - 
"features.json" env: - CPU_TARGET: "haswell" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" command: - ".\\scripts\\buildkite-link-bun.ps1" @@ -1334,31 +674,6 @@ steps: label: ":windows: x64 - test-bun" if: "build.branch != 'main'" parallelism: 10 - soft_fail: - - exit_status: 1 - retry: - automatic: - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "windows-x64-build-bun-nolto" - agents: - robobun: "true" - os: "windows" - arch: "x64" - command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun-nolto" - - - key: "windows-x64-test-bun-smoke" - label: ":windows: x64 - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 1 retry: @@ -1378,7 +693,7 @@ steps: os: "windows" arch: "x64" command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun --smoke 0.05" + - "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun" # Windows x64-baseline - key: "windows-x64-baseline" @@ -1393,9 +708,10 @@ steps: artifact_paths: - "build\\bun-deps\\*.lib" env: - CPU_TARGET: "nehalem" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" + USE_BASELINE_BUILD: "1" command: - ".\\scripts\\all-dependencies.ps1" @@ -1405,14 +721,8 @@ steps: queue: "build-darwin" os: "darwin" # cross-compile on Linux or Darwin arch: "aarch64" - artifact_paths: - - "build/bun-zig.o" - env: - CPU_TARGET: "nehalem" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" command: - - "./scripts/build-bun-zig.sh windows x64" + - "./.buildkite/scripts/build-zig.sh windows x64" - key: "windows-x64-baseline-build-cpp" label: ":windows: x64-baseline - build-cpp" @@ -1421,35 +731,16 @@ steps: os: "windows" arch: "x64" artifact_paths: - - "build\\bun-cpp-objects.a" + # HACK: See scripts/build-bun-cpp.ps1 + # - "build\\bun-cpp-objects.a" + - "build\\bun-cpp-objects.a.*" env: - CPU_TARGET: "nehalem" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" + USE_BASELINE_BUILD: "1" command: - - ".\\scripts\\build-bun-cpp.ps1 -Baseline $$True" - - - key: "windows-x64-baseline-build-bun-nolto" - label: ":windows: x64-baseline - build-bun (no-lto)" - if: "build.branch != 'main'" - depends_on: - - "windows-x64-baseline-build-deps" - - "windows-x64-baseline-build-zig" - - "windows-x64-baseline-build-cpp" - agents: - queue: "build-windows" - os: "windows" - arch: "x64" - artifact_paths: - - "bun-windows-x64-baseline-nolto.zip" - - "bun-windows-x64-baseline-nolto-profile.zip" - - "features.json" - env: - CPU_TARGET: "nehalem" - SCCACHE_DIR: "$$HOME\\.cache\\sccache" - ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" - command: - - ".\\scripts\\buildkite-link-bun.ps1 -Baseline $$True -Fast $$True" + - ".\\scripts\\build-bun-cpp.ps1" - key: "windows-x64-baseline-build-bun" label: ":windows: x64-baseline - build-bun" @@ -1466,9 +757,10 @@ steps: - "bun-windows-x64-baseline-profile.zip" - "features.json" env: - CPU_TARGET: "nehalem" SCCACHE_DIR: "$$HOME\\.cache\\sccache" ZIG_LOCAL_CACHE_DIR: "$$HOME\\.cache\\zig-cache" + SCCACHE_IGNORE_SERVER_IO_ERROR: "1" + USE_BASELINE_BUILD: "1" command: - ".\\scripts\\buildkite-link-bun.ps1 -Baseline $$True" @@ -1476,31 +768,6 @@ steps: label: ":windows: x64-baseline - test-bun" if: 
"build.branch != 'main'" parallelism: 10 - soft_fail: - - exit_status: 1 - retry: - automatic: - - exit_status: -1 - limit: 3 - - exit_status: 255 - limit: 3 - - signal_reason: agent_stop - limit: 3 - - signal: SIGTERM - limit: 3 - depends_on: - - "windows-x64-baseline-build-bun-nolto" - agents: - robobun: "true" - os: "windows" - arch: "x64" - command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun-nolto" - - - key: "windows-x64-baseline-test-bun-smoke" - label: ":windows: x64-baseline - test-bun (smoke)" - if: "build.branch != 'main'" - parallelism: 1 soft_fail: - exit_status: 1 retry: @@ -1520,4 +787,4 @@ steps: os: "windows" arch: "x64" command: - - "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun --smoke 0.05" + - "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun" diff --git a/.buildkite/scripts/build-bun.sh b/.buildkite/scripts/build-bun.sh new file mode 100755 index 00000000000000..59363a39fdc2c3 --- /dev/null +++ b/.buildkite/scripts/build-bun.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +cwd="$(pwd)" + +mkdir -p build +source "$(dirname "$0")/download-artifact.sh" "build/bun-deps/**" --step "$BUILDKITE_GROUP_KEY-build-deps" +source "$(dirname "$0")/download-artifact.sh" "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig" +source "$(dirname "$0")/download-artifact.sh" "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split +cd build + +run_command cmake .. "${CMAKE_FLAGS[@]}" \ + -GNinja \ + -DBUN_LINK_ONLY="1" \ + -DNO_CONFIGURE_DEPENDS="1" \ + -DBUN_ZIG_OBJ_DIR="$cwd/build" \ + -DBUN_CPP_ARCHIVE="$cwd/build/bun-cpp-objects.a" \ + -DBUN_DEPS_OUT_DIR="$cwd/build/bun-deps" \ + -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ + -DCPU_TARGET="$CPU_TARGET" \ + -DUSE_LTO="$USE_LTO" \ + -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ + -DCANARY="$CANARY" \ + -DGIT_SHA="$GIT_SHA" +run_command ninja -v -j "$CPUS" +run_command ls + +tag="bun-$BUILDKITE_GROUP_KEY" +if [ "$USE_LTO" == "OFF" ]; then + # Remove OS check when LTO is enabled on macOS again + if [[ "$tag" == *"darwin"* ]]; then + tag="$tag-nolto" + fi +fi + +for name in bun bun-profile; do + dir="$tag" + if [ "$name" == "bun-profile" ]; then + dir="$tag-profile" + fi + run_command chmod +x "$name" + run_command "./$name" --revision + run_command mkdir -p "$dir" + run_command mv "$name" "$dir/$name" + run_command zip -r "$dir.zip" "$dir" + source "$cwd/.buildkite/scripts/upload-artifact.sh" "$dir.zip" +done diff --git a/.buildkite/scripts/build-cpp.sh b/.buildkite/scripts/build-cpp.sh new file mode 100755 index 00000000000000..f307918189aba8 --- /dev/null +++ b/.buildkite/scripts/build-cpp.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" +source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)" +{ set +x; } 2>/dev/null + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +mkdir -p build +cd build +mkdir -p tmp_modules tmp_functions js codegen + +run_command cmake .. "${CMAKE_FLAGS[@]}" \ + -GNinja \ + -DBUN_CPP_ONLY="1" \ + -DNO_CONFIGURE_DEPENDS="1" \ + -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ + -DCPU_TARGET="$CPU_TARGET" \ + -DUSE_LTO="$USE_LTO" \ + -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ + -DCANARY="$CANARY" \ + -DGIT_SHA="$GIT_SHA" + +chmod +x compile-cpp-only.sh +source compile-cpp-only.sh -v -j "$CPUS" +{ set +x; } 2>/dev/null + +cd .. 
+source "$(dirname "$0")/upload-artifact.sh" "build/bun-cpp-objects.a" --split diff --git a/.buildkite/scripts/build-deps.ps1 b/.buildkite/scripts/build-deps.ps1 new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/.buildkite/scripts/build-deps.sh b/.buildkite/scripts/build-deps.sh new file mode 100755 index 00000000000000..e736fb43ffc4c3 --- /dev/null +++ b/.buildkite/scripts/build-deps.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" +source "$(realpath $(dirname "$0")/../../scripts/all-dependencies.sh)" + +artifacts=( + libcrypto.a libssl.a libdecrepit.a + libcares.a + libarchive.a + liblolhtml.a + libmimalloc.a libmimalloc.o + libtcc.a + libz.a + libzstd.a + libdeflate.a + liblshpack.a +) + +for artifact in "${artifacts[@]}"; do + source "$(dirname "$0")/upload-artifact.sh" "build/bun-deps/$artifact" +done diff --git a/.buildkite/scripts/build-old-js.sh b/.buildkite/scripts/build-old-js.sh new file mode 100755 index 00000000000000..92484aebe1bd19 --- /dev/null +++ b/.buildkite/scripts/build-old-js.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" + +function assert_bun() { + if ! command -v bun &>/dev/null; then + echo "error: bun is not installed" 1>&2 + exit 1 + fi +} + +function assert_make() { + if ! command -v make &>/dev/null; then + echo "error: make is not installed" 1>&2 + exit 1 + fi +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +function build_node_fallbacks() { + local cwd="src/node-fallbacks" + run_command bun install --cwd "$cwd" --frozen-lockfile + run_command bun run --cwd "$cwd" build +} + +function build_old_js() { + run_command bun install --frozen-lockfile + run_command make runtime_js fallback_decoder bun_error +} + +assert_bun +assert_make +build_node_fallbacks +build_old_js diff --git a/.buildkite/scripts/build-zig.sh b/.buildkite/scripts/build-zig.sh new file mode 100755 index 00000000000000..e7a2614556fd0f --- /dev/null +++ b/.buildkite/scripts/build-zig.sh @@ -0,0 +1,80 @@ +#!/bin/bash + +set -eo pipefail +source "$(dirname "$0")/env.sh" + +function assert_target() { + local arch="${2-$(uname -m)}" + case "$(echo "$arch" | tr '[:upper:]' '[:lower:]')" in + x64 | x86_64 | amd64) + export ZIG_ARCH="x86_64" + if [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then + export ZIG_CPU_TARGET="nehalem" + else + export ZIG_CPU_TARGET="haswell" + fi + ;; + aarch64 | arm64) + export ZIG_ARCH="aarch64" + export ZIG_CPU_TARGET="native" + ;; + *) + echo "error: Unsupported architecture: $arch" 1>&2 + exit 1 + ;; + esac + local os="${1-$(uname -s)}" + case "$(echo "$os" | tr '[:upper:]' '[:lower:]')" in + linux) + export ZIG_TARGET="$ZIG_ARCH-linux-gnu" ;; + darwin) + export ZIG_TARGET="$ZIG_ARCH-macos-none" ;; + windows) + export ZIG_TARGET="$ZIG_ARCH-windows-msvc" ;; + *) + echo "error: Unsupported operating system: $os" 1>&2 + exit 1 + ;; + esac +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +assert_target "$@" + +# Since the zig build depends on files from the zig submodule, +# make sure to update the submodule before building. +run_command git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig + +# TODO: Move these to be part of the CMake build +source "$(dirname "$0")/build-old-js.sh" + +cwd="$(pwd)" +mkdir -p build +cd build + +run_command cmake .. 
"${CMAKE_FLAGS[@]}" \ + -GNinja \ + -DNO_CONFIGURE_DEPENDS="1" \ + -DNO_CODEGEN="0" \ + -DWEBKIT_DIR="omit" \ + -DBUN_ZIG_OBJ_DIR="$cwd/build" \ + -DZIG_LIB_DIR="$cwd/src/deps/zig/lib" \ + -DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \ + -DARCH="$ZIG_ARCH" \ + -DCPU_TARGET="$ZIG_CPU_TARGET" \ + -DZIG_TARGET="$ZIG_TARGET" \ + -DUSE_LTO="$USE_LTO" \ + -DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \ + -DCANARY="$CANARY" \ + -DGIT_SHA="$GIT_SHA" + +export ONLY_ZIG="1" +run_command ninja "$cwd/build/bun-zig.o" -v -j "$CPUS" + +cd .. +source "$(dirname "$0")/upload-artifact.sh" "build/bun-zig.o" diff --git a/.buildkite/scripts/download-artifact.ps1 b/.buildkite/scripts/download-artifact.ps1 new file mode 100755 index 00000000000000..0504474077d304 --- /dev/null +++ b/.buildkite/scripts/download-artifact.ps1 @@ -0,0 +1,47 @@ +param ( + [Parameter(Mandatory=$true)] + [string[]] $Paths, + [switch] $Split +) + +$ErrorActionPreference = "Stop" + +function Assert-Buildkite-Agent() { + if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) { + Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install" + exit 1 + } +} + +function Assert-Join-File() { + if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) { + Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3" + exit 1 + } +} + +function Download-Buildkite-Artifact() { + param ( + [Parameter(Mandatory=$true)] + [string] $Path, + ) + if ($Split) { + & buildkite-agent artifact download "$Path.*" --debug --debug-http + Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles + } else { + & buildkite-agent artifact download "$Path" --debug --debug-http + } + if (-not (Test-Path $Path)) { + Write-Error "Could not find artifact: $Path" + exit 1 + } +} + +Assert-Buildkite-Agent +if ($Split) { + Assert-Join-File +} + +foreach ($Path in $Paths) { + Download-Buildkite-Artifact $Path +} diff --git a/.buildkite/scripts/download-artifact.sh b/.buildkite/scripts/download-artifact.sh new file mode 100755 index 00000000000000..59075618532294 --- /dev/null +++ b/.buildkite/scripts/download-artifact.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +set -eo pipefail + +function assert_buildkite_agent() { + if ! command -v buildkite-agent &> /dev/null; then + echo "error: Cannot find buildkite-agent, please install it:" + echo "https://buildkite.com/docs/agent/v3/install" + exit 1 + fi +} + +function download_buildkite_artifact() { + local path="$1"; shift + local split="0" + local args=() + while true; do + if [ -z "$1" ]; then + break + fi + case "$1" in + --split) split="1"; shift ;; + *) args+=("$1"); shift ;; + esac + done + if [ "$split" == "1" ]; then + run_command buildkite-agent artifact download "$path.*" . "${args[@]}" + run_command cat $path.?? > "$path" + run_command rm -f $path.?? + else + run_command buildkite-agent artifact download "$path" . "${args[@]}" + fi + if [[ "$path" != *"*"* ]] && [ ! 
-f "$path" ]; then + echo "error: Could not find artifact: $path" + exit 1 + fi +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +assert_buildkite_agent +download_buildkite_artifact "$@" diff --git a/.buildkite/scripts/env.sh b/.buildkite/scripts/env.sh new file mode 100755 index 00000000000000..61b8382e358a23 --- /dev/null +++ b/.buildkite/scripts/env.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +set -eo pipefail + +function assert_os() { + local os="$(uname -s)" + case "$os" in + Linux) + echo "linux" ;; + Darwin) + echo "darwin" ;; + *) + echo "error: Unsupported operating system: $os" 1>&2 + exit 1 + ;; + esac +} + +function assert_arch() { + local arch="$(uname -m)" + case "$arch" in + aarch64 | arm64) + echo "aarch64" ;; + x86_64 | amd64) + echo "x64" ;; + *) + echo "error: Unknown architecture: $arch" 1>&2 + exit 1 + ;; + esac +} + +function assert_build() { + if [ -z "$BUILDKITE_REPO" ]; then + echo "error: Cannot find repository for this build" + exit 1 + fi + if [ -z "$BUILDKITE_COMMIT" ]; then + echo "error: Cannot find commit for this build" + exit 1 + fi + if [ -z "$BUILDKITE_STEP_KEY" ]; then + echo "error: Cannot find step key for this build" + exit 1 + fi + if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'" + exit 1 + fi + # Skip os and arch checks for Zig, since it's cross-compiled on macOS + if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then + local os="$(assert_os)" + if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'" + exit 1 + fi + local arch="$(assert_arch)" + if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'" + exit 1 + fi + fi +} + +function assert_buildkite_agent() { + if ! 
command -v buildkite-agent &> /dev/null; then + echo "error: Cannot find buildkite-agent, please install it:" + echo "https://buildkite.com/docs/agent/v3/install" + exit 1 + fi +} + +function export_environment() { + source "$(realpath $(dirname "$0")/../../scripts/env.sh)" + { set +x; } 2>/dev/null + export GIT_SHA="$BUILDKITE_COMMIT" + export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY" + export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY" + export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY" + export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY" + if [ "$(assert_arch)" == "aarch64" ]; then + export CPU_TARGET="native" + elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then + export CPU_TARGET="nehalem" + else + export CPU_TARGET="haswell" + fi + if [[ "$BUILDKITE_STEP_KEY" == *"nolto"* ]]; then + export USE_LTO="OFF" + else + export USE_LTO="ON" + fi + if $(buildkite-agent meta-data exists release &> /dev/null); then + export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)" + else + export CMAKE_BUILD_TYPE="Release" + fi + if $(buildkite-agent meta-data exists canary &> /dev/null); then + export CANARY="$(buildkite-agent meta-data get canary)" + else + export CANARY="1" + fi + if $(buildkite-agent meta-data exists assertions &> /dev/null); then + export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)" + else + export USE_DEBUG_JSC="OFF" + fi + if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" ]; then + rm -rf "$CCACHE_DIR" + rm -rf "$SCCACHE_DIR" + rm -rf "$ZIG_LOCAL_CACHE_DIR" + rm -rf "$BUN_DEPS_CACHE_DIR" + fi +} + +assert_build +assert_buildkite_agent +export_environment diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh new file mode 100755 index 00000000000000..faaa046811f47d --- /dev/null +++ b/.buildkite/scripts/prepare-build.sh @@ -0,0 +1,90 @@ +#!/bin/bash + +set -eo pipefail + +function assert_build() { + if [ -z "$BUILDKITE_REPO" ]; then + echo "error: Cannot find repository for this build" + exit 1 + fi + if [ -z "$BUILDKITE_COMMIT" ]; then + echo "error: Cannot find commit for this build" + exit 1 + fi +} + +function assert_buildkite_agent() { + if ! command -v buildkite-agent &> /dev/null; then + echo "error: Cannot find buildkite-agent, please install it:" + echo "https://buildkite.com/docs/agent/v3/install" + exit 1 + fi +} + +function assert_jq() { + assert_command "jq" "jq" "https://stedolan.github.io/jq/" +} + +function assert_curl() { + assert_command "curl" "curl" "https://curl.se/download.html" +} + +function assert_command() { + local command="$1" + local package="$2" + local help_url="$3" + if ! command -v "$command" &> /dev/null; then + echo "warning: $command is not installed, installing..." 
+        if command -v brew &> /dev/null; then
+            HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
+        else
+            echo "error: Cannot install $command, please install it"
+            if [ -n "$help_url" ]; then
+                echo ""
+                echo "hint: See $help_url for help"
+            fi
+            exit 1
+        fi
+    fi
+}
+
+function assert_canary() {
+    local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
+    if [ -z "$canary" ]; then
+        local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
+        local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
+        if [ "$tag" == "null" ]; then
+            canary="1"
+        else
+            local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
+            if [ "$revision" == "null" ]; then
+                canary="1"
+            else
+                canary="$revision"
+            fi
+        fi
+    fi
+    run_command buildkite-agent meta-data set canary "$canary"
+}
+
+function upload_buildkite_pipeline() {
+    local path="$1"
+    if [ ! -f "$path" ]; then
+        echo "error: Cannot find pipeline: $path"
+        exit 1
+    fi
+    run_command buildkite-agent pipeline upload "$path"
+}
+
+function run_command() {
+    set -x
+    "$@"
+    { set +x; } 2>/dev/null
+}
+
+assert_build
+assert_buildkite_agent
+assert_jq
+assert_curl
+assert_canary
+upload_buildkite_pipeline ".buildkite/ci.yml"
diff --git a/.buildkite/scripts/upload-artifact.ps1 b/.buildkite/scripts/upload-artifact.ps1
new file mode 100755
index 00000000000000..b7d79a410bdab6
--- /dev/null
+++ b/.buildkite/scripts/upload-artifact.ps1
@@ -0,0 +1,47 @@
+param (
+    [Parameter(Mandatory=$true)]
+    [string[]] $Paths,
+    [switch] $Split
+)
+
+$ErrorActionPreference = "Stop"
+
+function Assert-Buildkite-Agent() {
+    if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
+        Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
+        exit 1
+    }
+}
+
+function Assert-Split-File() {
+    if (-not (Get-Command "Split-File" -ErrorAction SilentlyContinue)) {
+        Write-Error "Cannot find Split-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
+        exit 1
+    }
+}
+
+function Upload-Buildkite-Artifact() {
+    param (
+        [Parameter(Mandatory=$true)]
+        [string] $Path
+    )
+    if (-not (Test-Path $Path)) {
+        Write-Error "Could not find artifact: $Path"
+        exit 1
+    }
+    if ($Split) {
+        Remove-Item -Path "$Path.*" -Force
+        Split-File -Path (Resolve-Path $Path) -PartSizeBytes "50MB" -Verbose
+        $Path = "$Path.*"
+    }
+    & buildkite-agent artifact upload "$Path" --debug --debug-http
+}
+
+Assert-Buildkite-Agent
+if ($Split) {
+    Assert-Split-File
+}
+
+foreach ($Path in $Paths) {
+    Upload-Buildkite-Artifact $Path
+}
diff --git a/.buildkite/scripts/upload-artifact.sh b/.buildkite/scripts/upload-artifact.sh
new file mode 100755
index 00000000000000..0284a93c79b5f2
--- /dev/null
+++ b/.buildkite/scripts/upload-artifact.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+set -eo pipefail
+
+function assert_buildkite_agent() {
+    if ! command -v buildkite-agent &> /dev/null; then
+        echo "error: Cannot find buildkite-agent, please install it:"
+        echo "https://buildkite.com/docs/agent/v3/install"
+        exit 1
+    fi
+}
+
+function assert_split() {
+    if !
command -v split &> /dev/null; then + echo "error: Cannot find split, please install it:" + echo "https://www.gnu.org/software/coreutils/split" + exit 1 + fi +} + +function upload_buildkite_artifact() { + local path="$1"; shift + local split="0" + local args=() + while true; do + if [ -z "$1" ]; then + break + fi + case "$1" in + --split) split="1"; shift ;; + *) args+=("$1"); shift ;; + esac + done + if [ ! -f "$path" ]; then + echo "error: Could not find artifact: $path" + exit 1 + fi + if [ "$split" == "1" ]; then + run_command rm -f "$path."* + run_command split -b 50MB -d "$path" "$path." + run_command buildkite-agent artifact upload "$path.*" "${args[@]}" + else + run_command buildkite-agent artifact upload "$path" "${args[@]}" + fi +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +assert_buildkite_agent +upload_buildkite_artifact "$@" diff --git a/.buildkite/scripts/upload-release.sh b/.buildkite/scripts/upload-release.sh index c1576bd620bd27..b3d4c0a415975b 100755 --- a/.buildkite/scripts/upload-release.sh +++ b/.buildkite/scripts/upload-release.sh @@ -3,7 +3,15 @@ set -eo pipefail function assert_main() { - if [[ "$BUILDKITE_PULL_REQUEST_REPO" && "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]]; then + if [ -z "$BUILDKITE_REPO" ]; then + echo "error: Cannot find repository for this build" + exit 1 + fi + if [ -z "$BUILDKITE_COMMIT" ]; then + echo "error: Cannot find commit for this build" + exit 1 + fi + if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then echo "error: Cannot upload release from a fork" exit 1 fi @@ -25,70 +33,152 @@ function assert_buildkite_agent() { fi } -function assert_gh() { - if ! command -v gh &> /dev/null; then - echo "warning: gh is not installed, installing..." +function assert_github() { + assert_command "gh" "gh" "https://github.com/cli/cli#installation" + assert_buildkite_secret "GITHUB_TOKEN" + # gh expects the token in $GH_TOKEN + export GH_TOKEN="$GITHUB_TOKEN" +} + +function assert_aws() { + assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html" + for secret in AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_ENDPOINT AWS_BUCKET; do + assert_buildkite_secret "$secret" + done +} + +function assert_sentry() { + assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/" + for secret in SENTRY_AUTH_TOKEN SENTRY_ORG SENTRY_PROJECT; do + assert_buildkite_secret "$secret" + done +} + +function run_command() { + set -x + "$@" + { set +x; } 2>/dev/null +} + +function assert_command() { + local command="$1" + local package="$2" + local help_url="$3" + if ! command -v "$command" &> /dev/null; then + echo "warning: $command is not installed, installing..." 
if command -v brew &> /dev/null; then - brew install gh + HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package" else - echo "error: Cannot install gh, please install it:" - echo "https://github.com/cli/cli#installation" + echo "error: Cannot install $command, please install it" + if [ -n "$help_url" ]; then + echo "" + echo "hint: See $help_url for help" + fi exit 1 fi fi } -function assert_gh_token() { - local token=$(buildkite-agent secret get GITHUB_TOKEN) - if [ -z "$token" ]; then - echo "error: Cannot find GITHUB_TOKEN secret" +function assert_buildkite_secret() { + local key="$1" + local value=$(buildkite-agent secret get "$key") + if [ -z "$value" ]; then + echo "error: Cannot find $key secret" echo "" - echo "hint: Create a secret named GITHUB_TOKEN with a GitHub access token:" + echo "hint: Create a secret named $key with a value:" echo "https://buildkite.com/docs/pipelines/buildkite-secrets" exit 1 fi - export GH_TOKEN="$token" + export "$key"="$value" } -function download_artifact() { - local name=$1 - buildkite-agent artifact download "$name" . - if [ ! -f "$name" ]; then - echo "error: Cannot find Buildkite artifact: $name" - exit 1 +function release_tag() { + local version="$1" + if [ "$version" == "canary" ]; then + echo "canary" + else + echo "bun-v$version" fi } -function upload_assets() { - local tag=$1 - local files=${@:2} - gh release upload "$tag" $files --clobber --repo "$BUILDKITE_REPO" +function create_sentry_release() { + local version="$1" + local release="$version" + if [ "$version" == "canary" ]; then + release="$BUILDKITE_COMMIT-canary" + fi + run_command sentry-cli releases new "$release" --finalize + run_command sentry-cli releases set-commits "$release" --auto --ignore-missing + if [ "$version" == "canary" ]; then + run_command sentry-cli deploys new --env="canary" --release="$release" + fi +} + +function download_buildkite_artifacts() { + local dir="$1" + local names="${@:2}" + for name in "${names[@]}"; do + run_command buildkite-agent artifact download "$name" "$dir" + if [ ! 
-f "$dir/$name" ]; then + echo "error: Cannot find Buildkite artifact: $name" + exit 1 + fi + done +} + +function upload_github_assets() { + local version="$1" + local tag="$(release_tag "$version")" + local files="${@:2}" + for file in "${files[@]}"; do + run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO" + done + if [ "$version" == "canary" ]; then + run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \ + --notes "This canary release of Bun corresponds to the commit: $BUILDKITE_COMMIT" + fi +} + +function upload_s3_files() { + local folder="$1" + local files="${@:2}" + for file in "${files[@]}"; do + run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file" + done +} + +function create_release() { + assert_main + assert_buildkite_agent + assert_github + assert_sentry + + local tag="$1" # 'canary' or 'x.y.z' + local artifacts=( + bun-darwin-aarch64.zip + bun-darwin-aarch64-profile.zip + bun-darwin-x64.zip + bun-darwin-x64-profile.zip + bun-linux-aarch64.zip + bun-linux-aarch64-profile.zip + bun-linux-x64.zip + bun-linux-x64-profile.zip + bun-linux-x64-baseline.zip + bun-linux-x64-baseline-profile.zip + bun-windows-x64.zip + bun-windows-x64-profile.zip + bun-windows-x64-baseline.zip + bun-windows-x64-baseline-profile.zip + ) + + for artifact in "${artifacts[@]}"; do + download_buildkite_artifact "$artifact" + done + + upload_github_assets "$tag" "${artifacts[@]}" + upload_s3_files "releases/$BUILDKITE_COMMIT" "${artifacts[@]}" + upload_s3_files "releases/$tag" "${artifacts[@]}" + create_sentry_release "$tag" } -assert_main -assert_buildkite_agent -assert_gh -assert_gh_token - -declare artifacts=( - bun-darwin-aarch64.zip - bun-darwin-aarch64-profile.zip - bun-darwin-x64.zip - bun-darwin-x64-profile.zip - bun-linux-aarch64.zip - bun-linux-aarch64-profile.zip - bun-linux-x64.zip - bun-linux-x64-profile.zip - bun-linux-x64-baseline.zip - bun-linux-x64-baseline-profile.zip - bun-windows-x64.zip - bun-windows-x64-profile.zip - bun-windows-x64-baseline.zip - bun-windows-x64-baseline-profile.zip -) - -for artifact in "${artifacts[@]}"; do - download_artifact $artifact -done - -upload_assets "canary" "${artifacts[@]}" +create_release "canary" diff --git a/.github/workflows/build-darwin.yml b/.github/workflows/build-darwin.yml deleted file mode 100644 index b2bf1f12dabf23..00000000000000 --- a/.github/workflows/build-darwin.yml +++ /dev/null @@ -1,286 +0,0 @@ -name: Build Darwin - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - default: macos-13-large - tag: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - canary: - type: boolean - no-cache: - type: boolean - -env: - LLVM_VERSION: 18 - BUN_VERSION: 1.1.8 - LC_CTYPE: "en_US.UTF-8" - LC_ALL: "en_US.UTF-8" - # LTO is disabled because we cannot use lld on macOS currently - BUN_ENABLE_LTO: "0" - -jobs: - build-submodules: - name: Build Submodules - runs-on: ${{ inputs.runs-on }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - .gitmodules - src/deps - scripts - - name: Hash Submodules - id: hash - run: | - print_versions() { - git submodule | grep -v WebKit - echo "LLVM_VERSION=${{ env.LLVM_VERSION }}" - cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort) - } - echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT - - name: Install Dependencies - env: - 
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 - HOMEBREW_NO_AUTO_UPDATE: 1 - HOMEBREW_NO_INSTALL_CLEANUP: 1 - run: | - brew install \ - llvm@${{ env.LLVM_VERSION }} \ - ccache \ - rust \ - pkg-config \ - coreutils \ - libtool \ - cmake \ - libiconv \ - automake \ - openssl@1.1 \ - ninja \ - golang \ - gnu-sed --force --overwrite - echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH - echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH - echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH - brew link --overwrite llvm@$LLVM_VERSION - - name: Clone Submodules - run: | - ./scripts/update-submodules.sh - - name: Build Submodules - env: - CPU_TARGET: ${{ inputs.cpu }} - BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps - run: | - mkdir -p $BUN_DEPS_OUT_DIR - ./scripts/all-dependencies.sh - - name: Upload bun-${{ inputs.tag }}-deps - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: ${{ runner.temp }}/bun-deps - if-no-files-found: error - build-cpp: - name: Build C++ - runs-on: ${{ inputs.runs-on }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: recursive - # TODO: Figure out how to cache homebrew dependencies - - name: Install Dependencies - env: - HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 - HOMEBREW_NO_AUTO_UPDATE: 1 - HOMEBREW_NO_INSTALL_CLEANUP: 1 - run: | - brew install \ - llvm@${{ env.LLVM_VERSION }} \ - ccache \ - rust \ - pkg-config \ - coreutils \ - libtool \ - cmake \ - libiconv \ - automake \ - openssl@1.1 \ - ninja \ - golang \ - gnu-sed --force --overwrite - echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH - echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH - echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH - brew link --overwrite llvm@$LLVM_VERSION - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ env.BUN_VERSION }} - - name: Compile - env: - CPU_TARGET: ${{ inputs.cpu }} - SOURCE_DIR: ${{ github.workspace }} - OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj - BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps - CCACHE_DIR: ${{ runner.temp }}/ccache - run: | - mkdir -p $OBJ_DIR - cd $OBJ_DIR - cmake -S $SOURCE_DIR -B $OBJ_DIR \ - -G Ninja \ - -DCMAKE_BUILD_TYPE=Release \ - -DUSE_LTO=ON \ - -DBUN_CPP_ONLY=1 \ - -DNO_CONFIGURE_DEPENDS=1 - chmod +x compile-cpp-only.sh - ./compile-cpp-only.sh -v - - name: Upload bun-${{ inputs.tag }}-cpp - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a - if-no-files-found: error - build-zig: - name: Build Zig - uses: ./.github/workflows/build-zig.yml - with: - os: darwin - only-zig: true - tag: ${{ inputs.tag }} - arch: ${{ inputs.arch }} - cpu: ${{ inputs.cpu }} - assertions: ${{ inputs.assertions }} - canary: ${{ inputs.canary }} - no-cache: ${{ inputs.no-cache }} - link: - name: Link - runs-on: ${{ inputs.runs-on }} - needs: - - build-submodules - - build-cpp - - build-zig - steps: - - uses: actions/checkout@v4 - # TODO: Figure out how to cache homebrew dependencies - - name: Install Dependencies - env: - HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 - HOMEBREW_NO_AUTO_UPDATE: 1 - HOMEBREW_NO_INSTALL_CLEANUP: 1 - run: | - brew install \ - llvm@${{ env.LLVM_VERSION }} \ - ccache \ - rust \ - pkg-config \ - coreutils \ - libtool \ - cmake \ - libiconv \ - automake \ - openssl@1.1 \ - ninja \ - golang \ - gnu-sed --force --overwrite - echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH - echo "$(brew --prefix coreutils)/libexec/gnubin" >> 
$GITHUB_PATH - echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH - brew link --overwrite llvm@$LLVM_VERSION - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ env.BUN_VERSION }} - - name: Download bun-${{ inputs.tag }}-deps - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: ${{ runner.temp }}/bun-deps - - name: Download bun-${{ inputs.tag }}-cpp - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: ${{ runner.temp }}/bun-cpp-obj - - name: Download bun-${{ inputs.tag }}-zig - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-zig - path: ${{ runner.temp }}/release - - name: Link - env: - CPU_TARGET: ${{ inputs.cpu }} - run: | - SRC_DIR=$PWD - mkdir ${{ runner.temp }}/link-build - cd ${{ runner.temp }}/link-build - cmake $SRC_DIR \ - -G Ninja \ - -DCMAKE_BUILD_TYPE=Release \ - -DUSE_LTO=ON \ - -DBUN_LINK_ONLY=1 \ - -DBUN_ZIG_OBJ_DIR="${{ runner.temp }}/release" \ - -DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \ - -DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \ - -DNO_CONFIGURE_DEPENDS=1 - ninja -v - - name: Prepare - run: | - cd ${{ runner.temp }}/link-build - chmod +x bun-profile bun - mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/ - mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile - if [ -f bun-profile.dSYM || -d bun-profile.dSYM ]; then - mv bun-profile.dSYM bun-${{ inputs.tag }}-profile/bun-profile.dSYM - fi - if [ -f bun.dSYM || -d bun.dSYM ]; then - mv bun.dSYM bun-${{ inputs.tag }}-profile/bun-profile.dSYM - fi - mv bun bun-${{ inputs.tag }}/bun - zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile - zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }} - - name: Upload bun-${{ inputs.tag }} - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}.zip - if-no-files-found: error - - name: Upload bun-${{ inputs.tag }}-profile - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-profile - path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}-profile.zip - if-no-files-found: error - on-failure: - if: ${{ github.repository_owner == 'oven-sh' && failure() }} - name: On Failure - needs: link - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed. 
- - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml deleted file mode 100644 index c1bde9271cbdc6..00000000000000 --- a/.github/workflows/build-linux.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Build Linux - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - required: true - tag: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - zig-optimize: - type: string - canary: - type: boolean - no-cache: - type: boolean - -jobs: - build: - name: Build Linux - uses: ./.github/workflows/build-zig.yml - with: - os: linux - only-zig: false - runs-on: ${{ inputs.runs-on }} - tag: ${{ inputs.tag }} - arch: ${{ inputs.arch }} - cpu: ${{ inputs.cpu }} - assertions: ${{ inputs.assertions }} - zig-optimize: ${{ inputs.zig-optimize }} - canary: ${{ inputs.canary }} - no-cache: ${{ inputs.no-cache }} - on-failure: - if: ${{ github.repository_owner == 'oven-sh' && failure() }} - name: On Failure - needs: build - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed. - - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml deleted file mode 100644 index 3585a77f6b8e00..00000000000000 --- a/.github/workflows/build-windows.yml +++ /dev/null @@ -1,348 +0,0 @@ -name: Build Windows - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - default: windows - tag: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - canary: - type: boolean - no-cache: - type: boolean - bun-version: - type: string - default: 1.1.7 - -env: - # Must specify exact version of LLVM for Windows - LLVM_VERSION: 18.1.8 - BUN_VERSION: ${{ inputs.bun-version }} - BUN_GARBAGE_COLLECTOR_LEVEL: 1 - BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1 - CI: true - USE_LTO: 1 - -jobs: - build-submodules: - name: Build Submodules - runs-on: ${{ inputs.runs-on }} - steps: - - name: Install Scoop - run: | - Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression - Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - .gitmodules - src/deps - scripts - - name: Hash Submodules - id: hash - run: | - $data = "$(& { - git submodule | Where-Object { $_ -notmatch 'WebKit' } - echo "LLVM_VERSION=${{ env.LLVM_VERSION }}" - Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String - echo 1 - })" - $hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | 
ForEach-Object { $_.ToString("x2") } )).Substring(0, 10) - echo "hash=${hash}" >> $env:GITHUB_OUTPUT - - if: ${{ !inputs.no-cache }} - name: Restore Cache - id: cache - uses: actions/cache/restore@v4 - with: - path: bun-deps - key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }} - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Install LLVM and Ninja - run: | - scoop install ninja - scoop install llvm@${{ env.LLVM_VERSION }} - scoop install nasm@2.16.01 - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Clone Submodules - run: | - .\scripts\update-submodules.ps1 - - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - name: Build Dependencies - env: - CPU_TARGET: ${{ inputs.cpu }} - CCACHE_DIR: ccache - USE_LTO: 1 - run: | - .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }} - $env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps") - .\scripts\all-dependencies.ps1 - - name: Save Cache - if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }} - uses: actions/cache/save@v4 - with: - path: bun-deps - key: ${{ steps.cache.outputs.cache-primary-key }} - - name: Upload bun-${{ inputs.tag }}-deps - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: bun-deps - if-no-files-found: error - codegen: - name: Codegen - runs-on: ubuntu-latest - steps: - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ inputs.bun-version }} - - name: Codegen - run: | - ./scripts/cross-compile-codegen.sh win32 x64 - - if: ${{ inputs.canary }} - name: Calculate Revision - run: | - echo "canary_revision=$(GITHUB_TOKEN="${{ github.token }}" - bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision - - name: Upload bun-${{ inputs.tag }}-codegen - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-codegen - path: build-codegen-win32-x64 - if-no-files-found: error - build-cpp: - name: Build C++ - needs: codegen - runs-on: ${{ inputs.runs-on }} - steps: - - name: Install Scoop - run: | - Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression - Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: recursive - - name: Install LLVM and Ninja - run: | - scoop install ninja - scoop install llvm@${{ env.LLVM_VERSION }} - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ inputs.bun-version }} - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - path: ccache - key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }} - restore-keys: | - bun-${{ inputs.tag }}-cpp- - - name: Download bun-${{ inputs.tag }}-codegen - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-codegen - path: build - - name: Compile - env: - CPU_TARGET: ${{ inputs.cpu }} - CCACHE_DIR: ccache - USE_LTO: 1 - run: | - # $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" } - 
$CANARY_REVISION = 0 - .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }} - .\scripts\update-submodules.ps1 - .\scripts\build-libuv.ps1 -CloneOnly $True - cd build - cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release ` - -DNO_CODEGEN=1 ` - -DUSE_LTO=1 ` - -DNO_CONFIGURE_DEPENDS=1 ` - "-DCANARY=${CANARY_REVISION}" ` - -DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }} - if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" } - .\compile-cpp-only.ps1 -v - if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" } - - name: Upload bun-${{ inputs.tag }}-cpp - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: build/bun-cpp-objects.a - if-no-files-found: error - build-zig: - name: Build Zig - uses: ./.github/workflows/build-zig.yml - with: - os: windows - zig-optimize: ReleaseSafe - only-zig: true - tag: ${{ inputs.tag }} - arch: ${{ inputs.arch }} - cpu: ${{ inputs.cpu }} - assertions: ${{ inputs.assertions }} - canary: ${{ inputs.canary }} - no-cache: ${{ inputs.no-cache }} - link: - name: Link - runs-on: ${{ inputs.runs-on }} - needs: - - build-submodules - - build-cpp - - build-zig - - codegen - steps: - - name: Install Scoop - run: | - Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression - Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH - - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - submodules: recursive - - name: Install Ninja - run: | - scoop install ninja - scoop install llvm@${{ env.LLVM_VERSION }} - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ inputs.bun-version }} - - name: Download bun-${{ inputs.tag }}-deps - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-deps - path: bun-deps - - name: Download bun-${{ inputs.tag }}-cpp - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-cpp - path: bun-cpp - - name: Download bun-${{ inputs.tag }}-zig - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-zig - path: bun-zig - - name: Download bun-${{ inputs.tag }}-codegen - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }}-codegen - path: build - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - path: ccache - key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }} - restore-keys: | - bun-${{ inputs.tag }}-cpp- - - name: Link - env: - CPU_TARGET: ${{ inputs.cpu }} - CCACHE_DIR: ccache - run: | - .\scripts\update-submodules.ps1 - .\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }} - Set-Location build - # $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" } - $CANARY_REVISION = 0 - cmake .. 
-G Ninja -DCMAKE_BUILD_TYPE=Release ` - -DNO_CODEGEN=1 ` - -DNO_CONFIGURE_DEPENDS=1 ` - "-DCANARY=${CANARY_REVISION}" ` - -DBUN_LINK_ONLY=1 ` - -DUSE_LTO=1 ` - "-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" ` - "-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" ` - "-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" ` - ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }} - if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" } - ninja -v - if ($LASTEXITCODE -ne 0) { throw "Link failed!" } - - name: Prepare - run: | - $Dist = mkdir -Force "bun-${{ inputs.tag }}" - cp -r build\bun.exe "$Dist\bun.exe" - Compress-Archive -Force "$Dist" "${Dist}.zip" - $Dist = "$Dist-profile" - MkDir -Force "$Dist" - cp -r build\bun.exe "$Dist\bun.exe" - cp -r build\bun.pdb "$Dist\bun.pdb" - Compress-Archive -Force "$Dist" "$Dist.zip" - .\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json - - name: Upload bun-${{ inputs.tag }} - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: bun-${{ inputs.tag }}.zip - if-no-files-found: error - - name: Upload bun-${{ inputs.tag }}-profile - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-profile - path: bun-${{ inputs.tag }}-profile.zip - if-no-files-found: error - - name: Upload bun-feature-data - uses: actions/upload-artifact@v4 - with: - name: bun-feature-data - path: features.json - if-no-files-found: error - overwrite: true - on-failure: - if: ${{ github.repository_owner == 'oven-sh' && failure() }} - name: On Failure - needs: link - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, the build for bun-${{ inputs.tag }} failed. 
- - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** diff --git a/.github/workflows/build-zig.yml b/.github/workflows/build-zig.yml deleted file mode 100644 index eaea6bc1ef2e51..00000000000000 --- a/.github/workflows/build-zig.yml +++ /dev/null @@ -1,122 +0,0 @@ -name: Build Zig - -permissions: - contents: read - actions: write - -on: - workflow_call: - inputs: - runs-on: - type: string - default: ${{ github.repository_owner != 'oven-sh' && 'ubuntu-latest' || inputs.only-zig && 'namespace-profile-bun-ci-linux-x64' || inputs.arch == 'x64' && 'namespace-profile-bun-ci-linux-x64' || 'namespace-profile-bun-ci-linux-aarch64' }} - tag: - type: string - required: true - os: - type: string - required: true - arch: - type: string - required: true - cpu: - type: string - required: true - assertions: - type: boolean - default: false - zig-optimize: - type: string # 'ReleaseSafe' or 'ReleaseFast' - default: ReleaseFast - canary: - type: boolean - default: ${{ github.ref == 'refs/heads/main' }} - only-zig: - type: boolean - default: true - no-cache: - type: boolean - default: false - -jobs: - build-zig: - name: ${{ inputs.only-zig && 'Build Zig' || 'Build & Link' }} - runs-on: ${{ inputs.runs-on }} - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Calculate Cache Key - id: cache - run: | - echo "key=${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}-canary-${{inputs.canary}}" >> $GITHUB_OUTPUT - - if: ${{ !inputs.no-cache }} - name: Restore Cache - uses: actions/cache@v4 - with: - key: bun-${{ inputs.tag }}-docker-${{ steps.cache.outputs.key }} - restore-keys: | - bun-${{ inputs.tag }}-docker- - path: | - ${{ runner.temp }}/dockercache - - name: Setup Docker - uses: docker/setup-buildx-action@v3 - with: - install: true - platforms: | - linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }} - - name: Build - uses: docker/build-push-action@v5 - with: - push: false - target: ${{ inputs.only-zig && 'build_release_obj' || 'artifact' }} - cache-from: | - type=local,src=${{ runner.temp }}/dockercache - cache-to: | - type=local,dest=${{ runner.temp }}/dockercache,mode=max - outputs: | - type=local,dest=${{ runner.temp }}/release - platforms: | - linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }} - build-args: | - GIT_SHA=${{ github.event.workflow_run.head_sha || github.sha }} - TRIPLET=${{ inputs.os == 'darwin' && format('{0}-macos-none', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || inputs.os == 'windows' && format('{0}-windows-msvc', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || format('{0}-linux-gnu', inputs.arch == 'x64' && 'x86_64' || 'aarch64') }} - ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }} - BUILDARCH=${{ inputs.arch == 'x64' && 'amd64' || 'arm64' }} - BUILD_MACHINE_ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }} - CPU_TARGET=${{ inputs.arch == 'x64' && inputs.cpu || 'native' }} - ASSERTIONS=${{ inputs.assertions && 'ON' || 'OFF' }} - ZIG_OPTIMIZE=${{ inputs.zig-optimize }} - CANARY=${{ inputs.canary && '1' || '0' }} - - if: ${{ inputs.only-zig }} - name: Upload bun-${{ inputs.tag }}-zig - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-zig - path: ${{ runner.temp }}/release/bun-zig.o - if-no-files-found: error - - if: ${{ !inputs.only-zig }} - name: Prepare - run: | - cd ${{ runner.temp }}/release - chmod +x bun-profile bun - mkdir bun-${{ inputs.tag }}-profile - mkdir 
bun-${{ inputs.tag }} - strip bun - mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile - mv bun bun-${{ inputs.tag }}/bun - zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile - zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }} - - if: ${{ !inputs.only-zig }} - name: Upload bun-${{ inputs.tag }} - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}.zip - if-no-files-found: error - - if: ${{ !inputs.only-zig }} - name: Upload bun-${{ inputs.tag }}-profile - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-profile - path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}-profile.zip - if-no-files-found: error diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 203190c58f3b2a..00000000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,245 +0,0 @@ -name: CI - -permissions: - contents: read - actions: write - -concurrency: - group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }} - cancel-in-progress: true - -on: - workflow_dispatch: - inputs: - run-id: - type: string - description: The workflow ID to download artifacts (skips the build step) - pull_request: - paths-ignore: - - .vscode/**/* - - docs/**/* - - examples/**/* - push: - branches: - - main - paths-ignore: - - .vscode/**/* - - docs/**/* - - examples/**/* - -jobs: - format: - if: ${{ !inputs.run-id }} - name: Format - uses: ./.github/workflows/run-format.yml - secrets: inherit - with: - zig-version: 0.13.0 - permissions: - contents: write - lint: - if: ${{ !inputs.run-id }} - name: Lint - uses: ./.github/workflows/run-lint.yml - secrets: inherit - linux-x64: - if: ${{ !inputs.run-id }} - name: Build linux-x64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64 - arch: x64 - cpu: haswell - canary: true - no-cache: true - linux-x64-baseline: - if: ${{ !inputs.run-id }} - name: Build linux-x64-baseline - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64-baseline - arch: x64 - cpu: nehalem - canary: true - no-cache: true - linux-aarch64: - if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }} - name: Build linux-aarch64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: namespace-profile-bun-ci-linux-aarch64 - tag: linux-aarch64 - arch: aarch64 - cpu: native - canary: true - no-cache: true - darwin-x64: - if: ${{ !inputs.run-id }} - name: Build darwin-x64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }} - tag: darwin-x64 - arch: x64 - cpu: haswell - canary: true - darwin-x64-baseline: - if: ${{ !inputs.run-id }} - name: Build darwin-x64-baseline - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }} - tag: darwin-x64-baseline - arch: x64 - cpu: nehalem - canary: true - darwin-aarch64: - if: ${{ !inputs.run-id }} - name: Build darwin-aarch64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' 
&& 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }} - tag: darwin-aarch64 - arch: aarch64 - cpu: native - canary: true - windows-x64: - if: ${{ !inputs.run-id }} - name: Build windows-x64 - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64 - arch: x64 - cpu: haswell - canary: true - windows-x64-baseline: - if: ${{ !inputs.run-id }} - name: Build windows-x64-baseline - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64-baseline - arch: x64 - cpu: nehalem - canary: true - linux-x64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test linux-x64 - needs: linux-x64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64 - linux-x64-baseline-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test linux-x64-baseline - needs: linux-x64-baseline - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64-baseline - linux-aarch64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'}} - name: Test linux-aarch64 - needs: linux-aarch64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: namespace-profile-bun-ci-linux-aarch64 - tag: linux-aarch64 - darwin-x64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test darwin-x64 - needs: darwin-x64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }} - tag: darwin-x64 - darwin-x64-baseline-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test darwin-x64-baseline - needs: darwin-x64-baseline - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }} - tag: darwin-x64-baseline - darwin-aarch64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test darwin-aarch64 - needs: darwin-aarch64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }} - tag: darwin-aarch64 - windows-x64-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test windows-x64 - needs: windows-x64 - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: windows - tag: windows-x64 - windows-x64-baseline-test: - if: ${{ inputs.run-id || github.event_name == 'pull_request' }} - name: Test windows-x64-baseline - needs: windows-x64-baseline - uses: ./.github/workflows/run-test.yml - secrets: inherit - with: - 
run-id: ${{ inputs.run-id }} - pr-number: ${{ github.event.number }} - runs-on: windows - tag: windows-x64-baseline - cleanup: - if: ${{ always() }} - name: Cleanup - needs: - - linux-x64 - - linux-x64-baseline - - linux-aarch64 - - darwin-x64 - - darwin-x64-baseline - - darwin-aarch64 - - windows-x64 - - windows-x64-baseline - runs-on: ubuntu-latest - steps: - - name: Cleanup Artifacts - uses: geekyeggo/delete-artifact@v5 - with: - name: | - bun-*-cpp - bun-*-zig - bun-*-deps - bun-*-codegen diff --git a/.github/workflows/comment.yml b/.github/workflows/comment.yml deleted file mode 100644 index 3c798e8fcc041d..00000000000000 --- a/.github/workflows/comment.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Comment - -permissions: - actions: read - pull-requests: write - -on: - workflow_run: - workflows: - - CI - types: - - completed - -jobs: - comment: - if: ${{ github.repository_owner == 'oven-sh' }} - name: Comment - runs-on: ubuntu-latest - steps: - - name: Download Tests - uses: actions/download-artifact@v4 - with: - path: bun - pattern: bun-*-tests - github-token: ${{ github.token }} - run-id: ${{ github.event.workflow_run.id }} - - name: Setup Environment - id: env - shell: bash - run: | - echo "pr-number=$(> $GITHUB_OUTPUT - - name: Generate Comment - run: | - cat bun/bun-*-tests/comment.md > comment.md - if [ -s comment.md ]; then - echo -e "❌ @${{ github.actor }}, your commit has failing tests :(\n\n$(cat comment.md)" > comment.md - else - echo -e "✅ @${{ github.actor }}, all tests passed!" > comment.md - fi - echo -e "\n**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }})**" >> comment.md - echo -e "" >> comment.md - - name: Find Comment - id: comment - uses: peter-evans/find-comment@v3 - with: - issue-number: ${{ steps.env.outputs.pr-number }} - comment-author: github-actions[bot] - body-includes: - - name: Write Comment - uses: peter-evans/create-or-update-comment@v4 - with: - comment-id: ${{ steps.comment.outputs.comment-id }} - issue-number: ${{ steps.env.outputs.pr-number }} - body-path: comment.md - edit-mode: replace diff --git a/.github/workflows/create-release-build.yml b/.github/workflows/create-release-build.yml deleted file mode 100644 index 42adea058572c0..00000000000000 --- a/.github/workflows/create-release-build.yml +++ /dev/null @@ -1,183 +0,0 @@ -name: Create Release Build -run-name: Compile Bun v${{ inputs.version }} by ${{ github.actor }} - -concurrency: - group: release - cancel-in-progress: true - -permissions: - contents: write - actions: write - -on: - workflow_dispatch: - inputs: - version: - type: string - required: true - description: "Release version. Example: 1.1.4. Exclude the 'v' prefix." - tag: - type: string - required: true - description: "GitHub tag to use" - clobber: - type: boolean - required: false - default: false - description: "Overwrite existing release artifacts?" 
- release: - types: - - created - -jobs: - notify-start: - if: ${{ github.repository_owner == 'oven-sh' }} - name: Notify Start - runs-on: ubuntu-latest - steps: - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK_PUBLIC }} - nodetail: true - color: "#1F6FEB" - title: "Bun v${{ inputs.version }} is compiling" - description: | - ### @${{ github.actor }} started compiling Bun v${{inputs.version}} - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }} - nodetail: true - color: "#1F6FEB" - title: "Bun v${{ inputs.version }} is compiling" - description: | - ### @${{ github.actor }} started compiling Bun v${{inputs.version}} - - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** - linux-x64: - name: Build linux-x64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64 - arch: x64 - cpu: haswell - canary: false - linux-x64-baseline: - name: Build linux-x64-baseline - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }} - tag: linux-x64-baseline - arch: x64 - cpu: nehalem - canary: false - linux-aarch64: - name: Build linux-aarch64 - uses: ./.github/workflows/build-linux.yml - secrets: inherit - with: - runs-on: namespace-profile-bun-ci-linux-aarch64 - tag: linux-aarch64 - arch: aarch64 - cpu: native - canary: false - darwin-x64: - name: Build darwin-x64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }} - tag: darwin-x64 - arch: x64 - cpu: haswell - canary: false - darwin-x64-baseline: - name: Build darwin-x64-baseline - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }} - tag: darwin-x64-baseline - arch: x64 - cpu: nehalem - canary: false - darwin-aarch64: - name: Build darwin-aarch64 - uses: ./.github/workflows/build-darwin.yml - secrets: inherit - with: - runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }} - tag: darwin-aarch64 - arch: aarch64 - cpu: native - canary: false - windows-x64: - name: Build windows-x64 - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64 - arch: x64 - cpu: haswell - canary: false - windows-x64-baseline: - name: Build windows-x64-baseline - uses: ./.github/workflows/build-windows.yml - secrets: inherit - with: - runs-on: windows - tag: windows-x64-baseline - arch: x64 - cpu: nehalem - canary: false - - upload-artifacts: - needs: - - linux-x64 - - linux-x64-baseline - - linux-aarch64 - - darwin-x64 - - darwin-x64-baseline - - darwin-aarch64 - - windows-x64 - - windows-x64-baseline - runs-on: ubuntu-latest - steps: - - name: Download Artifacts - uses: actions/download-artifact@v4 - with: - path: bun-releases - pattern: bun-* - merge-multiple: true - github-token: ${{ github.token }} - - name: Check for Artifacts - run: | - if [ ! -d "bun-releases" ] || [ -z "$(ls -A bun-releases)" ]; then - echo "Error: No artifacts were downloaded or 'bun-releases' directory does not exist." 
- exit 1 # Fail the job if the condition is met - else - echo "Artifacts downloaded successfully." - fi - - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "Bun v${{ inputs.version }} release artifacts uploaded" - - name: "Upload Artifacts" - env: - GH_TOKEN: ${{ github.token }} - run: | - # Unzip one level deep each artifact - cd bun-releases - for f in *.zip; do - unzip -o $f - done - cd .. - gh release upload --repo=${{ github.repository }} ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.event.release.id }} ${{ inputs.clobber && '--clobber' || '' }} bun-releases/*.zip diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5c31ba85896c30..cbe6b3e93aa33e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,3 +1,6 @@ +# TODO: Move this to bash scripts intead of Github Actions +# so it can be run from Buildkite, see: .buildkite/scripts/release.sh + name: Release concurrency: release diff --git a/.github/workflows/run-test.yml b/.github/workflows/run-test.yml deleted file mode 100644 index 6efe322a54afac..00000000000000 --- a/.github/workflows/run-test.yml +++ /dev/null @@ -1,224 +0,0 @@ -name: Test - -permissions: - contents: read - actions: read - -on: - workflow_call: - inputs: - runs-on: - type: string - required: true - tag: - type: string - required: true - pr-number: - type: string - required: true - run-id: - type: string - default: ${{ github.run_id }} - -jobs: - test: - name: Tests - runs-on: ${{ inputs.runs-on }} - steps: - - if: ${{ runner.os == 'Windows' }} - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - package.json - bun.lockb - test - packages/bun-internal-test - packages/bun-types - - name: Setup Environment - shell: bash - run: | - echo "${{ inputs.pr-number }}" > pr-number.txt - - name: Download Bun - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: bun - github-token: ${{ github.token }} - run-id: ${{ inputs.run-id || github.run_id }} - - name: Download pnpm - uses: pnpm/action-setup@v4 - with: - version: 8 - - if: ${{ runner.os != 'Windows' }} - name: Setup Bun - shell: bash - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $GITHUB_PATH - - if: ${{ runner.os == 'Windows' }} - name: Setup Cygwin - uses: secondlife/setup-cygwin@v3 - with: - packages: bash - - if: ${{ runner.os == 'Windows' }} - name: Setup Bun (Windows) - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $env:GITHUB_PATH - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20 - - name: Install Dependencies - timeout-minutes: 5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - bun install - - name: Install Dependencies (test) - timeout-minutes: 5 - run: | - bun install --cwd test - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Install Dependencies (runner) - timeout-minutes: 5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - bun install --cwd packages/bun-internal-test - - name: Run Tests - id: test - timeout-minutes: 90 - shell: bash - env: - IS_BUN_CI: 1 - TMPDIR: ${{ runner.temp }} - BUN_TAG: ${{ inputs.tag }} - BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true" - SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }} - TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }} - 
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }} - TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }} - TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }} - SHELLOPTS: igncr - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - node packages/bun-internal-test/src/runner.node.mjs $(which bun) - - if: ${{ always() }} - name: Upload Results - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-tests - path: | - test-report.* - comment.md - pr-number.txt - if-no-files-found: error - overwrite: true - - if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }} - name: Send Message - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.DISCORD_WEBHOOK }} - nodetail: true - color: "#FF0000" - title: "" - description: | - ### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }}) - - @${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}. - - ${{ steps.test.outputs.failing_tests }} - - **[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})** - - name: Fail - if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }} - run: | - echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}." - exit 1 - test-node: - name: Node.js Tests - # TODO: enable when we start paying attention to the results. In the meantime, this causes CI to queue jobs wasting developer time. - if: 0 - runs-on: ${{ inputs.runs-on }} - steps: - - if: ${{ runner.os == 'Windows' }} - name: Setup Git - run: | - git config --global core.autocrlf false - git config --global core.eol lf - - name: Checkout - uses: actions/checkout@v4 - with: - sparse-checkout: | - test/node.js - - name: Setup Environment - shell: bash - run: | - echo "${{ inputs.pr-number }}" > pr-number.txt - - name: Download Bun - uses: actions/download-artifact@v4 - with: - name: bun-${{ inputs.tag }} - path: bun - github-token: ${{ github.token }} - run-id: ${{ inputs.run-id || github.run_id }} - - if: ${{ runner.os != 'Windows' }} - name: Setup Bun - shell: bash - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $GITHUB_PATH - - if: ${{ runner.os == 'Windows' }} - name: Setup Cygwin - uses: secondlife/setup-cygwin@v3 - with: - packages: bash - - if: ${{ runner.os == 'Windows' }} - name: Setup Bun (Windows) - run: | - unzip bun/bun-*.zip - cd bun-* - pwd >> $env:GITHUB_PATH - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20 - - name: Checkout Tests - shell: bash - working-directory: test/node.js - run: | - node runner.mjs --pull - - name: Install Dependencies - timeout-minutes: 5 - shell: bash - working-directory: test/node.js - run: | - bun install - - name: Run Tests - timeout-minutes: 10 # Increase when more tests are added - shell: bash - working-directory: test/node.js - env: - TMPDIR: ${{ runner.temp }} - BUN_GARBAGE_COLLECTOR_LEVEL: "0" - BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true" - run: | - node runner.mjs - - name: Upload Results - uses: actions/upload-artifact@v4 - with: - name: bun-${{ inputs.tag }}-node-tests - path: | - test/node.js/summary/*.json - if-no-files-found: error - overwrite: true diff --git a/.github/workflows/upload.yml b/.github/workflows/upload.yml deleted file mode 100644 index d111d17f6bc88a..00000000000000 --- a/.github/workflows/upload.yml +++ 
/dev/null @@ -1,94 +0,0 @@ -name: Upload Artifacts -run-name: Canary release ${{github.sha}} upload - -permissions: - contents: write - -on: - workflow_run: - workflows: - - CI - types: - - completed - branches: - - main - -jobs: - upload: - if: ${{ github.repository_owner == 'oven-sh' }} - name: Upload Artifacts - runs-on: ubuntu-latest - steps: - - name: Download Artifacts - uses: actions/download-artifact@v4 - with: - path: bun - pattern: bun-* - merge-multiple: true - github-token: ${{ github.token }} - run-id: ${{ github.event.workflow_run.id }} - - name: Check for Artifacts - run: | - if [ ! -d "bun" ] || [ -z "$(ls -A bun)" ]; then - echo "Error: No artifacts were downloaded or 'bun' directory does not exist." - exit 1 # Fail the job if the condition is met - else - echo "Artifacts downloaded successfully." - fi - - name: Upload to GitHub Releases - uses: ncipollo/release-action@v1 - with: - tag: canary - name: Canary (${{ github.sha }}) - prerelease: true - body: This canary release of Bun corresponds to the commit [${{ github.sha }}] - allowUpdates: true - replacesArtifacts: true - generateReleaseNotes: true - artifactErrorsFailBuild: true - artifacts: bun/**/bun-*.zip - token: ${{ github.token }} - - name: Upload to S3 (using SHA) - uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e - with: - endpoint: ${{ secrets.AWS_ENDPOINT }} - aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}} - aws_bucket: ${{ secrets.AWS_BUCKET }} - source_dir: bun - destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}-canary - - name: Upload to S3 (using tag) - uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e - with: - endpoint: ${{ secrets.AWS_ENDPOINT }} - aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}} - aws_bucket: ${{ secrets.AWS_BUCKET }} - source_dir: bun - destination_dir: releases/canary - - name: Announce on Discord - uses: sarisia/actions-status-discord@v1 - with: - webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }} - nodetail: true - color: "#1F6FEB" - title: "New Bun Canary available" - url: https://github.com/oven-sh/bun/commit/${{ github.sha }} - description: | - A new canary build of Bun has been automatically uploaded. To upgrade, run: - ```sh - bun upgrade --canary - # bun upgrade --stable <- to downgrade - ``` - # If notifying sentry fails, don't fail the rest of the build. 
- - name: Notify Sentry - uses: getsentry/action-release@v1.7.0 - env: - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} - SENTRY_ORG: ${{ secrets.SENTRY_ORG }} - SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }} - with: - ignore_missing: true - ignore_empty: true - version: ${{ github.event.workflow_run.head_sha || github.sha }}-canary - environment: canary diff --git a/scripts/all-dependencies.sh b/scripts/all-dependencies.sh index 50a22fe8f8aaef..627339a0775dde 100755 --- a/scripts/all-dependencies.sh +++ b/scripts/all-dependencies.sh @@ -36,9 +36,11 @@ fi dep() { local submodule="$1" local script="$2" - CACHE_KEY= if [ "$CACHE" == "1" ]; then - CACHE_KEY="$submodule/$(echo "$SUBMODULES" | grep "$submodule" | git hash-object --stdin)" + local hash="$(echo "$SUBMODULES" | grep "$submodule" | awk '{print $1}')" + local os="$(uname -s | tr '[:upper:]' '[:lower:]')" + local arch="$(uname -m)" + CACHE_KEY="$submodule/$hash-$os-$arch-$CPU_TARGET" fi if [ -z "$FORCE" ]; then HAS_ALL_DEPS=1 diff --git a/scripts/build-bun-cpp.ps1 b/scripts/build-bun-cpp.ps1 index ab79b7f1b374d1..adb1a57cf7e679 100755 --- a/scripts/build-bun-cpp.ps1 +++ b/scripts/build-bun-cpp.ps1 @@ -1,29 +1,29 @@ -param ( - [switch] $Baseline = $False, - [switch] $Fast = $False -) - $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash -$Tag = If ($Baseline) { "-Baseline" } Else { "" } -$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" } -$UseLto = If ($Fast) { "OFF" } Else { "ON" } - -# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" } -$CANARY_REVISION = 0 -.\scripts\env.ps1 $Tag +.\scripts\env.ps1 .\scripts\update-submodules.ps1 .\scripts\build-libuv.ps1 -CloneOnly $True -cd build -cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release ` +# libdeflate.h is needed otherwise the build fails +git submodule update --init --recursive --progress --depth=1 --checkout src/deps/libdeflate + +cd build +cmake .. 
@CMAKE_FLAGS ` + -G Ninja ` + -DCMAKE_BUILD_TYPE=Release ` -DNO_CODEGEN=0 ` -DNO_CONFIGURE_DEPENDS=1 ` - "-DUSE_BASELINE_BUILD=${UseBaselineBuild}" ` - "-DUSE_LTO=${UseLto}" ` - "-DCANARY=${CANARY_REVISION}" ` - -DBUN_CPP_ONLY=1 $Flags + -DBUN_CPP_ONLY=1 if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" } -.\compile-cpp-only.ps1 -v -if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" } \ No newline at end of file +.\compile-cpp-only.ps1 -v -j $env:CPUS +if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" } + +# HACK: For some reason, the buildkite agent is hanging when uploading bun-cpp-objects.a +# Best guess is that there is an issue when uploading files larger than 500 MB +# +# For now, use FileSplitter to split the file into smaller chunks: +# https://www.powershellgallery.com/packages/FileSplitter/1.3 +if ($env:BUILDKITE) { + Split-File -Path (Resolve-Path "bun-cpp-objects.a") -PartSizeBytes "50MB" -Verbose +} diff --git a/scripts/build-bun-cpp.sh b/scripts/build-bun-cpp.sh deleted file mode 100755 index 631452d9427e6b..00000000000000 --- a/scripts/build-bun-cpp.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env bash -set -exo pipefail -source $(dirname -- "${BASH_SOURCE[0]}")/env.sh - -export USE_LTO="${USE_LTO:-ON}" -case "$(uname -m)" in - aarch64|arm64) - export CPU_TARGET="${CPU_TARGET:-native}" - ;; - *) - export CPU_TARGET="${CPU_TARGET:-haswell}" - ;; -esac - -while [[ $# -gt 0 ]]; do - case "$1" in - --fast|--no-lto) - export USE_LTO="OFF" - shift - ;; - --baseline) - export CPU_TARGET="nehalem" - shift - ;; - --cpu) - export CPU_TARGET="$2" - shift - shift - ;; - *|-*|--*) - echo "Unknown option $1" - exit 1 - ;; - esac -done - -mkdir -p build -cd build -mkdir -p tmp_modules tmp_functions js codegen -cmake .. \ - -GNinja \ - -DCMAKE_BUILD_TYPE=Release \ - -DUSE_LTO=${USE_LTO} \ - -DCPU_TARGET=${CPU_TARGET} \ - -DBUN_CPP_ONLY=1 \ - -DNO_CONFIGURE_DEPENDS=1 -chmod +x ./compile-cpp-only.sh -bash ./compile-cpp-only.sh -v diff --git a/scripts/build-bun-zig.sh b/scripts/build-bun-zig.sh deleted file mode 100755 index 489c635d12756c..00000000000000 --- a/scripts/build-bun-zig.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env bash -set -exo pipefail -source $(dirname -- "${BASH_SOURCE[0]}")/env.sh - -cwd=$(pwd) -zig= - -if [[ "$CI" ]]; then - # Since the zig build depends on files from the zig submodule, - # make sure to update the submodule before building. - git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig - - # Also update the correct version of zig in the submodule. 
- $(dirname -- "${BASH_SOURCE[0]}")/download-zig.sh -fi - -if [ -f "$cwd/.cache/zig/zig" ]; then - zig="$cwd/.cache/zig/zig" -else - zig=$(which zig) -fi - -ZIG_OPTIMIZE="${ZIG_OPTIMIZE:-ReleaseFast}" -CANARY="${CANARY:-0}" -GIT_SHA="${GIT_SHA:-$(git rev-parse HEAD)}" - -BUILD_MACHINE_ARCH="${BUILD_MACHINE_ARCH:-$(uname -m)}" -DOCKER_MACHINE_ARCH="" -if [[ "$BUILD_MACHINE_ARCH" == "x86_64" || "$BUILD_MACHINE_ARCH" == "amd64" ]]; then - BUILD_MACHINE_ARCH="x86_64" - DOCKER_MACHINE_ARCH="amd64" -elif [[ "$BUILD_MACHINE_ARCH" == "aarch64" || "$BUILD_MACHINE_ARCH" == "arm64" ]]; then - BUILD_MACHINE_ARCH="aarch64" - DOCKER_MACHINE_ARCH="arm64" -fi - -TARGET_OS="${1:-linux}" -TARGET_ARCH="${2:-x64}" -TARGET_CPU="${3:-${CPU_TARGET:-native}}" - -BUILDARCH="" -if [[ "$TARGET_ARCH" == "x64" || "$TARGET_ARCH" == "x86_64" || "$TARGET_ARCH" == "amd64" ]]; then - TARGET_ARCH="x86_64" - BUILDARCH="amd64" -elif [[ "$TARGET_ARCH" == "aarch64" || "$TARGET_ARCH" == "arm64" ]]; then - TARGET_ARCH="aarch64" - BUILDARCH="arm64" -fi - -TRIPLET="" -if [[ "$TARGET_OS" == "linux" ]]; then - TRIPLET="$TARGET_ARCH-linux-gnu" -elif [[ "$TARGET_OS" == "darwin" ]]; then - TRIPLET="$TARGET_ARCH-macos-none" -elif [[ "$TARGET_OS" == "windows" ]]; then - TRIPLET="$TARGET_ARCH-windows-msvc" -fi - -echo "--- Building identifier-cache" -$zig run src/js_lexer/identifier_data.zig - -echo "--- Building node-fallbacks" -cd src/node-fallbacks -bun install --frozen-lockfile -bun run build -cd "$cwd" - -echo "--- Building codegen" -bun install --frozen-lockfile -make runtime_js fallback_decoder bun_error - -echo "--- Building modules" -mkdir -p build -bun run src/codegen/bundle-modules.ts --debug=OFF build - -echo "--- Building zig" -cd build -cmake .. \ - -GNinja \ - -DCMAKE_BUILD_TYPE=Release \ - -DUSE_LTO=ON \ - -DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \ - -DGIT_SHA="${GIT_SHA}" \ - -DARCH="${TARGET_ARCH}" \ - -DBUILDARCH="${BUILDARCH}" \ - -DCPU_TARGET="${TARGET_CPU}" \ - -DZIG_TARGET="${TRIPLET}" \ - -DASSERTIONS="OFF" \ - -DWEBKIT_DIR="omit" \ - -DNO_CONFIGURE_DEPENDS=1 \ - -DNO_CODEGEN=1 \ - -DBUN_ZIG_OBJ_DIR="$cwd/build" \ - -DCANARY="$CANARY" \ - -DZIG_LIB_DIR=src/deps/zig/lib -ONLY_ZIG=1 ninja "$cwd/build/bun-zig.o" -v diff --git a/scripts/build-tinycc.ps1 b/scripts/build-tinycc.ps1 index 78a10f42e81532..095fd97dd07f8f 100755 --- a/scripts/build-tinycc.ps1 +++ b/scripts/build-tinycc.ps1 @@ -20,8 +20,6 @@ try { Run clang-cl -DTCC_TARGET_PE -DTCC_TARGET_X86_64 config.h -DC2STR -o c2str.exe conftest.c Run .\c2str.exe .\include\tccdefs.h tccdefs_.h - $Baseline = $env:BUN_DEV_ENV_SET -eq "Baseline=True" - Run clang-cl @($env:CFLAGS -split ' ') libtcc.c -o tcc.obj "-DTCC_TARGET_PE" "-DTCC_TARGET_X86_64" "-O2" "-W2" "-Zi" "-MD" "-GS-" "-c" "-MT" Run llvm-lib "tcc.obj" "-OUT:tcc.lib" diff --git a/scripts/buildkite-link-bun.ps1 b/scripts/buildkite-link-bun.ps1 index b56e0eefc17c02..caa7d98ce72b77 100755 --- a/scripts/buildkite-link-bun.ps1 +++ b/scripts/buildkite-link-bun.ps1 @@ -1,17 +1,13 @@ -param ( - [switch] $Baseline = $False, - [switch] $Fast = $False +param( + [switch]$Baseline = $false ) $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash $Target = If ($Baseline) { "windows-x64-baseline" } Else { "windows-x64" } $Tag = "bun-$Target" -$TagSuffix = If ($Baseline) { "-Baseline" } Else { "" } -$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" } -$UseLto = If ($Fast) { "OFF" } Else { "ON" } -.\scripts\env.ps1 $TagSuffix +.\scripts\env.ps1 mkdir -Force build buildkite-agent artifact 
download "**" build --step "${Target}-build-zig" @@ -21,29 +17,24 @@ mv -Force -ErrorAction SilentlyContinue build\build\bun-deps\* build\bun-deps mv -Force -ErrorAction SilentlyContinue build\build\* build Set-Location build -$CANARY_REVISION = 0 -cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release ` + +# HACK: See scripts/build-bun-cpp.ps1 +Join-File -Path "$(Resolve-Path .)\bun-cpp-objects.a" -Verbose -DeletePartFiles + +cmake .. @CMAKE_FLAGS ` + -G Ninja ` + -DCMAKE_BUILD_TYPE=Release ` -DNO_CODEGEN=1 ` -DNO_CONFIGURE_DEPENDS=1 ` - "-DCPU_TARGET=${CPU_TARGET}" ` - "-DCANARY=${CANARY_REVISION}" ` -DBUN_LINK_ONLY=1 ` - "-DUSE_BASELINE_BUILD=${UseBaselineBuild}" ` - "-DUSE_LTO=${UseLto}" ` "-DBUN_DEPS_OUT_DIR=$(Resolve-Path bun-deps)" ` "-DBUN_CPP_ARCHIVE=$(Resolve-Path bun-cpp-objects.a)" ` - "-DBUN_ZIG_OBJ_DIR=$(Resolve-Path .)" ` - "$Flags" + "-DBUN_ZIG_OBJ_DIR=$(Resolve-Path .)" if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" } -ninja -v +ninja -v -j $env:CPUS if ($LASTEXITCODE -ne 0) { throw "Link failed!" } -ls -if ($Fast) { - $Tag = "$Tag-nolto" -} - Set-Location .. $Dist = mkdir -Force "${Tag}" cp -r build\bun.exe "$Dist\bun.exe" diff --git a/scripts/buildkite-link-bun.sh b/scripts/buildkite-link-bun.sh deleted file mode 100755 index d0456e25ff6c48..00000000000000 --- a/scripts/buildkite-link-bun.sh +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env bash -set -exo pipefail -source $(dirname -- "${BASH_SOURCE[0]}")/env.sh - -export USE_LTO="${USE_LTO:-ON}" -case "$(uname -m)" in - aarch64|arm64) - export CPU_TARGET="${CPU_TARGET:-native}" - ;; - *) - export CPU_TARGET="${CPU_TARGET:-haswell}" - ;; -esac - -export TAG="" -while [[ $# -gt 0 ]]; do - case "$1" in - --tag) - export TAG="$2" - shift - shift - ;; - --fast|--no-lto) - export USE_LTO="OFF" - shift - ;; - --baseline) - export CPU_TARGET="nehalem" - shift - ;; - --cpu) - export CPU_TARGET="$2" - shift - shift - ;; - *|-*|--*) - echo "Unknown option $1" - exit 1 - ;; - esac -done - -if [[ -z "$TAG" ]]; then - echo "--tag is required" - exit 1 -fi - -rm -rf release -mkdir -p release -buildkite-agent artifact download '**' release --step $TAG-build-deps -buildkite-agent artifact download '**' release --step $TAG-build-zig -buildkite-agent artifact download '**' release --step $TAG-build-cpp - -cd release -cmake .. \ - -GNinja \ - -DCMAKE_BUILD_TYPE=Release \ - -DCPU_TARGET=${CPU_TARGET} \ - -DUSE_LTO=${USE_LTO} \ - -DBUN_LINK_ONLY=1 \ - -DBUN_ZIG_OBJ_DIR="$(pwd)/build" \ - -DBUN_CPP_ARCHIVE="$(pwd)/build/bun-cpp-objects.a" \ - -DBUN_DEPS_OUT_DIR="$(pwd)/build/bun-deps" \ - -DNO_CONFIGURE_DEPENDS=1 -ninja -v - -if [[ "${USE_LTO}" == "OFF" ]]; then - TAG="${TAG}-nolto" -fi - -chmod +x bun-profile bun -mkdir -p bun-$TAG-profile/ bun-$TAG/ -mv bun-profile bun-$TAG-profile/bun-profile -mv bun bun-$TAG/bun -zip -r bun-$TAG-profile.zip bun-$TAG-profile -zip -r bun-$TAG.zip bun-$TAG - -cd .. -mv release/bun-$TAG.zip bun-$TAG.zip -mv release/bun-$TAG-profile.zip bun-$TAG-profile.zip diff --git a/scripts/env.ps1 b/scripts/env.ps1 index e9492abee448e6..02c63a68f33587 100755 --- a/scripts/env.ps1 +++ b/scripts/env.ps1 @@ -1,11 +1,3 @@ -param( - [switch]$Baseline = $false -) - -if ($ENV:BUN_DEV_ENV_SET -eq "Baseline=True") { - $Baseline = $true -} - $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash # this is the environment script for building bun's dependencies @@ -38,13 +30,19 @@ if($Env:VSCMD_ARG_TGT_ARCH -eq "x86") { throw "Visual Studio environment is targetting 32 bit. 
This configuration is definetly a mistake." }
 
-$ENV:BUN_DEV_ENV_SET = "Baseline=$Baseline";
-
 $BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { Join-Path $ScriptDir '..' }
 $BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
 $BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'build\bun-deps' }
 $CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-CimInstance -Class Win32_Processor).NumberOfCores }
+$Lto = if ($env:USE_LTO) { $env:USE_LTO -eq "1" } else { $True }
+$Baseline = if ($env:USE_BASELINE_BUILD) {
+  $env:USE_BASELINE_BUILD -eq "1"
+} elseif ($env:BUILDKITE_STEP_KEY -match "baseline") {
+  $True
+} else {
+  $False
+}
 
 $CC = "clang-cl"
 $CXX = "clang-cl"
@@ -52,7 +50,7 @@ $CXX = "clang-cl"
 $CFLAGS = '/O2 /Z7 /MT /O2 /Ob2 /DNDEBUG /U_DLL'
 $CXXFLAGS = '/O2 /Z7 /MT /O2 /Ob2 /DNDEBUG /U_DLL'
 
-if ($env:USE_LTO -eq "1") {
+if ($Lto) {
   $CXXFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc"
   $CFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc"
 }
@@ -63,6 +61,14 @@ $env:CPU_TARGET = $CPU_NAME
 $CFLAGS += " -march=${CPU_NAME}"
 $CXXFLAGS += " -march=${CPU_NAME}"
 
+$Canary = If ($env:CANARY) {
+  $env:CANARY
+} ElseIf ($env:BUILDKITE -eq "true") {
+  (buildkite-agent meta-data get canary)
+} Else {
+  "1"
+}
+
 $CMAKE_FLAGS = @(
   "-GNinja",
   "-DCMAKE_BUILD_TYPE=Release",
@@ -72,7 +78,8 @@ $CMAKE_FLAGS = @(
   "-DCMAKE_CXX_FLAGS=$CXXFLAGS",
   "-DCMAKE_C_FLAGS_RELEASE=$CFLAGS",
   "-DCMAKE_CXX_FLAGS_RELEASE=$CXXFLAGS",
-  "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded"
+  "-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
+  "-DCANARY=$Canary"
 )
 
 if (Get-Command llvm-lib -ErrorAction SilentlyContinue) {
@@ -92,6 +99,10 @@ if ($Baseline) {
   $CMAKE_FLAGS += "-DUSE_BASELINE_BUILD=ON"
 }
 
+if ($Lto) {
+  $CMAKE_FLAGS += "-DUSE_LTO=ON"
+}
+
 if (Get-Command sccache -ErrorAction SilentlyContinue) {
   # Continue with local compiler if sccache has an error
   $env:SCCACHE_IGNORE_SERVER_IO_ERROR = "1"
diff --git a/scripts/env.sh b/scripts/env.sh
index 6ff0e225afafc6..617c1c75ed38dd 100755
--- a/scripts/env.sh
+++ b/scripts/env.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-# Hack for Buildkite sometimes not having the right path
+# Hack for buildkite sometimes not having the right path
 if [[ "${CI:-}" == "1" || "${CI:-}" == "true" ]]; then
   if [ -f ~/.bashrc ]; then
     source ~/.bashrc
diff --git a/scripts/experimental-build.mjs b/scripts/experimental-build.mjs
index 33b26ab02fe72d..0dcc53138ccc5a 100755
--- a/scripts/experimental-build.mjs
+++ b/scripts/experimental-build.mjs
@@ -6,16 +6,162 @@ import { copyFileSync, existsSync, mkdirSync, mkdtempSync, readFileSync, readdir
 import { basename, dirname, join } from "node:path";
 import { tmpdir } from "node:os";
 
-const projectPath = dirname(import.meta.dirname);
-const vendorPath = process.env.BUN_VENDOR_PATH || join(projectPath, "vendor");
-
 const isWindows = process.platform === "win32";
 const isMacOS = process.platform === "darwin";
 const isLinux = process.platform === "linux";
+const cwd = dirname(import.meta.dirname);
 
 const spawnSyncTimeout = 1000 * 60;
 const spawnTimeout = 1000 * 60 * 3;
 
+/**
+ * @typedef {Object} S3UploadOptions
+ * @property {string} [bucket]
+ * @property {string} filename
+ * @property {string} content
+ * @property {Record} [headers]
+ */
+
+/**
+ * @param {S3UploadOptions} options
+ */
+async function uploadFileToS3(options) {
+  const { AwsV4Signer } = await import("aws4fetch");
+
+  const { bucket, filename, content, ...extra } = options;
+  const baseUrl = 
getEnv(["S3_ENDPOINT", "S3_BASE_URL", "AWS_ENDPOINT"], "https://s3.amazonaws.com"); + const bucketUrl = new URL(bucket || getEnv(["S3_BUCKET", "AWS_BUCKET"]), baseUrl); + + const signer = new AwsV4Signer({ + accessKeyId: getSecret(["S3_ACCESS_KEY_ID", "AWS_ACCESS_KEY_ID"]), + secretAccessKey: getSecret(["S3_SECRET_ACCESS_KEY", "AWS_SECRET_ACCESS_KEY"]), + url: new URL(filename, bucketUrl), + method: "PUT", + body: content, + ...extra, + }); + + const { url, method, headers, body } = signer.sign(); + await fetchSafe(url, { + method, + headers, + body, + }); + + console.log("Uploaded file to S3:", { + url: `${bucketUrl}`, + filename, + }); +} + +/** + * @typedef {Object} SentryRelease + * @property {string} organizationId + * @property {string} projectId + * @property {string} version + * @property {string} [url] + * @property {string} [ref] + * @property {string} [dateReleased] + */ + +/** + * @param {SentryRelease} options + * @returns {Promise} + */ +async function createSentryRelease(options) { + const { organizationId, projectId, ...body } = options; + + const baseUrl = getEnv("SENTRY_BASE_URL", "https://sentry.io"); + const url = new URL(`api/0/organizations/${organizationId}/releases`, baseUrl); + const accessToken = getSecret(["SENTRY_AUTH_TOKEN", "SENTRY_TOKEN"]); + + const release = await fetchSafe(url, { + method: "POST", + headers: { + "Authorization": `Bearer ${accessToken}`, + "Content-Type": "application/json", + }, + body: JSON.stringify(body), + format: "json", + }); + + console.log("Created Sentry release:", release); +} + +/** + * @return {string} + */ +function getGithubToken() { + const token = getEnv("GITHUB_TOKEN", null); + if (token) { + return token; + } + + const gh = which("gh"); + if (gh) { + const { exitCode, stdout } = spawnSyncSafe(gh, ["auth", "token"]); + if (exitCode === 0) { + return stdout.trim(); + } + } + + throw new Error("Failed to get GitHub token (set GITHUB_TOKEN or run `gh auth login`)"); +} + +/** + * @param {string | string[]} name + * @return {string} + */ +function getSecret(name) { + return getEnv(name); +} + +/** + * @param {string | string[]} name + * @param {string | null} [defaultValue] + * @returns {string | undefined} + */ +function getEnv(name, defaultValue) { + let result = defaultValue; + + for (const key of typeof name === "string" ? 
[name] : name) { + const value = process.env[key]; + if (value) { + result = value; + break; + } + } + + if (result || result === null) { + return result; + } + + throw new Error(`Environment variable is required: ${name}`); +} + +/** + * @typedef {Object} SpawnOptions + * @property {boolean} [throwOnError] + * @property {string} [cwd] + * @property {string} [env] + * @property {string} [encoding] + * @property {number} [timeout] + */ + +/** + * @typedef {Object} SpawnResult + * @property {number | null} exitCode + * @property {number | null} signalCode + * @property {string} stdout + * @property {string} stderr + */ + +/** + * @param {string} command + * @param {string[]} [args] + * @param {SpawnOptions} [options] + * @returns {Promise} + */ async function spawnSafe(command, args, options = {}) { const result = new Promise((resolve, reject) => { let stdout = ""; @@ -60,6 +206,12 @@ async function spawnSafe(command, args, options = {}) { } } +/** + * @param {string} command + * @param {string[]} [args] + * @param {SpawnOptions} [options] + * @returns {SpawnResult} + */ function spawnSyncSafe(command, args, options = {}) { try { const { error, status, signal, stdout, stderr } = spawnSync(command, args, { @@ -86,6 +238,20 @@ function spawnSyncSafe(command, args, options = {}) { } } +/** + * @typedef {Object} FetchOptions + * @property {string} [method] + * @property {Record} [headers] + * @property {string | Uint8Array} [body] + * @property {"json" | "text" | "bytes"} [format] + * @property {boolean} [throwOnError] + */ + +/** + * @param {string | URL} url + * @param {FetchOptions} [options] + * @returns {Promise} + */ async function fetchSafe(url, options = {}) { let response; try { @@ -138,47 +304,6 @@ function which(command, path) { return result.trimEnd(); } -function getZigTarget(os = process.platform, arch = process.arch) { - if (arch === "x64") { - if (os === "linux") return "linux-x86_64"; - if (os === "darwin") return "macos-x86_64"; - if (os === "win32") return "windows-x86_64"; - } - if (arch === "arm64") { - if (os === "linux") return "linux-aarch64"; - if (os === "darwin") return "macos-aarch64"; - } - throw new Error(`Unsupported zig target: os=${os}, arch=${arch}`); -} - -function getRecommendedZigVersion() { - const scriptPath = join(projectPath, "build.zig"); - try { - const scriptContent = readFileSync(scriptPath, "utf-8"); - const match = scriptContent.match(/recommended_zig_version = "([^"]+)"/); - if (!match) { - throw new Error("File does not contain string: 'recommended_zig_version'"); - } - return match[1]; - } catch (cause) { - throw new Error("Failed to find recommended Zig version", { cause }); - } -} - -/** - * @returns {Promise} - */ -async function getLatestZigVersion() { - try { - const response = await fetchSafe("https://ziglang.org/download/index.json", { format: "json" }); - const { master } = response; - const { version } = master; - return version; - } catch (cause) { - throw new Error("Failed to get latest Zig version", { cause }); - } -} - /** * @param {string} execPath * @returns {string | undefined} @@ -191,110 +316,3 @@ function getVersion(execPath) { } return result.trim(); } - -/** - * @returns {string} - */ -function getTmpdir() { - if (isMacOS && existsSync("/tmp")) { - return "/tmp"; - } - return tmpdir(); -} - -/** - * @returns {string} - */ -function mkTmpdir() { - return mkdtempSync(join(getTmpdir(), "bun-")); -} - -/** - * @param {string} url - * @param {string} [path] - * @returns {Promise} - */ -async function downloadFile(url, path) { - 
const outPath = path || join(mkTmpdir(), basename(url)); - const bytes = await fetchSafe(url, { format: "bytes" }); - mkdirSync(dirname(outPath), { recursive: true }); - writeFileSync(outPath, bytes); - return outPath; -} - -/** - * @param {string} tarPath - * @param {string} [path] - * @returns {Promise} - */ -async function extractFile(tarPath, path) { - const outPath = path || join(mkTmpdir(), basename(tarPath)); - mkdirSync(outPath, { recursive: true }); - await spawnSafe("tar", ["-xf", tarPath, "-C", outPath, "--strip-components=1"]); - return outPath; -} - -const dependencies = [ - { - name: "zig", - version: getRecommendedZigVersion(), - download: downloadZig, - }, -]; - -async function getDependencyPath(name) { - let dependency; - for (const entry of dependencies) { - if (name === entry.name) { - dependency = entry; - break; - } - } - if (!dependency) { - throw new Error(`Unknown dependency: ${name}`); - } - const { version, download } = dependency; - mkdirSync(vendorPath, { recursive: true }); - for (const path of readdirSync(vendorPath)) { - if (!path.startsWith(name)) { - continue; - } - const dependencyPath = join(vendorPath, path); - const dependencyVersion = getVersion(dependencyPath); - if (dependencyVersion === version) { - return dependencyPath; - } - } - if (!download) { - throw new Error(`Dependency not found: ${name}`); - } - return await download(version); -} - -/** - * @param {string} [version] - */ -async function downloadZig(version) { - const target = getZigTarget(); - const expectedVersion = version || getRecommendedZigVersion(); - const url = `https://ziglang.org/builds/zig-${target}-${expectedVersion}.tar.xz`; - const tarPath = await downloadFile(url); - const extractedPath = await extractFile(tarPath); - const zigPath = join(extractedPath, exePath("zig")); - const actualVersion = getVersion(zigPath); - const outPath = join(vendorPath, exePath(`zig-${actualVersion}`)); - mkdirSync(dirname(outPath), { recursive: true }); - copyFileSync(zigPath, outPath); - return outPath; -} - -/** - * @param {string} path - * @returns {string} - */ -function exePath(path) { - return isWindows ? 
`${path}.exe` : path; -} - -const execPath = await getDependencyPath("zig"); -console.log(execPath); diff --git a/scripts/runner.node.mjs b/scripts/runner.node.mjs index 7d50b148cb10bf..2523da8586ef01 100755 --- a/scripts/runner.node.mjs +++ b/scripts/runner.node.mjs @@ -26,7 +26,7 @@ import { normalize as normalizeWindows } from "node:path/win32"; import { isIP } from "node:net"; import { parseArgs } from "node:util"; -const spawnTimeout = 30_000; +const spawnTimeout = 5_000; const testTimeout = 3 * 60_000; const integrationTimeout = 5 * 60_000; @@ -231,18 +231,20 @@ async function runTests() { */ /** - * @param {SpawnOptions} request + * @param {SpawnOptions} options * @returns {Promise} */ -async function spawnSafe({ - command, - args, - cwd, - env, - timeout = spawnTimeout, - stdout = process.stdout.write.bind(process.stdout), - stderr = process.stderr.write.bind(process.stderr), -}) { +async function spawnSafe(options) { + const { + command, + args, + cwd, + env, + timeout = spawnTimeout, + stdout = process.stdout.write.bind(process.stdout), + stderr = process.stderr.write.bind(process.stderr), + retries = 0, + } = options; let exitCode; let signalCode; let spawnError; @@ -318,6 +320,16 @@ async function spawnSafe({ resolve(); } }); + if (spawnError && retries < 5) { + const { code } = spawnError; + if (code === "EBUSY" || code === "UNKNOWN") { + await new Promise(resolve => setTimeout(resolve, 1000 * (retries + 1))); + return spawnSafe({ + ...options, + retries: retries + 1, + }); + } + } let error; if (exitCode === 0) { // ... @@ -1332,7 +1344,7 @@ function formatTestToMarkdown(result, concise) { let markdown = ""; for (const { testPath, ok, tests, error, stdoutPreview: stdout } of results) { - if (ok) { + if (ok || error === "SIGTERM") { continue; } diff --git a/test/cli/run/run-crash-handler.test.ts b/test/cli/run/run-crash-handler.test.ts index 0769129f8ac70f..4923c6f06d4d0c 100644 --- a/test/cli/run/run-crash-handler.test.ts +++ b/test/cli/run/run-crash-handler.test.ts @@ -13,13 +13,11 @@ test.if(process.platform === "darwin")("macOS has the assumed image offset", () test("raise ignoring panic handler does not trigger the panic handler", async () => { let sent = false; - let onresolve = Promise.withResolvers(); using server = Bun.serve({ port: 0, fetch(request, server) { sent = true; - onresolve.resolve(); return new Response("OK"); }, }); @@ -34,11 +32,8 @@ test("raise ignoring panic handler does not trigger the panic handler", async () }, ]), }); - await proc.exited; - await Promise.race([onresolve.promise, Bun.sleep(1000)]); - - expect(proc.exitCode).not.toBe(0); + expect(proc.exited).resolves.not.toBe(0); expect(sent).toBe(false); }); @@ -46,7 +41,6 @@ describe("automatic crash reporter", () => { for (const approach of ["panic", "segfault", "outOfMemory"]) { test(`${approach} should report`, async () => { let sent = false; - let onresolve = Promise.withResolvers(); // Self host the crash report backend. 
using server = Bun.serve({
@@ -54,7 +48,6 @@ describe("automatic crash reporter", () => {
         fetch(request, server) {
           expect(request.url).toEndWith("/ack");
           sent = true;
-          onresolve.resolve();
           return new Response("OK");
         },
       });
@@ -72,15 +65,12 @@ describe("automatic crash reporter", () => {
         ]),
         stdio: ["ignore", "pipe", "pipe"],
       });
-      await proc.exited;
-
-      await Promise.race([onresolve.promise, Bun.sleep(1000)]);
-
+      const exitCode = await proc.exited;
       const stderr = await Bun.readableStreamToText(proc.stderr);
       console.log(stderr);
 
-      expect(proc.exitCode).not.toBe(0);
+      expect(exitCode).not.toBe(0);
       expect(stderr).toContain(server.url.toString());
 
       if (approach !== "outOfMemory") {
         expect(stderr).toContain("oh no: Bun has crashed. This indicates a bug in Bun, not your code");
diff --git a/test/js/bun/dns/resolve-dns.test.ts b/test/js/bun/dns/resolve-dns.test.ts
index b237d43a3fe2f1..747a2aa3a2702e 100644
--- a/test/js/bun/dns/resolve-dns.test.ts
+++ b/test/js/bun/dns/resolve-dns.test.ts
@@ -107,7 +107,7 @@ describe("dns", () => {
     test.each(malformedHostnames)("'%s'", hostname => {
       // @ts-expect-error
       expect(dns.lookup(hostname, { backend })).rejects.toMatchObject({
-        code: "DNS_ENOTFOUND",
+        code: expect.stringMatching(/^(DNS_ENOTFOUND|DNS_ESERVFAIL|DNS_ENOTIMP)$/),
        name: "DNSException",
       });
     });

From 2ce1fc52be5f072eae403401a445fae13bd78224 Mon Sep 17 00:00:00 2001
From: Ashcon Partovi
Date: Mon, 29 Jul 2024 14:50:26 -0700
Subject: [PATCH 38/46] Allow creating release builds with 'RELEASE=1'

---
 .buildkite/scripts/prepare-build.sh | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh
index faaa046811f47d..1c245d961893fc 100755
--- a/.buildkite/scripts/prepare-build.sh
+++ b/.buildkite/scripts/prepare-build.sh
@@ -48,6 +48,12 @@ function assert_command() {
   fi
 }
 
+function assert_release() {
+  if [ "$RELEASE" == "1" ]; then
+    run_command buildkite-agent meta-data set canary "0"
+  fi
+}
+
 function assert_canary() {
   local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
   if [ -z "$canary" ]; then
@@ -63,8 +69,8 @@ function assert_canary() {
         canary="$revision"
       fi
     fi
+    run_command buildkite-agent meta-data set canary "$canary"
   fi
-  run_command buildkite-agent meta-data set canary "$canary"
 }
 
 function upload_buildkite_pipeline() {
@@ -86,5 +92,6 @@ assert_build
 assert_buildkite_agent
 assert_jq
 assert_curl
+assert_release
 assert_canary
 upload_buildkite_pipeline ".buildkite/ci.yml"

From cc5f659fbeb38a0fe55759a7fdeb0402c3d476b7 Mon Sep 17 00:00:00 2001
From: Ashcon Partovi
Date: Mon, 29 Jul 2024 14:54:29 -0700
Subject: [PATCH 39/46] Only upload canary artifacts when the build is canary

---
 .buildkite/scripts/upload-release.sh | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/.buildkite/scripts/upload-release.sh b/.buildkite/scripts/upload-release.sh
index b3d4c0a415975b..def9394ee17565 100755
--- a/.buildkite/scripts/upload-release.sh
+++ b/.buildkite/scripts/upload-release.sh
@@ -181,4 +181,13 @@ function create_release() {
   create_sentry_release "$tag"
 }
 
+function assert_canary() {
+  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
+  if [ -z "$canary" ] || [ "$canary" == "0" ]; then
+    echo "warn: Skipping release because this is not a canary build"
+    exit 0
+  fi
+}
+
+assert_canary
 create_release "canary"

From e1c76d8a8fc1ff866ac3ddd9c2528748c48aed9c Mon Sep 17 00:00:00 2001
From: Ashcon Partovi
Date: Mon, 29 Jul 2024 15:29:46 -0700
Subject: [PATCH 40/46] Fix release script
--- .buildkite/scripts/upload-release.sh | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.buildkite/scripts/upload-release.sh b/.buildkite/scripts/upload-release.sh index def9394ee17565..5627b53430a3ff 100755 --- a/.buildkite/scripts/upload-release.sh +++ b/.buildkite/scripts/upload-release.sh @@ -171,13 +171,10 @@ function create_release() { bun-windows-x64-baseline-profile.zip ) - for artifact in "${artifacts[@]}"; do - download_buildkite_artifact "$artifact" - done - - upload_github_assets "$tag" "${artifacts[@]}" + download_buildkite_artifacts "." "${artifacts[@]}" upload_s3_files "releases/$BUILDKITE_COMMIT" "${artifacts[@]}" upload_s3_files "releases/$tag" "${artifacts[@]}" + upload_github_assets "$tag" "${artifacts[@]}" create_sentry_release "$tag" } From e867bdca8edd572ead2c91b3e32961835f0067e8 Mon Sep 17 00:00:00 2001 From: Dave Caruso Date: Mon, 29 Jul 2024 16:58:55 -0700 Subject: [PATCH 41/46] fix edge cases --- src/bundler.zig | 1 + src/bundler/bundle_v2.zig | 6 +++ src/js_parser.zig | 67 +++++++++++++-------------- src/js_printer.zig | 18 ++++++- test/bundler/bundler_edgecase.test.ts | 25 ++++++++++ test/cli/run/run-importmetamain.ts | 24 ++++++++++ 6 files changed, 104 insertions(+), 37 deletions(-) create mode 100644 test/cli/run/run-importmetamain.ts diff --git a/src/bundler.zig b/src/bundler.zig index 3f7de364e37c23..d2cea8f179d165 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -1199,6 +1199,7 @@ pub const Bundler = struct { .inline_require_and_import_errors = false, .import_meta_ref = ast.import_meta_ref, .runtime_transpiler_cache = runtime_transpiler_cache, + .target = bundler.options.target, }, enable_source_map, ), diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index f91d179556f2a0..1c80a95603cd05 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -765,6 +765,7 @@ pub const BundleV2 = struct { generator.linker.options.source_maps = bundler.options.source_map; generator.linker.options.tree_shaking = bundler.options.tree_shaking; generator.linker.options.public_path = bundler.options.public_path; + generator.linker.options.target = bundler.options.target; var pool = try generator.graph.allocator.create(ThreadPool); if (enable_reloading) { @@ -2883,6 +2884,8 @@ pub const ParseTask = struct { // in which we inline `true`. 
if (bundler.options.inline_entrypoint_import_meta_main or !task.is_entry_point) { opts.import_meta_main_value = task.is_entry_point; + } else if (bundler.options.target == .node) { + opts.lower_import_meta_main_for_node_js = true; } opts.tree_shaking = if (source.index.isRuntime()) true else bundler.options.tree_shaking; @@ -3866,6 +3869,7 @@ pub const LinkerContext = struct { minify_syntax: bool = false, minify_identifiers: bool = false, source_maps: options.SourceMapOption = .none, + target: options.Target = .browser, mode: Mode = Mode.bundle, @@ -6814,6 +6818,7 @@ pub const LinkerContext = struct { .minify_whitespace = c.options.minify_whitespace, .minify_identifiers = c.options.minify_identifiers, .minify_syntax = c.options.minify_syntax, + .target = c.options.target, // .const_values = c.graph.const_values, }; @@ -9037,6 +9042,7 @@ pub const LinkerContext = struct { c, ), .line_offset_tables = c.graph.files.items(.line_offset_table)[part_range.source_index.get()], + .target = c.options.target, }; writer.buffer.reset(); diff --git a/src/js_parser.zig b/src/js_parser.zig index 70a5402d0d8e53..1ae073b86f1714 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -3094,6 +3094,7 @@ pub const Parser = struct { /// Used for inlining the state of import.meta.main during visiting import_meta_main_value: ?bool = null, + lower_import_meta_main_for_node_js: bool = false, pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void { bun.assert(!this.bundle); @@ -7528,13 +7529,7 @@ fn NewParser_( if (equality.is_require_main_and_module) { p.ignoreUsageOfRuntimeRequire(); p.ignoreUsage(p.module_ref); - return .{ - .loc = v.loc, - .data = if (p.options.import_meta_main_value) |known| - .{ .e_boolean = .{ .value = known } } - else - .{ .e_import_meta_main = .{} }, - }; + return p.valueForImportMetaMain(false, v.loc); } return p.newExpr( @@ -7563,13 +7558,7 @@ fn NewParser_( if (equality.is_require_main_and_module) { p.ignoreUsage(p.module_ref); p.ignoreUsageOfRuntimeRequire(); - return .{ - .loc = v.loc, - .data = if (p.options.import_meta_main_value) |known| - .{ .e_boolean = .{ .value = known } } - else - .{ .e_import_meta_main = .{} }, - }; + return p.valueForImportMetaMain(false, v.loc); } return p.newExpr(E.Boolean{ .value = equality.equal }, v.loc); @@ -7585,13 +7574,7 @@ fn NewParser_( if (equality.is_require_main_and_module) { p.ignoreUsage(p.module_ref); p.ignoreUsageOfRuntimeRequire(); - return .{ - .loc = v.loc, - .data = if (p.options.import_meta_main_value) |known| - .{ .e_boolean = .{ .value = !known } } - else - .{ .e_import_meta_main = .{ .inverted = true } }, - }; + return p.valueForImportMetaMain(true, v.loc); } return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc); @@ -7611,13 +7594,7 @@ fn NewParser_( if (equality.is_require_main_and_module) { p.ignoreUsage(p.module_ref); p.ignoreUsageOfRuntimeRequire(); - return .{ - .loc = v.loc, - .data = if (p.options.import_meta_main_value) |known| - .{ .e_boolean = .{ .value = !known } } - else - .{ .e_import_meta_main = .{ .inverted = true } }, - }; + return p.valueForImportMetaMain(true, v.loc); } return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc); @@ -17937,6 +17914,32 @@ fn NewParser_( }; } + inline fn valueForImportMetaMain(p: *P, inverted: bool, loc: logger.Loc) Expr { + if (p.options.import_meta_main_value) |known| { + return .{ .loc = loc, .data = .{ .e_boolean = .{ .value = if (inverted) !known else known } } }; + } else { + // Node.js does not have 
import.meta.main, so we end up lowering + // this to `require.main === module`, but with the ESM format, + // both `require` and `module` are not present, so the code + // generation we need is: + // + // import { createRequire } from "node:module"; + // var __require = createRequire(import.meta.url); + // var import_meta_main = __require.main === __require.module; + // + // The printer can handle this for us, but we need to reference + // a handle to the `__require` function. + if (p.options.lower_import_meta_main_for_node_js) { + p.recordUsageOfRuntimeRequire(); + } + + return .{ + .loc = loc, + .data = .{ .e_import_meta_main = .{ .inverted = inverted } }, + }; + } + } + fn visitArgs(p: *P, args: []G.Arg, opts: VisitArgsOpts) void { const strict_loc = fnBodyContainsUseStrict(opts.body); const has_simple_args = isSimpleParameterList(args, opts.has_rest_arg); @@ -19008,13 +19011,7 @@ fn NewParser_( } if (strings.eqlComptime(name, "main")) { - return .{ - .loc = target.loc, - .data = if (p.options.import_meta_main_value) |known| - .{ .e_boolean = .{ .value = known } } - else - .{ .e_import_meta_main = .{} }, - }; + return p.valueForImportMetaMain(false, target.loc); } }, .e_require_call_target => { diff --git a/src/js_printer.zig b/src/js_printer.zig index 1458dafc964ed5..7d5e8a2bcd036d 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -526,6 +526,7 @@ pub const Options = struct { source_map_handler: ?SourceMapHandler = null, source_map_builder: ?*bun.sourcemap.Chunk.Builder = null, css_import_behavior: Api.CssInJsBehavior = Api.CssInJsBehavior.facade, + target: options.Target = .browser, runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null, @@ -2331,7 +2332,8 @@ fn NewPrinter( } }, .e_import_meta_main => |data| { - if (p.options.module_type == .esm) { + if (p.options.module_type == .esm and p.options.target != .node) { + // Node.js doesn't support import.meta.main // Most of the time, leave it in there if (data.inverted) { p.addSourceMapping(expr.loc); @@ -2355,7 +2357,19 @@ fn NewPrinter( else p.printWhitespacer(ws(".main == ")); - p.printSymbol(p.options.commonjs_module_ref); + if (p.options.target == .node) { + // "__require.module" + if (p.options.require_ref) |require| + p.printSymbol(require) + else + p.print("require"); + + p.print(".module"); + } else if (p.options.commonjs_module_ref.isValid()) { + p.printSymbol(p.options.commonjs_module_ref); + } else { + p.print(""); + } } }, .e_commonjs_export_identifier => |id| { diff --git a/test/bundler/bundler_edgecase.test.ts b/test/bundler/bundler_edgecase.test.ts index c0398f9d2d23be..83fdeccecc4640 100644 --- a/test/bundler/bundler_edgecase.test.ts +++ b/test/bundler/bundler_edgecase.test.ts @@ -1780,6 +1780,31 @@ describe("bundler", () => { `, }, capture: ["false", "false", "import.meta.main", "import.meta.main"], + onAfterBundle(api) { + // This should not be marked as a CommonJS module + api.expectFile("/out.js").not.toContain("require"); + api.expectFile("/out.js").not.toContain("module"); + }, + }); + itBundled("edgecase/ImportMetaMainTargetNode", { + files: { + "/entry.ts": /* js */ ` + import {other} from './other'; + console.log(capture(import.meta.main), capture(require.main === module), ...other); + `, + "/other.ts": ` + globalThis['ca' + 'pture'] = x => x; + + export const other = [capture(require.main === module), capture(import.meta.main)]; + `, + }, + target: 'node', + capture: ["false", "false", "__require.main == __require.module", "__require.main == __require.module"], + onAfterBundle(api) { + // 
This should not be marked as a CommonJS module + api.expectFile("/out.js").not.toMatch(/\brequire\b/); // __require is ok + api.expectFile("/out.js").not.toMatch(/[^\.:]module/); // `.module` and `node:module` are ok. + }, }); // TODO(@paperdave): test every case of this. I had already tested it manually, but it may break later diff --git a/test/cli/run/run-importmetamain.ts b/test/cli/run/run-importmetamain.ts new file mode 100644 index 00000000000000..66994e79a8417f --- /dev/null +++ b/test/cli/run/run-importmetamain.ts @@ -0,0 +1,24 @@ +import { expect, test } from "bun:test"; +import { mkdirSync } from "fs"; +import { bunEnv, bunExe, tmpdirSync } from "harness"; +import { join } from "path"; + +test("import.meta.main", async () => { + const dir = tmpdirSync(); + mkdirSync(dir, { recursive: true }); + await Bun.write(join(dir, "index1.js"), `import "fs"; console.log(JSON.stringify([typeof require, import.meta.main, !import.meta.main, require.main === module, require.main !== module]));`); + const { stdout } = Bun.spawnSync({ + cmd: [bunExe(), join(dir, "index1.js")], + cwd: dir, + env: bunEnv, + stderr: 'inherit', + stdout: 'pipe', + }); + expect(stdout.toString("utf8").trim()).toEqual(JSON.stringify([ + "function", + true, + false, + true, + false, + ])); +}); From 9b8100be9d2e857183fd79a793bc74dea949db3e Mon Sep 17 00:00:00 2001 From: Dave Caruso Date: Mon, 29 Jul 2024 17:09:20 -0700 Subject: [PATCH 42/46] fix submodule --- src/bun.js/WebKit | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bun.js/WebKit b/src/bun.js/WebKit index 49907bff878171..f9a0fda2d2b2fd 160000 --- a/src/bun.js/WebKit +++ b/src/bun.js/WebKit @@ -1 +1 @@ -Subproject commit 49907bff8781719bc2ded068b0c934f6d0074d1e +Subproject commit f9a0fda2d2b2fd001a00bfcf8e7917a56b382516 From c264884af22f77c41c8b06e1aff0f6cda9879d27 Mon Sep 17 00:00:00 2001 From: Dave Caruso Date: Mon, 29 Jul 2024 17:10:15 -0700 Subject: [PATCH 43/46] oops --- .buildkite/scripts/env.sh | 248 ++++++++++++++++++-------------------- 1 file changed, 114 insertions(+), 134 deletions(-) diff --git a/.buildkite/scripts/env.sh b/.buildkite/scripts/env.sh index 9f644a6ac4d1d4..b09f799bf68e0b 100755 --- a/.buildkite/scripts/env.sh +++ b/.buildkite/scripts/env.sh @@ -1,139 +1,119 @@ -#!/usr/bin/env bash - -# Hack for buildkite sometimes not having the right path -if [[ "${CI:-}" == "1" || "${CI:-}" == "true" ]]; then - if [ -f ~/.bashrc ]; then - source ~/.bashrc +#!/bin/bash + +set -eo pipefail + +function assert_os() { + local os="$(uname -s)" + case "$os" in + Linux) + echo "linux" ;; + Darwin) + echo "darwin" ;; + *) + echo "error: Unsupported operating system: $os" 1>&2 + exit 1 + ;; + esac +} + +function assert_arch() { + local arch="$(uname -m)" + case "$arch" in + aarch64 | arm64) + echo "aarch64" ;; + x86_64 | amd64) + echo "x64" ;; + *) + echo "error: Unknown architecture: $arch" 1>&2 + exit 1 + ;; + esac +} + +function assert_build() { + if [ -z "$BUILDKITE_REPO" ]; then + echo "error: Cannot find repository for this build" + exit 1 fi -fi - -if [[ $(uname -s) == 'Darwin' ]]; then - export LLVM_VERSION=18 -else - export LLVM_VERSION=16 -fi - -# this is the environment script for building bun's dependencies -# it sets c compiler and flags -export SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) -export BUN_BASE_DIR=${BUN_BASE_DIR:-$(cd $SCRIPT_DIR && cd .. 
&& pwd)} -export BUN_DEPS_DIR=${BUN_DEPS_DIR:-$BUN_BASE_DIR/src/deps} -export BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/build/bun-deps} - -# Silence a perl script warning -export LC_CTYPE="en_US.UTF-8" -export LC_ALL="en_US.UTF-8" - -if [[ $(uname -s) == 'Darwin' ]]; then - export CXX="$(brew --prefix llvm)@$LLVM_VERSION/bin/clang++" - export CC="$(brew --prefix llvm)@$LLVM_VERSION/bin/clang" - export AR="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-ar" - export RANLIB="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-ranlib" - export LIBTOOL="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-libtool-darwin" - export PATH="$(brew --prefix llvm)@$LLVM_VERSION/bin:$PATH" - ln -sf $LIBTOOL "$(brew --prefix llvm)@$LLVM_VERSION/bin/libtool" || true -elif [[ "$CI" != "1" && "$CI" != "true" ]]; then - if [[ -f $SCRIPT_DIR/env.local ]]; then - echo "Sourcing $SCRIPT_DIR/env.local" - source $SCRIPT_DIR/env.local + if [ -z "$BUILDKITE_COMMIT" ]; then + echo "error: Cannot find commit for this build" + exit 1 fi -fi - -# this compiler detection could be better -export CC=${CC:-$(which clang-$LLVM_VERSION || which clang || which cc)} -export CXX=${CXX:-$(which clang++-$LLVM_VERSION || which clang++ || which c++)} -export AR=${AR:-$(which llvm-ar || which ar)} -export CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)} -export RANLIB=${RANLIB:-$(which llvm-ranlib-$LLVM_VERSION || which llvm-ranlib || which ranlib)} - -# on Linux, force using lld as the linker -if [[ $(uname -s) == 'Linux' ]]; then - export LD=${LD:-$(which ld.lld-$LLVM_VERSION || which ld.lld || which ld)} - export LDFLAGS="${LDFLAGS} -fuse-ld=lld " -fi - -export CMAKE_CXX_COMPILER=${CXX} -export CMAKE_C_COMPILER=${CC} - -export CFLAGS='-O3 -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables ' -export CXXFLAGS='-O3 -fno-exceptions -fno-rtti -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-c++-static-destructors ' - -# Add flags for LTO -# We cannot enable LTO on macOS for dependencies because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size) -if [ "$BUN_ENABLE_LTO" == "1" ]; then - export CFLAGS="$CFLAGS -flto=full " - export CXXFLAGS="$CXXFLAGS -flto=full -fwhole-program-vtables -fforce-emit-vtables " - export LDFLAGS="$LDFLAGS -flto=full -fwhole-program-vtables -fforce-emit-vtables " -fi - -if [[ $(uname -s) == 'Linux' ]]; then - export CFLAGS="$CFLAGS -ffunction-sections -fdata-sections -faddrsig " - export CXXFLAGS="$CXXFLAGS -ffunction-sections -fdata-sections -faddrsig " - export LDFLAGS="${LDFLAGS} -Wl,-z,norelro" -fi - -# Clang 18 on macOS needs to have -fno-define-target-os-macros to fix a zlib build issue -# https://gitlab.kitware.com/cmake/cmake/-/issues/25755 -if [[ $(uname -s) == 'Darwin' && $LLVM_VERSION == '18' ]]; then - export CFLAGS="$CFLAGS -fno-define-target-os-macros " - export CXXFLAGS="$CXXFLAGS -fno-define-target-os-macros -D_LIBCXX_ENABLE_ASSERTIONS=0 -D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE " -fi - -# libarchive needs position-independent executables to compile successfully -if [ -n "$FORCE_PIC" ]; then - export CFLAGS="$CFLAGS -fPIC " - export CXXFLAGS="$CXXFLAGS -fPIC " -elif [[ $(uname -s) == 'Linux' ]]; then - export CFLAGS="$CFLAGS -fno-pie -fno-pic " - export CXXFLAGS="$CXXFLAGS -fno-pie -fno-pic " -fi - -if [[ $(uname -s) == 
'Linux' && ($(uname -m) == 'aarch64' || $(uname -m) == 'arm64') ]]; then - export CFLAGS="$CFLAGS -march=armv8-a+crc -mtune=ampere1 " - export CXXFLAGS="$CXXFLAGS -march=armv8-a+crc -mtune=ampere1 " -fi - -export CMAKE_FLAGS=( - -DCMAKE_C_COMPILER="${CC}" - -DCMAKE_CXX_COMPILER="${CXX}" - -DCMAKE_C_FLAGS="$CFLAGS" - -DCMAKE_CXX_FLAGS="$CXXFLAGS" - -DCMAKE_BUILD_TYPE=Release - -DCMAKE_CXX_STANDARD=20 - -DCMAKE_C_STANDARD=17 - -DCMAKE_CXX_STANDARD_REQUIRED=ON - -DCMAKE_C_STANDARD_REQUIRED=ON -) - -CCACHE=$(which ccache || which sccache || echo "") -if [ -f "$CCACHE" ]; then - CMAKE_FLAGS+=( - -DCMAKE_C_COMPILER_LAUNCHER="$CCACHE" - -DCMAKE_CXX_COMPILER_LAUNCHER="$CCACHE" - ) -fi - -if [[ $(uname -s) == 'Linux' ]]; then - # Ensure we always use -std=gnu++20 on Linux - CMAKE_FLAGS+=(-DCMAKE_CXX_EXTENSIONS=ON) -fi - -if [[ $(uname -s) == 'Darwin' ]]; then - export CMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET:-13.0} - CMAKE_FLAGS+=(-DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}) - export CFLAGS="$CFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 " - export CXXFLAGS="$CXXFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 " -fi - -mkdir -p $BUN_DEPS_OUT_DIR + if [ -z "$BUILDKITE_STEP_KEY" ]; then + echo "error: Cannot find step key for this build" + exit 1 + fi + if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'" + exit 1 + fi + # Skip os and arch checks for Zig, since it's cross-compiled on macOS + if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then + local os="$(assert_os)" + if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'" + exit 1 + fi + local arch="$(assert_arch)" + if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then + echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'" + exit 1 + fi + fi +} -if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then - echo "C Compiler: ${CC}" - echo "C++ Compiler: ${CXX}" - if [ -n "$CCACHE" ]; then - echo "Ccache: ${CCACHE}" +function assert_buildkite_agent() { + if ! 
command -v buildkite-agent &> /dev/null; then + echo "error: Cannot find buildkite-agent, please install it:" + echo "https://buildkite.com/docs/agent/v3/install" + exit 1 + fi +} + +function export_environment() { + source "$(realpath $(dirname "$0")/../../scripts/env.sh)" + source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)" + { set +x; } 2>/dev/null + export GIT_SHA="$BUILDKITE_COMMIT" + export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY" + export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY" + export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY" + export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY" + if [ "$(assert_arch)" == "aarch64" ]; then + export CPU_TARGET="native" + elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then + export CPU_TARGET="nehalem" + else + export CPU_TARGET="haswell" + fi + if [[ "$BUILDKITE_STEP_KEY" == *"nolto"* ]]; then + export USE_LTO="OFF" + else + export USE_LTO="ON" + fi + if $(buildkite-agent meta-data exists release &> /dev/null); then + export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)" + else + export CMAKE_BUILD_TYPE="Release" fi - if [[ $(uname -s) == 'Darwin' ]]; then - echo "OSX Deployment Target: ${CMAKE_OSX_DEPLOYMENT_TARGET}" + if $(buildkite-agent meta-data exists canary &> /dev/null); then + export CANARY="$(buildkite-agent meta-data get canary)" + else + export CANARY="1" fi -fi \ No newline at end of file + if $(buildkite-agent meta-data exists assertions &> /dev/null); then + export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)" + else + export USE_DEBUG_JSC="OFF" + fi + if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" ]; then + rm -rf "$CCACHE_DIR" + rm -rf "$SCCACHE_DIR" + rm -rf "$ZIG_LOCAL_CACHE_DIR" + rm -rf "$BUN_DEPS_CACHE_DIR" + fi +} + +assert_build +assert_buildkite_agent +export_environment From 6ceca8698719c79a57160ebd3f4bf1a3e2fcd4a7 Mon Sep 17 00:00:00 2001 From: Dave Caruso Date: Mon, 29 Jul 2024 17:10:58 -0700 Subject: [PATCH 44/46] oops --- src/deps/WebKit | 1 - 1 file changed, 1 deletion(-) delete mode 160000 src/deps/WebKit diff --git a/src/deps/WebKit b/src/deps/WebKit deleted file mode 160000 index c737b24765cddf..00000000000000 --- a/src/deps/WebKit +++ /dev/null @@ -1 +0,0 @@ -Subproject commit c737b24765cddf5294c425b2e23dd381f1e0b33e From 20f8965150135d4730f58aa750e2efa84f5773da Mon Sep 17 00:00:00 2001 From: Dave Caruso Date: Mon, 29 Jul 2024 17:24:50 -0700 Subject: [PATCH 45/46] fix commonjs --- src/js_printer.zig | 2 +- test/cli/run/run-importmetamain.ts | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/src/js_printer.zig b/src/js_printer.zig index 7d5e8a2bcd036d..11e7d203ff5ec5 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -2368,7 +2368,7 @@ fn NewPrinter( } else if (p.options.commonjs_module_ref.isValid()) { p.printSymbol(p.options.commonjs_module_ref); } else { - p.print(""); + p.print("module"); } } }, diff --git a/test/cli/run/run-importmetamain.ts b/test/cli/run/run-importmetamain.ts index 66994e79a8417f..e56414b366d5ab 100644 --- a/test/cli/run/run-importmetamain.ts +++ b/test/cli/run/run-importmetamain.ts @@ -22,3 +22,23 @@ test("import.meta.main", async () => { false, ])); }); + +test("import.meta.main in a common.js file", async () => { + const dir = tmpdirSync(); + mkdirSync(dir, { recursive: true }); + await Bun.write(join(dir, "index1.js"), `module.exports = {}; console.log(JSON.stringify([typeof require, import.meta.main, !import.meta.main, 
require.main === module, require.main !== module]));`); + const { stdout } = Bun.spawnSync({ + cmd: [bunExe(), join(dir, "index1.js")], + cwd: dir, + env: bunEnv, + stderr: 'inherit', + stdout: 'pipe', + }); + expect(stdout.toString("utf8").trim()).toEqual(JSON.stringify([ + "function", + true, + false, + true, + false, + ])); +}); From 0aec3c7a7fbd1de161df46e2fe37a3271788e64e Mon Sep 17 00:00:00 2001 From: dave caruso Date: Thu, 1 Aug 2024 15:13:47 -0700 Subject: [PATCH 46/46] requested changes --- src/js_ast.zig | 28 ++++++++++------------------ src/js_parser.zig | 1 + 2 files changed, 11 insertions(+), 18 deletions(-) diff --git a/src/js_ast.zig b/src/js_ast.zig index 34d6c652927637..6f46782b1fc81b 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -5938,27 +5938,19 @@ pub const Expr = struct { } }, - .e_require_main => { - if (right.as(.e_identifier)) |id| { - if (id.ref.eql(p.module_ref)) return .{ - .ok = true, - .equal = true, - .is_require_main_and_module = true, - }; - } - }, - - .e_identifier => |id| { + else => { + // Do not need to check left because e_require_main is + // always re-ordered to the right side. if (right == .e_require_main) { - if (id.ref.eql(p.module_ref)) return .{ - .ok = true, - .equal = true, - .is_require_main_and_module = true, - }; + if (left.as(.e_identifier)) |id| { + if (id.ref.eql(p.module_ref)) return .{ + .ok = true, + .equal = true, + .is_require_main_and_module = true, + }; + } } }, - - else => {}, } return Equality.unknown; diff --git a/src/js_parser.zig b/src/js_parser.zig index 1ae073b86f1714..5fa3c7b375203f 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -1837,6 +1837,7 @@ pub const SideEffects = enum(u1) { .e_number, .e_big_int, .e_inlined_enum, + .e_require_main, => true, else => false, };