diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index 16fd9bf5cc2dde..61264f30e8942f 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -492,6 +492,8 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std &transpiler.log, source, allocator, + .json, + false, ) catch null) orelse break :macros; transpiler.macro_map = PackageJSON.parseMacrosJSON(allocator, json, &transpiler.log, &source); } diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index da9ba3652a7f40..0311fa0eb1c5ca 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -4224,6 +4224,10 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderFetch(JSGlobalObject* globalOb if (params.type() == ScriptFetchParameters::Type::HostDefined) { typeAttributeString = params.hostDefinedImportType(); + } else if (params.type() == ScriptFetchParameters::Type::JSON) { + typeAttributeString = "json"_s; + } else if (params.type() == ScriptFetchParameters::Type::WebAssembly) { + typeAttributeString = "webassembly"_s; } } } diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 146ed7e35b3763..53cb075f67a53d 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -252,6 +252,7 @@ pub const RuntimeTranspilerStore = struct { input_specifier: bun.String, path: Fs.Path, referrer: bun.String, + loader: bun.options.Loader, ) *anyopaque { var job: *TranspilerJob = this.store.get(); const owned_path = Fs.Path.init(bun.default_allocator.dupe(u8, path.text) catch unreachable); @@ -263,7 +264,7 @@ pub const RuntimeTranspilerStore = struct { .non_threadsafe_referrer = referrer, .vm = vm, .log = logger.Log.init(bun.default_allocator), - .loader = vm.transpiler.options.loader(owned_path.name.ext), + .loader = loader, .promise = JSC.Strong.create(JSC.JSValue.fromCell(promise), globalObject), .poll_ref = .{}, .fetcher = TranspilerJob.Fetcher{ @@ -1507,7 +1508,7 @@ pub const ModuleLoader = struct { const disable_transpilying = comptime flags.disableTranspiling(); if (comptime disable_transpilying) { - if (!(loader.isJavaScriptLike() or loader == .toml or loader == .text or loader == .json)) { + if (!(loader.isJavaScriptLike() or loader == .toml or loader == .text or loader == .json or loader == .jsonc)) { // Don't print "export default " return ResolvedSource{ .allocator = null, @@ -1520,7 +1521,7 @@ pub const ModuleLoader = struct { } switch (loader) { - .js, .jsx, .ts, .tsx, .json, .toml, .text => { + .js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .text => { jsc_vm.transpiled_count += 1; jsc_vm.transpiler.resetStore(); const hash = bun.Watcher.getHash(path.text); @@ -1659,7 +1660,7 @@ pub const ModuleLoader = struct { } var parse_result: ParseResult = switch (disable_transpilying or - (loader == .json and !path.isJSONCFile())) { + (loader == .json)) { inline else => |return_file_only| brk: { const heap_access = if (!disable_transpilying) jsc_vm.jsc.releaseHeapAccess() @@ -1738,7 +1739,7 @@ pub const ModuleLoader = struct { return error.ParseError; } - if (loader == .json and !path.isJSONCFile()) { + if (loader == .json) { return ResolvedSource{ .allocator = null, .source_code = bun.String.createUTF8(parse_result.source.contents), @@ -1764,7 +1765,7 @@ pub const ModuleLoader = struct { }; } - if (loader == .json or loader == .toml) { + if (loader == .json or loader == .jsonc or loader == .toml) { if (parse_result.empty) { return 
ResolvedSource{ .allocator = null, @@ -2293,7 +2294,7 @@ pub const ModuleLoader = struct { // Deliberately optional. // The concurrent one only handles javascript-like loaders right now. - var loader: ?options.Loader = jsc_vm.transpiler.options.loaders.get(path.name.ext); + var loader: ?options.Loader = path.loader(&jsc_vm.transpiler.options.loaders); if (jsc_vm.module_loader.eval_source) |eval_source| { if (strings.endsWithComptime(specifier, bun.pathLiteral("/[eval]"))) { @@ -2320,7 +2321,7 @@ pub const ModuleLoader = struct { path = current_path; } - loader = jsc_vm.transpiler.options.loaders.get(current_path.name.ext) orelse .tsx; + loader = current_path.loader(&jsc_vm.transpiler.options.loaders) orelse .tsx; } else { loader = .tsx; } @@ -2338,36 +2339,9 @@ pub const ModuleLoader = struct { } } - if (type_attribute) |attribute| { - if (attribute.eqlComptime("sqlite")) { - loader = .sqlite; - } else if (attribute.eqlComptime("text")) { - loader = .text; - } else if (attribute.eqlComptime("json")) { - loader = .json; - } else if (attribute.eqlComptime("toml")) { - loader = .toml; - } else if (attribute.eqlComptime("file")) { - loader = .file; - } else if (attribute.eqlComptime("js")) { - loader = .js; - } else if (attribute.eqlComptime("jsx")) { - loader = .jsx; - } else if (attribute.eqlComptime("ts")) { - loader = .ts; - } else if (attribute.eqlComptime("tsx")) { - loader = .tsx; - } else if (attribute.eqlComptime("html")) { - loader = .html; - } - } - - // If we were going to choose file loader, see if it's a bun.lock - if (loader == null) { - if (strings.eqlComptime(path.name.filename, "bun.lock")) { - loader = .json; - } - } + if (type_attribute) |attribute| if (attribute.asUTF8()) |attr_utf8| if (bun.options.Loader.fromString(attr_utf8)) |attr_loader| { + loader = attr_loader; + }; // We only run the transpiler concurrently when we can. 
// Today, that's: @@ -2389,6 +2363,7 @@ pub const ModuleLoader = struct { specifier_ptr.dupeRef(), path, referrer.dupeRef(), + concurrent_loader, ); } } diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 9b2ca8f32c074c..41d2289d4db12f 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -824,8 +824,8 @@ pub const BundleV2 = struct { path.* = this.pathWithPrettyInitialized(path.*, target) catch bun.outOfMemory(); const loader: Loader = (brk: { if (import_record.importer_source_index) |importer| { - var record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index]; - if (record.loader()) |out_loader| { + const record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index]; + if (record.loader) |out_loader| { break :brk out_loader; } } @@ -1329,10 +1329,7 @@ pub const BundleV2 = struct { this.graph.input_files.append(bun.default_allocator, .{ .source = source, .loader = loader, - .side_effects = switch (loader) { - .text, .json, .toml, .file => _resolver.SideEffects.no_side_effects__pure_data, - else => _resolver.SideEffects.has_side_effects, - }, + .side_effects = loader.sideEffects(), }) catch bun.outOfMemory(); var task = this.graph.allocator.create(ParseTask) catch bun.outOfMemory(); task.* = ParseTask.init(resolve_result, source_index, this); @@ -1371,10 +1368,7 @@ pub const BundleV2 = struct { this.graph.input_files.append(bun.default_allocator, .{ .source = source, .loader = loader, - .side_effects = switch (loader) { - .text, .json, .toml, .file => .no_side_effects__pure_data, - else => .has_side_effects, - }, + .side_effects = loader.sideEffects(), }) catch bun.outOfMemory(); var task = this.graph.allocator.create(ParseTask) catch bun.outOfMemory(); task.* = .{ @@ -2666,12 +2660,12 @@ pub const BundleV2 = struct { } // By default, we treat .sqlite files as external. - if (import_record.tag == .with_type_sqlite) { + if (import_record.loader != null and import_record.loader.? == .sqlite) { import_record.is_external_without_side_effects = true; continue; } - if (import_record.tag == .with_type_sqlite_embedded) { + if (import_record.loader != null and import_record.loader.? == .sqlite_embedded) { import_record.is_external_without_side_effects = true; } @@ -2879,7 +2873,7 @@ pub const BundleV2 = struct { // Figure out the loader. 
{ - if (import_record.tag.loader()) |loader| { + if (import_record.loader) |loader| { resolve_task.loader = loader; } @@ -3767,10 +3761,10 @@ pub const ParseTask = struct { ), }; }, - .json => { + .json, .jsonc => |v| { const trace = tracer(@src(), "ParseJSON"); defer trace.end(); - const root = (try resolver.caches.json.parsePackageJSON(log, source, allocator, false)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty); + const root = (try resolver.caches.json.parseJSON(log, source, allocator, if (v == .jsonc) .jsonc else .json, true)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty); return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); }, .toml => { @@ -7507,22 +7501,13 @@ pub const LinkerContext = struct { const source = &input_files[id]; const loader = loaders[record.source_index.get()]; switch (loader) { - .jsx, .js, .ts, .tsx, .napi, .sqlite, .json, .html => { + .jsx, .js, .ts, .tsx, .napi, .sqlite, .sqlite_embedded, .json, .jsonc, .html => { this.log.addErrorFmt( source, record.range.loc, this.allocator, "Cannot import a \".{s}\" file into a CSS file", - .{@tagName(loader)}, - ) catch bun.outOfMemory(); - }, - .sqlite_embedded => { - this.log.addErrorFmt( - source, - record.range.loc, - this.allocator, - "Cannot import a \"sqlite_embedded\" file into a CSS file", - .{}, + .{if (loader == .sqlite_embedded) "sqlite" else @tagName(loader)}, ) catch bun.outOfMemory(); }, .css, .file, .toml, .wasm, .base64, .dataurl, .text, .bunsh => {}, diff --git a/src/cache.zig b/src/cache.zig index 96ecf3484c45de..e6cefbc8eb2d4d 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -316,15 +316,15 @@ pub const Json = struct { break :handler null; }; } - pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr { + pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, mode: enum { json, jsonc }, comptime force_utf8: bool) anyerror!?js_ast.Expr { // tsconfig.* and jsconfig.* files are JSON files, but they are not valid JSON files. // They are JSON files with comments and trailing commas. // Sometimes tooling expects this to work. 
- if (source.path.isJSONCFile()) { - return try parse(cache, log, source, allocator, json_parser.parseTSConfig, true); + if (mode == .jsonc) { + return try parse(cache, log, source, allocator, json_parser.parseTSConfig, force_utf8); } - return try parse(cache, log, source, allocator, json_parser.parse, false); + return try parse(cache, log, source, allocator, json_parser.parse, force_utf8); } pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, comptime force_utf8: bool) anyerror!?js_ast.Expr { diff --git a/src/fs.zig b/src/fs.zig index fa20e43a5272aa..bf3c4afd2ab0fa 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -1696,7 +1696,24 @@ pub const Path = struct { const ext = this.name.ext; - return loaders.get(ext) orelse bun.options.Loader.fromString(ext); + const result = loaders.get(ext) orelse bun.options.Loader.fromString(ext); + if (result == null or result == .json) { + const str = this.name.filename; + if (strings.eqlComptime(str, "package.json") or strings.eqlComptime(str, "bun.lock")) { + return .jsonc; + } + + if (strings.hasSuffixComptime(str, ".jsonc")) { + return .jsonc; + } + + if (strings.hasPrefixComptime(str, "tsconfig.") or strings.hasPrefixComptime(str, "jsconfig.")) { + if (strings.hasSuffixComptime(str, ".json")) { + return .jsonc; + } + } + } + return result; } pub fn isDataURL(this: *const Path) bool { @@ -1711,24 +1728,6 @@ pub const Path = struct { return strings.eqlComptime(this.namespace, "macro"); } - pub fn isJSONCFile(this: *const Path) bool { - const str = this.name.filename; - - if (strings.eqlComptime(str, "package.json") or strings.eqlComptime(str, "bun.lock")) { - return true; - } - - if (strings.hasSuffixComptime(str, ".jsonc")) { - return true; - } - - if (strings.hasPrefixComptime(str, "tsconfig.") or strings.hasPrefixComptime(str, "jsconfig.")) { - return strings.hasSuffixComptime(str, ".json"); - } - - return false; - } - pub const PackageRelative = struct { path: string, name: string, diff --git a/src/import_record.zig b/src/import_record.zig index a81f5f180b073d..ad27009dcad6ef 100644 --- a/src/import_record.zig +++ b/src/import_record.zig @@ -102,6 +102,7 @@ pub const ImportRecord = struct { path: fs.Path, kind: ImportKind, tag: Tag = .none, + loader: ?bun.options.Loader = null, source_index: Index = Index.invalid, @@ -167,10 +168,6 @@ pub const ImportRecord = struct { pub const List = bun.BabyList(ImportRecord); - pub fn loader(this: *const ImportRecord) ?bun.options.Loader { - return this.tag.loader(); - } - pub const Tag = enum { /// A normal import to a user's source file none, @@ -189,43 +186,8 @@ pub const ImportRecord = struct { /// crossover to the SSR graph. 
See bake.Framework.ServerComponents.separate_ssr_graph bake_resolve_to_ssr_graph, - with_type_sqlite, - with_type_sqlite_embedded, - with_type_text, - with_type_json, - with_type_toml, - with_type_file, - tailwind, - pub fn loader(this: Tag) ?bun.options.Loader { - return switch (this) { - .with_type_sqlite => .sqlite, - .with_type_sqlite_embedded => .sqlite_embedded, - .with_type_text => .text, - .with_type_json => .json, - .with_type_toml => .toml, - .with_type_file => .file, - else => null, - }; - } - - pub fn onlySupportsDefaultImports(this: Tag) bool { - return switch (this) { - .with_type_file, .with_type_text => true, - else => false, - }; - } - - pub fn isSQLite(this: Tag) bool { - return switch (this) { - .with_type_sqlite, - .with_type_sqlite_embedded, - => true, - else => false, - }; - } - pub inline fn isRuntime(this: Tag) bool { return this == .runtime; } diff --git a/src/js_ast.zig b/src/js_ast.zig index f7187cbfe953e6..393075c7a7048b 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -2794,7 +2794,8 @@ pub const E = struct { return this.import_record_index == std.math.maxInt(u32); } - pub fn importRecordTag(import: *const Import) ?ImportRecord.Tag { + pub fn importRecordLoader(import: *const Import) ?bun.options.Loader { + // This logic is duplicated in js_parser.zig fn parsePath() const obj = import.options.data.as(.e_object) orelse return null; const with = obj.get("with") orelse obj.get("assert") orelse @@ -2805,23 +2806,16 @@ return null).data.as(.e_string) orelse return null; - if (str.eqlComptime("json")) { - return .with_type_json; - } else if (str.eqlComptime("toml")) { - return .with_type_toml; - } else if (str.eqlComptime("text")) { - return .with_type_text; - } else if (str.eqlComptime("file")) { - return .with_type_file; - } else if (str.eqlComptime("sqlite")) { - const embed = brk: { - const embed = with_obj.get("embed") orelse break :brk false; - const embed_str = embed.data.as(.e_string) orelse break :brk false; - break :brk embed_str.eqlComptime("true"); - }; - - return if (embed) .with_type_sqlite_embedded else .with_type_sqlite; - } + if (!str.is_utf16) if (bun.options.Loader.fromString(str.data)) |loader| { + if (loader == .sqlite) { + const embed = with_obj.get("embed") orelse return loader; + const embed_str = embed.data.as(.e_string) orelse return loader; + if (embed_str.eqlComptime("true")) { + return .sqlite_embedded; + } + } + return loader; + }; return null; } diff --git a/src/js_parser.zig b/src/js_parser.zig index 69a7b91c021c9f..535cd9bae444e3 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -505,6 +505,7 @@ const TransposeState = struct { is_require_immediately_assigned_to_decl: bool = false, loc: logger.Loc = logger.Loc.Empty, import_record_tag: ?ImportRecord.Tag = null, + import_loader: ?bun.options.Loader = null, import_options: Expr = Expr.empty, }; @@ -2569,6 +2570,7 @@ const ParsedPath = struct { text: string, is_macro: bool, import_tag: ImportRecord.Tag = .none, + loader: ?bun.options.Loader = null, }; const StrictModeFeature = enum { @@ -9203,8 +9205,8 @@ fn NewParser_( item_refs.shrinkAndFree(stmt.items.len + @as(usize, @intFromBool(stmt.default_name != null))); } - if (path.import_tag != .none) { - try p.validateImportType(path.import_tag, &stmt); + if (path.import_tag != .none or path.loader != null) { + try p.validateAndSetImportType(&path, &stmt); } // Track the items for this namespace @@ -9212,13 +9214,13 @@ return p.s(stmt, loc); } - fn validateImportType(p: *P, import_tag:
ImportRecord.Tag, stmt: *S.Import) !void { + fn validateAndSetImportType(p: *P, path: *const ParsedPath, stmt: *S.Import) !void { @setCold(true); - if (import_tag.loader() != null) { - p.import_records.items[stmt.import_record_index].tag = import_tag; + if (path.loader) |loader| { + p.import_records.items[stmt.import_record_index].loader = loader; - if (import_tag.isSQLite()) { + if (loader == .sqlite or loader == .sqlite_embedded) { for (stmt.items) |*item| { if (!(strings.eqlComptime(item.alias, "default") or strings.eqlComptime(item.alias, "db"))) { try p.log.addError( @@ -9229,7 +9231,7 @@ break; } } - } else if (import_tag.onlySupportsDefaultImports()) { + } else if (loader == .file or loader == .text) { for (stmt.items) |*item| { if (!(strings.eqlComptime(item.alias, "default"))) { try p.log.addError( @@ -9241,8 +9243,8 @@ } } } - } else if (import_tag == .bake_resolve_to_ssr_graph) { - p.import_records.items[stmt.import_record_index].tag = import_tag; + } else if (path.import_tag == .bake_resolve_to_ssr_graph) { + p.import_records.items[stmt.import_record_index].tag = path.import_tag; } } @@ -12324,29 +12326,22 @@ if (supported_attribute) |attr| { switch (attr) { .type => { + // This logic is duplicated in js_ast.zig fn importRecordLoader() const type_attr = string_literal_text; if (strings.eqlComptime(type_attr, "macro")) { path.is_macro = true; - } else if (strings.eqlComptime(type_attr, "sqlite")) { - path.import_tag = .with_type_sqlite; - if (has_seen_embed_true) { - path.import_tag = .with_type_sqlite_embedded; - } - } else if (strings.eqlComptime(type_attr, "json")) { - path.import_tag = .with_type_json; - } else if (strings.eqlComptime(type_attr, "toml")) { - path.import_tag = .with_type_toml; - } else if (strings.eqlComptime(type_attr, "text")) { - path.import_tag = .with_type_text; - } else if (strings.eqlComptime(type_attr, "file")) { - path.import_tag = .with_type_file; + } else if (bun.options.Loader.fromString(type_attr)) |loader| { + path.loader = loader; + if (loader == .sqlite and has_seen_embed_true) path.loader = .sqlite_embedded; + } else { + // Unknown type attribute: ignored for now; the loader falls back to extension-based detection (TODO: consider erroring) } }, .embed => { if (strings.eqlComptime(string_literal_text, "true")) { has_seen_embed_true = true; - if (path.import_tag == .with_type_sqlite) { - path.import_tag = .with_type_sqlite_embedded; + if (path.loader != null and path.loader == .sqlite) { + path.loader = .sqlite_embedded; } } }, @@ -17331,7 +17326,7 @@ .import_options = e_.options, .loc = e_.expr.loc, - .import_record_tag = e_.importRecordTag(), + .import_loader = e_.importRecordLoader(), }; return p.import_transposer.maybeTransposeIf(e_.expr, &state); diff --git a/src/js_printer.zig b/src/js_printer.zig index 6ab28eb324c5d3..661eb41c33e583 100--- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -4452,36 +4452,27 @@ fn NewPrinter( p.printImportRecordPath(record); - switch (record.tag) { - .with_type_sqlite, .with_type_sqlite_embedded => { - // we do not preserve "embed": "true" since it is not necessary - p.printWhitespacer(ws(" with { type: \"sqlite\" }")); - }, - .with_type_text => { - if (comptime is_bun_platform) { - p.printWhitespacer(ws(" with { type: \"text\" }")); - } - }, - .with_type_json => { - // backwards compatibility: previously, we always stripped type json - if (comptime is_bun_platform) { - p.printWhitespacer(ws(" with { type: \"json\" }")); - } - }, - .with_type_toml => { - // backwards compatibility: previously, we always stripped type - if
(comptime is_bun_platform) { - p.printWhitespacer(ws(" with { type: \"toml\" }")); - } - }, - .with_type_file => { - // backwards compatibility: previously, we always stripped type - if (comptime is_bun_platform) { - p.printWhitespacer(ws(" with { type: \"file\" }")); - } - }, - else => {}, - } + // backwards compatibility: previously, we always stripped type + if (comptime is_bun_platform) if (record.loader) |loader| switch (loader) { + .jsx => p.printWhitespacer(ws(" with { type: \"jsx\" }")), + .js => p.printWhitespacer(ws(" with { type: \"js\" }")), + .ts => p.printWhitespacer(ws(" with { type: \"ts\" }")), + .tsx => p.printWhitespacer(ws(" with { type: \"tsx\" }")), + .css => p.printWhitespacer(ws(" with { type: \"css\" }")), + .file => p.printWhitespacer(ws(" with { type: \"file\" }")), + .json => p.printWhitespacer(ws(" with { type: \"json\" }")), + .jsonc => p.printWhitespacer(ws(" with { type: \"jsonc\" }")), + .toml => p.printWhitespacer(ws(" with { type: \"toml\" }")), + .wasm => p.printWhitespacer(ws(" with { type: \"wasm\" }")), + .napi => p.printWhitespacer(ws(" with { type: \"napi\" }")), + .base64 => p.printWhitespacer(ws(" with { type: \"base64\" }")), + .dataurl => p.printWhitespacer(ws(" with { type: \"dataurl\" }")), + .text => p.printWhitespacer(ws(" with { type: \"text\" }")), + .bunsh => p.printWhitespacer(ws(" with { type: \"sh\" }")), + // sqlite_embedded only relevant when bundling + .sqlite, .sqlite_embedded => p.printWhitespacer(ws(" with { type: \"sqlite\" }")), + .html => p.printWhitespacer(ws(" with { type: \"html\" }")), + }; p.printSemicolonAfterStatement(); }, .s_block => |s| { diff --git a/src/options.zig b/src/options.zig index 2e097d1c516c09..b60ff6913f9e9b 100644 --- a/src/options.zig +++ b/src/options.zig @@ -635,6 +635,7 @@ pub const Loader = enum(u8) { css, file, json, + jsonc, toml, wasm, napi, @@ -679,7 +680,7 @@ pub const Loader = enum(u8) { return switch (this) { .jsx, .js, .ts, .tsx => bun.http.MimeType.javascript, .css => bun.http.MimeType.css, - .toml, .json => bun.http.MimeType.json, + .toml, .json, .jsonc => bun.http.MimeType.json, .wasm => bun.http.MimeType.wasm, .html => bun.http.MimeType.html, else => bun.http.MimeType.other, @@ -697,7 +698,7 @@ pub const Loader = enum(u8) { pub fn canBeRunByBun(this: Loader) bool { return switch (this) { - .jsx, .js, .ts, .tsx, .json, .wasm, .bunsh => true, + .jsx, .js, .ts, .tsx, .json, .jsonc, .wasm, .bunsh => true, else => false, }; } @@ -753,9 +754,10 @@ pub const Loader = enum(u8) { .{ "css", .css }, .{ "file", .file }, .{ "json", .json }, - .{ "jsonc", .json }, + .{ "jsonc", .jsonc }, .{ "toml", .toml }, .{ "wasm", .wasm }, + .{ "napi", .napi }, .{ "node", .napi }, .{ "dataurl", .dataurl }, .{ "base64", .base64 }, @@ -818,6 +820,7 @@ pub const Loader = enum(u8) { .html => .html, .file, .bunsh => .file, .json => .json, + .jsonc => .json, .toml => .toml, .wasm => .wasm, .napi => .napi, @@ -867,7 +870,7 @@ pub const Loader = enum(u8) { pub fn isJavaScriptLikeOrJSON(loader: Loader) bool { return switch (loader) { - .jsx, .js, .ts, .tsx, .json => true, + .jsx, .js, .ts, .tsx, .json, .jsonc => true, // toml is included because we can serialize to the same AST as JSON .toml => true, @@ -882,6 +885,13 @@ pub const Loader = enum(u8) { return obj.get(ext); } + + pub fn sideEffects(this: Loader) bun.resolver.SideEffects { + return switch (this) { + .text, .json, .jsonc, .toml, .file => bun.resolver.SideEffects.no_side_effects__pure_data, + else => bun.resolver.SideEffects.has_side_effects, + }; + } }; const 
default_loaders_posix = .{ @@ -905,7 +915,7 @@ const default_loaders_posix = .{ .{ ".txt", .text }, .{ ".text", .text }, .{ ".html", .html }, - .{ ".jsonc", .json }, + .{ ".jsonc", .jsonc }, }; const default_loaders_win32 = default_loaders_posix ++ .{ .{ ".sh", .bunsh }, diff --git a/src/transpiler.zig b/src/transpiler.zig index edcda26226940f..6cef4e33ee19d0 100644 --- a/src/transpiler.zig +++ b/src/transpiler.zig @@ -869,7 +869,7 @@ pub const Transpiler = struct { }; switch (loader) { - .jsx, .tsx, .js, .ts, .json, .toml, .text => { + .jsx, .tsx, .js, .ts, .json, .jsonc, .toml, .text => { var result = transpiler.parse( ParseOptions{ .allocator = transpiler.allocator, @@ -1389,14 +1389,13 @@ pub const Transpiler = struct { }; }, // TODO: use lazy export AST - inline .toml, .json => |kind| { - var expr = if (kind == .json) + inline .toml, .json, .jsonc => |kind| { + var expr = if (kind == .jsonc) // We allow importing tsconfig.*.json or jsconfig.*.json with comments // These files implicitly become JSONC files, which aligns with the behavior of text editors. - if (source.path.isJSONCFile()) - JSON.parseTSConfig(&source, transpiler.log, allocator, false) catch return null - else - JSON.parse(&source, transpiler.log, allocator, false) catch return null + JSON.parseTSConfig(&source, transpiler.log, allocator, false) catch return null + else if (kind == .json) + JSON.parse(&source, transpiler.log, allocator, false) catch return null else if (kind == .toml) TOML.parse(&source, transpiler.log, allocator, false) catch return null else diff --git a/test/js/bun/import-attributes/import-attributes.test.ts b/test/js/bun/import-attributes/import-attributes.test.ts new file mode 100644 index 00000000000000..f912e9f069d954 --- /dev/null +++ b/test/js/bun/import-attributes/import-attributes.test.ts @@ -0,0 +1,305 @@ +import { bunExe, tempDirWithFiles } from "harness"; +import * as path from "path"; + +const loaders = ["js", "jsx", "ts", "tsx", "json", "jsonc", "toml", "text", "sqlite", "file"]; +const other_loaders_do_not_crash = ["webassembly", "does_not_exist"]; + +async function testBunRun(dir: string, loader: string | null, filename: string): Promise<unknown> { + const cmd = [ + bunExe(), + "-e", + `import * as contents from './${filename}'${loader != null ? ` with {type: '${loader}'}` : ""}; console.log(JSON.stringify(contents));`, + ]; + const result = Bun.spawnSync({ + cmd: cmd, + cwd: dir, + }); + if (result.exitCode !== 0) { + if (result.stderr.toString().includes("panic")) { + console.error("cmd stderr"); + console.log(result.stderr.toString()); + console.error("cmd stdout"); + console.log(result.stdout.toString()); + console.error("cmd args"); + console.log(JSON.stringify(cmd)); + console.error("cmd cwd"); + console.log(dir); + throw new Error("panic"); + } + return "error"; + // return result.stderr.toString().match(/error: .+/)?.[0]; + } else { + return JSON.parse(result.stdout.toString()); + } +} +async function testBunRunAwaitImport(dir: string, loader: string | null, filename: string): Promise<unknown> { + const cmd = [ + bunExe(), + "-e", + `console.log(JSON.stringify(await import('./${filename}'${loader != null ?
`, {with: {type: '${loader}'}}` : ""})));`, + ]; + const result = Bun.spawnSync({ + cmd: cmd, + cwd: dir, + }); + console.timeEnd("testBunRunAwaitImport: " + dir + " " + loader); + if (result.exitCode !== 0) { + if (result.stderr.toString().includes("panic")) { + console.error("cmd stderr"); + console.log(result.stderr.toString()); + console.error("cmd stdout"); + console.log(result.stdout.toString()); + console.error("cmd args"); + console.log(JSON.stringify(cmd)); + console.error("cmd cwd"); + console.log(dir); + throw new Error("panic"); + } + return "error"; + // return result.stderr.toString().match(/error: .+/)?.[0]; + } else { + return JSON.parse(result.stdout.toString()); + } +} +async function testBunBuild(dir: string, loader: string | null, filename: string): Promise<unknown> { + await Bun.write( + path.join(dir, "main_" + loader + ".js"), + `import * as contents from './${filename}'${loader != null ? ` with {type: '${loader}'${loader === "sqlite" ? ", embed: 'true'" : ""}}` : ""}; console.log(JSON.stringify(contents));`, + ); + const result = await Bun.build({ + entrypoints: [path.join(dir, "main_" + loader + ".js")], + throw: false, + target: "bun", + outdir: path.join(dir, "out"), + }); + if (result.success) { + const cmd = [bunExe(), "out/main_" + loader + ".js"]; + const result = Bun.spawnSync({ + cmd: cmd, + cwd: dir, + }); + if (result.exitCode !== 0) { + if (result.stderr.toString().includes("panic")) { + console.error("cmd stderr"); + console.log(result.stderr.toString()); + console.error("cmd stdout"); + console.log(result.stdout.toString()); + console.error("cmd args"); + console.log(JSON.stringify(cmd)); + console.error("cmd cwd"); + console.log(dir); + throw new Error("panic"); + } + return "error"; + } else { + return JSON.parse(result.stdout.toString()); + } + } else { + return "error"; + } +} +type Tests = Record< + string, + { + loader: string | null; + filename: string; + dir?: string; + } +>; +const default_tests = Object.fromEntries( + loaders.map(loader => [loader, { loader, filename: "no_extension" }]), +) as Tests; +async function compileAndTest(code: string, tests: Tests = default_tests): Promise<Record<string, unknown>> { + console.time("import {} from '';"); + const v1 = await compileAndTest_inner(code, tests, testBunRun); + console.timeEnd("import {} from '';"); + console.time("await import()"); + const v2 = await compileAndTest_inner(code, tests, testBunRunAwaitImport); + console.timeEnd("await import()"); + console.time("Bun.build()"); + const v3 = await compileAndTest_inner(code, tests, testBunBuild); + console.timeEnd("Bun.build()"); + if (!Bun.deepEquals(v1, v2) || !Bun.deepEquals(v2, v3)) { + console.log("==== regular import ====\n" + JSON.stringify(v1, null, 2) + "\n"); + console.log("==== await import ====\n" + JSON.stringify(v2, null, 2) + "\n"); + console.log("==== build ====\n" + JSON.stringify(v3, null, 2) + "\n"); + throw new Error("did not equal"); + } + return v1; +} +async function compileAndTest_inner( + code: string, + tests: Tests, + cb: (dir: string, loader: string | null, filename: string) => Promise<unknown>, +): Promise<Record<string, unknown>> { + let res: Record<string, unknown> = {}; + for (const [label, test] of Object.entries(tests)) { + test.dir = tempDirWithFiles("import-attributes", { + [test.filename]: code, + }); + res[label] = await cb(test.dir!, test.loader, test.filename); + } + if (Object.hasOwn(res, "text")) { + expect(res.text).toEqual({ default: code }); + delete res.text; + } + if (Object.hasOwn(res, "sqlite")) { + const sqlite_res = res.sqlite; + delete (sqlite_res as any).__esModule; + if (cb
=== testBunBuild) { + expect(sqlite_res).toStrictEqual({ + default: { filename: expect.any(String) }, + }); + expect((sqlite_res as any).default.filename).toStartWith(path.join(tests.sqlite!.dir!, "out")); + } else { + expect(sqlite_res).toStrictEqual({ + db: { filename: path.join(tests.sqlite!.dir!, tests.sqlite!.filename) }, + default: { filename: path.join(tests.sqlite!.dir!, tests.sqlite!.filename) }, + }); + } + delete res.sqlite; + } + if (Object.hasOwn(res, "file")) { + const file_res = res.file; + if (cb === testBunBuild) { + expect(file_res).toEqual({ + default: expect.any(String), + }); + } else { + delete (file_res as any).__esModule; + expect(file_res).toEqual({ + default: path.join(tests.file!.dir!, tests.file!.filename), + }); + } + delete res.file; + } + const res_flipped: Record<string, [unknown, string[]]> = {}; + for (const [k, v] of Object.entries(res)) { + (res_flipped[JSON.stringify(v)] ??= [v, []])[1].push(k); + } + return Object.fromEntries(Object.entries(res_flipped).map(([k, [k2, v]]) => [v.join(","), k2])); +} + +test("javascript", async () => { + expect(await compileAndTest(`export const a = "demo";`)).toMatchInlineSnapshot(` +{ + "js,jsx,ts,tsx": { + "a": "demo", + }, + "json,jsonc,toml": "error", +} +`); +}); + +test("typescript", async () => { + expect(await compileAndTest(`export const a = (() => {}).toString().replace(/\\n/g, '');`)).toMatchInlineSnapshot(` +{ + "js,jsx,tsx,json,jsonc,toml": "error", + "ts": { + "a": "() => {}", + }, +} +`); +}); + +test("json", async () => { + expect(await compileAndTest(`{"key": "πŸ‘©β€πŸ‘§β€πŸ‘§value"}`)).toMatchInlineSnapshot(` +{ + "js,jsx,ts,tsx,toml": "error", + "json,jsonc": { + "default": { + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§value", + }, + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§value", + }, +} +`); +}); +test("jsonc", async () => { + expect( + await compileAndTest(`{ + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§value", // my json + }`), + ).toMatchInlineSnapshot(` +{ + "js,jsx,ts,tsx,json,toml": "error", + "jsonc": { + "default": { + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§value", + }, + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§value", + }, +} +`); +}); +test("toml", async () => { + expect( + await compileAndTest(`[section] + key = "πŸ‘©β€πŸ‘§β€πŸ‘§value"`), + ).toMatchInlineSnapshot(` +{ + "js,jsx,ts,tsx,json,jsonc": "error", + "toml": { + "default": { + "section": { + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§value", + }, + }, + "section": { + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§value", + }, + }, +} +`); +}); + +test("tsconfig.json is assumed jsonc", async () => { + const tests: Tests = { + "tsconfig.json": { loader: null, filename: "tsconfig.json" }, + "myfile.json": { loader: null, filename: "myfile.json" }, + }; + expect( + await compileAndTest( + `{ + // jsonc file + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§def", + }`, + tests, + ), + ).toMatchInlineSnapshot(` +{ + "myfile.json": "error", + "tsconfig.json": { + "default": { + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§def", + }, + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§def", + }, +} +`); + expect( + await compileAndTest( + `{ + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§def" + }`, + tests, + ), + ).toMatchInlineSnapshot(` +{ + "tsconfig.json,myfile.json": { + "default": { + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§def", + }, + "key": "πŸ‘©β€πŸ‘§β€πŸ‘§def", + }, +} +`); +}); + +describe("other loaders do not crash", () => { + for (const skipped_loader of other_loaders_do_not_crash) { + test(skipped_loader, async () => { + await compileAndTest(`export const a = "demo";`, { + [skipped_loader]: { loader: skipped_loader, filename: "no_extension" }, + }); + }); + } +});
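
The attribute handling above routes every `type` value through `bun.options.Loader.fromString`, so any loader name Bun recognizes (including the new `jsonc` loader) is accepted as an import attribute, while unrecognized values are ignored and the loader falls back to extension/filename detection (package.json, bun.lock, tsconfig.*/jsconfig.*.json, and *.jsonc now resolve to `jsonc`). A minimal usage sketch of that behavior; the file names here are hypothetical and not taken from the test suite:

// usage-sketch.ts (illustrative only)
import settings from "./settings.jsonc";                  // extension resolves to the jsonc loader
import strict from "./data.json" with { type: "json" };   // strict JSON: comments are a parse error
import relaxed from "./data.json" with { type: "jsonc" }; // same file, parsed with comments and trailing commas allowed

console.log(settings, strict, relaxed);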