diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig
index 5eb50b4d20f444..37badf8970cfbb 100644
--- a/src/cli/bunx_command.zig
+++ b/src/cli/bunx_command.zig
@@ -271,7 +271,6 @@ pub const BunxCommand = struct {
         defer requests_buf.deinit(ctx.allocator);
         const update_requests = bun.PackageManager.UpdateRequest.parse(
             ctx.allocator,
-            null,
             ctx.log,
             &.{package_name},
             &requests_buf,
diff --git a/src/install/bun.lock.zig b/src/install/bun.lock.zig
index b34d21e0097bd8..5ca4981a93dea4 100644
--- a/src/install/bun.lock.zig
+++ b/src/install/bun.lock.zig
@@ -1044,7 +1044,6 @@ pub fn parseIntoBinaryLockfile(
     root: JSON.Expr,
     source: *const logger.Source,
     log: *logger.Log,
-    manager: ?*PackageManager,
 ) ParseError!void {
     lockfile.initEmpty(allocator);
 
@@ -1165,7 +1164,6 @@ pub fn parseIntoBinaryLockfile(
                 version_sliced.slice,
                 &version_sliced,
                 log,
-                manager,
             ) orelse {
                 try log.addError(source, value.loc, "Invalid override version");
                 return error.InvalidOverridesObject;
@@ -1747,7 +1745,6 @@ fn parseAppendDependencies(
                 version_sliced.slice,
                 &version_sliced,
                 log,
-                null,
             ) orelse {
                 try log.addError(source, value.loc, "Invalid dependency version");
                 return error.InvalidDependencyVersion;
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index 4ded9d5682eb59..6673674604fd7b 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -78,11 +78,11 @@ pub fn count(this: *const Dependency, buf: []const u8, comptime StringBuilder: t
     this.countWithDifferentBuffers(buf, buf, StringBuilder, builder);
 }
 
-pub fn clone(this: *const Dependency, package_manager: *PackageManager, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) !Dependency {
-    return this.cloneWithDifferentBuffers(package_manager, buf, buf, StringBuilder, builder);
+pub fn clone(this: *const Dependency, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) !Dependency {
+    return this.cloneWithDifferentBuffers(buf, buf, StringBuilder, builder);
 }
 
-pub fn cloneWithDifferentBuffers(this: *const Dependency, package_manager: *PackageManager, name_buf: []const u8, version_buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) !Dependency {
+pub fn cloneWithDifferentBuffers(this: *const Dependency, name_buf: []const u8, version_buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) !Dependency {
     const out_slice = builder.lockfile.buffers.string_bytes.items;
     const new_literal = builder.append(String, this.version.literal.slice(version_buf));
     const sliced = new_literal.sliced(out_slice);
@@ -99,7 +99,6 @@ pub fn cloneWithDifferentBuffers(this: *const Dependency, package_manager: *Pack
             this.version.tag,
             &sliced,
             null,
-            package_manager,
         ) orelse Dependency.Version{},
         .behavior = this.behavior,
     };
@@ -116,7 +115,6 @@ pub const Context = struct {
     allocator: std.mem.Allocator,
     log: *logger.Log,
     buffer: []const u8,
-    package_manager: ?*PackageManager,
 };
 
 /// Get the name of the package as it should appear in a remote registry.
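[Editor's note] The hunks above drop the `*PackageManager` parameter from `Dependency.clone` and `cloneWithDifferentBuffers`, and the `Context` struct loses its `package_manager` field. A minimal sketch of the resulting call shape; the locals (`dep`, `old_buf`, `builder`) are hypothetical and exist only to illustrate the new signatures:

    // Cloning copies the version literal into the destination lockfile's
    // string buffer and re-parses it; no PackageManager handle is threaded
    // through anymore.
    const cloned: Dependency = try dep.clone(
        old_buf, // string bytes backing `dep`'s name/version slices
        @TypeOf(&builder), // the destination lockfile's StringBuilder
        &builder,
    );

The parse family in the next hunks changes the same way: the trailing `?*PackageManager` argument disappears from `parse`, `parseWithOptionalTag`, and `parseWithTag`, since the `known_npm_aliases` bookkeeping that needed it is removed entirely (see the install.zig hunks further down).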
@@ -430,7 +428,6 @@ pub const Version = struct {
             tag,
             sliced,
             ctx.log,
-            ctx.package_manager,
         ) orelse Dependency.Version.zeroed;
     }
 
@@ -856,10 +853,9 @@ pub inline fn parse(
     dependency: string,
     sliced: *const SlicedString,
     log: ?*logger.Log,
-    manager: ?*PackageManager,
 ) ?Version {
     const dep = std.mem.trimLeft(u8, dependency, " \t\n\r");
-    return parseWithTag(allocator, alias, alias_hash, dep, Version.Tag.infer(dep), sliced, log, manager);
+    return parseWithTag(allocator, alias, alias_hash, dep, Version.Tag.infer(dep), sliced, log);
 }
 
 pub fn parseWithOptionalTag(
@@ -870,7 +866,6 @@ pub fn parseWithOptionalTag(
     tag: ?Dependency.Version.Tag,
     sliced: *const SlicedString,
     log: ?*logger.Log,
-    package_manager: ?*PackageManager,
 ) ?Version {
     const dep = std.mem.trimLeft(u8, dependency, " \t\n\r");
     return parseWithTag(
@@ -881,7 +876,6 @@ pub fn parseWithOptionalTag(
         tag orelse Version.Tag.infer(dep),
         sliced,
         log,
-        package_manager,
     );
 }
 
@@ -893,7 +887,6 @@ pub fn parseWithTag(
     tag: Dependency.Version.Tag,
     sliced: *const SlicedString,
     log_: ?*logger.Log,
-    package_manager: ?*PackageManager,
 ) ?Version {
     switch (tag) {
         .npm => {
@@ -932,7 +925,6 @@ pub fn parseWithTag(
             const version = Semver.Query.parse(
                 allocator,
-                input,
                 sliced.sub(input),
             ) catch |err| {
                 switch (err) {
@@ -952,16 +944,6 @@ pub fn parseWithTag(
                 .tag = .npm,
             };
 
-            if (is_alias) {
-                if (package_manager) |pm| {
-                    pm.known_npm_aliases.put(
-                        allocator,
-                        alias_hash.?,
-                        result,
-                    ) catch unreachable;
-                }
-            }
-
             return result;
         },
         .dist_tag => {
@@ -1284,7 +1266,7 @@ pub fn fromJS(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JS
     var log = logger.Log.init(allocator);
 
     const sliced = SlicedString.init(buf, name);
-    const dep: Version = Dependency.parse(allocator, SlicedString.init(buf, alias).value(), null, buf, &sliced, &log, null) orelse {
+    const dep: Version = Dependency.parse(allocator, SlicedString.init(buf, alias).value(), null, buf, &sliced, &log) orelse {
         if (log.msgs.items.len > 0) {
             return globalThis.throwValue(log.toJS(globalThis, bun.default_allocator, "Failed to parse dependency"));
         }
diff --git a/src/install/install.zig b/src/install/install.zig
index 400b5a0c124766..18d7600b8277e0 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -2689,7 +2689,6 @@ pub const PackageManager = struct {
     task_queue: TaskDependencyQueue = .{},
 
     manifests: PackageManifestMap = .{},
-    folders: FolderResolution.Map = .{},
     git_repositories: RepositoryMap = .{},
 
     network_dedupe_map: NetworkTask.DedupeMap = NetworkTask.DedupeMap.init(bun.default_allocator),
@@ -2737,9 +2736,6 @@ pub const PackageManager = struct {
     peer_dependencies: std.fifo.LinearFifo(DependencyID, .Dynamic) = std.fifo.LinearFifo(DependencyID, .Dynamic).init(default_allocator),
 
-    // name hash from alias package name -> aliased package dependency version info
-    known_npm_aliases: NpmAliasMap = .{},
-
     event_loop: JSC.AnyEventLoop,
 
     // During `installPackages` we learn exactly what dependencies from --trust
@@ -3278,7 +3274,7 @@ pub const PackageManager = struct {
 
         builder.allocate() catch |err| return .{ .failure = err };
 
-        const dep = dummy.cloneWithDifferentBuffers(this, name, version_buf, @TypeOf(&builder), &builder) catch unreachable;
+        const dep = dummy.cloneWithDifferentBuffers(name, version_buf, @TypeOf(&builder), &builder) catch unreachable;
         builder.clamp();
         const index = this.lockfile.buffers.dependencies.items.len;
         this.lockfile.buffers.dependencies.append(this.allocator, dep) catch unreachable;
@@ -4281,7 +4277,7 @@ pub const PackageManager = struct {
                 },
             },
         };
-        switch (FolderResolution.getOrPut(.{ .cache_folder = npm_package_path }, dependency, ".", this)) {
+        switch (FolderResolution.getOrPut(.{ .cache_folder = npm_package_path }, dependency, ".", this, this.lockfile)) {
             .new_package_id => |id| {
                 this.enqueueDependencyList(this.lockfile.packages.items(.dependencies)[id]);
                 return id;
@@ -4321,7 +4317,6 @@ pub const PackageManager = struct {
         name_hash: PackageNameHash,
         name: String,
         dependency: *const Dependency,
-        version: Dependency.Version,
         dependency_id: DependencyID,
         behavior: Behavior,
         manifest: *const Npm.PackageManifest,
@@ -4329,17 +4324,9 @@ pub const PackageManager = struct {
         install_peer: bool,
         comptime successFn: SuccessFn,
     ) !?ResolvedPackageResult {
-        const should_update = this.to_update and
-            // If updating, only update packages in the current workspace
-            this.lockfile.isRootDependency(this, dependency_id) and
-            // no need to do a look up if update requests are empty (`bun update` with no args)
-            (this.update_requests.len == 0 or
-                this.updating_packages.contains(dependency.name.slice(this.lockfile.buffers.string_bytes.items)));
-
         // Was this package already allocated? Let's reuse the existing one.
         if (this.lockfile.getPackageID(
             name_hash,
-            if (should_update) null else version,
             &.{
                 .tag = .npm,
                 .value = .{
@@ -4361,7 +4348,6 @@ pub const PackageManager = struct {
 
         // appendPackage sets the PackageID on the package
         const package = try this.lockfile.appendPackage(try Lockfile.Package.fromNPM(
-            this,
             this.allocator,
             this.lockfile,
             this.log,
@@ -4772,7 +4758,6 @@ pub const PackageManager = struct {
                     name_hash,
                     name,
                     dependency,
-                    version,
                     dependency_id,
                     behavior,
                     manifest,
@@ -4802,7 +4787,7 @@ pub const PackageManager = struct {
                     //     .auto,
                     // );
                 };
-                break :res FolderResolution.getOrPut(.{ .relative = .folder }, version, folder_path_abs, this);
+                break :res FolderResolution.getOrPut(.{ .relative = .folder }, version, folder_path_abs, this, this.lockfile);
             }
 
             // transitive folder dependencies do not have their dependencies resolved
@@ -4865,7 +4850,7 @@ pub const PackageManager = struct {
                     break :blk Path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &buf2, &[_]string{workspace_path}, .auto);
                 };
 
-                const res = FolderResolution.getOrPut(.{ .relative = .workspace }, version, workspace_path_u8, this);
+                const res = FolderResolution.getOrPut(.{ .relative = .workspace }, version, workspace_path_u8, this, this.lockfile);
 
                 switch (res) {
                     .err => |err| return err,
@@ -4881,7 +4866,7 @@ pub const PackageManager = struct {
                 }
             },
             .symlink => {
-                const res = FolderResolution.getOrPut(.{ .global = try this.globalLinkDirPath() }, version, this.lockfile.str(&version.value.symlink), this);
+                const res = FolderResolution.getOrPut(.{ .global = try this.globalLinkDirPath() }, version, this.lockfile.str(&version.value.symlink), this, this.lockfile);
 
                 switch (res) {
                     .err => |err| return err,
@@ -5193,29 +5178,6 @@ pub const PackageManager = struct {
         };
 
         const version = version: {
-            if (dependency.version.tag == .npm) {
-                if (this.known_npm_aliases.get(name_hash)) |aliased| {
-                    const group = dependency.version.value.npm.version;
-                    const buf = this.lockfile.buffers.string_bytes.items;
-                    var curr_list: ?*const Semver.Query.List = &aliased.value.npm.version.head;
-                    while (curr_list) |queries| {
-                        var curr: ?*const Semver.Query = &queries.head;
-                        while (curr) |query| {
-                            if (group.satisfies(query.range.left.version, buf, buf) or group.satisfies(query.range.right.version, buf, buf)) {
-                                name = aliased.value.npm.name;
-                                name_hash = String.Builder.stringHash(this.lockfile.str(&name));
-                                break :version aliased;
-                            }
-                            curr = query.next;
-                        }
-                        curr_list = queries.next;
-                    }
-
-                    // fallthrough. a package that matches the name of an alias but does not match
-                    // the version should be enqueued as a normal npm dependency, overrides allowed
-                }
-            }
-
             // allow overriding all dependencies unless the dependency is coming directly from an alias, "npm:"
             if (dependency.version.tag != .npm or !dependency.version.value.npm.is_alias and this.lockfile.hasOverrides()) {
                 if (this.lockfile.overrides.get(name_hash)) |new| {
@@ -5417,7 +5379,6 @@ pub const PackageManager = struct {
                         name_hash,
                         name,
                         dependency,
-                        version,
                         id,
                         dependency.behavior,
                         &loaded_manifest.?,
@@ -5488,7 +5449,7 @@ pub const PackageManager = struct {
                 };
 
                 // First: see if we already loaded the git package in-memory
-                if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| {
+                if (this.lockfile.getPackageID(name_hash, &res)) |pkg_id| {
                     successFn(this, id, pkg_id);
                     return;
                 }
@@ -5577,7 +5538,7 @@ pub const PackageManager = struct {
                 };
 
                 // First: see if we already loaded the github package in-memory
-                if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| {
+                if (this.lockfile.getPackageID(name_hash, &res)) |pkg_id| {
                     successFn(this, id, pkg_id);
                     return;
                 }
@@ -5762,7 +5723,7 @@ pub const PackageManager = struct {
             };
 
             // First: see if we already loaded the tarball package in-memory
-            if (this.lockfile.getPackageID(name_hash, null, &res)) |pkg_id| {
+            if (this.lockfile.getPackageID(name_hash, &res)) |pkg_id| {
                 successFn(this, id, pkg_id);
                 return;
             }
@@ -10338,7 +10299,7 @@
 
             var array = Array{};
 
-            const update_requests = parseWithError(allocator, null, &log, all_positionals.items, &array, .add, false) catch {
+            const update_requests = parseWithError(allocator, &log, all_positionals.items, &array, .add, false) catch {
                 return globalThis.throwValue(log.toJS(globalThis, bun.default_allocator, "Failed to parse dependencies"));
             };
             if (update_requests.len == 0) return .undefined;
@@ -10360,18 +10321,16 @@
 
         pub fn parse(
             allocator: std.mem.Allocator,
-            pm: ?*PackageManager,
             log: *logger.Log,
             positionals: []const string,
             update_requests: *Array,
             subcommand: Subcommand,
         ) []UpdateRequest {
-            return parseWithError(allocator, pm, log, positionals, update_requests, subcommand, true) catch Global.crash();
+            return parseWithError(allocator, log, positionals, update_requests, subcommand, true) catch Global.crash();
         }
 
         fn parseWithError(
             allocator: std.mem.Allocator,
-            pm: ?*PackageManager,
             log: *logger.Log,
             positionals: []const string,
             update_requests: *Array,
@@ -10422,7 +10381,6 @@
                     null,
                     &SlicedString.init(input, value),
                     log,
-                    pm,
                 ) orelse {
                     if (fatal) {
                         Output.errGeneric("unrecognised dependency format: {s}", .{
@@ -10445,7 +10403,6 @@
                     null,
                     &SlicedString.init(input, input),
                     log,
-                    pm,
                 )) |ver| {
                     alias = null;
                     version = ver;
@@ -10700,7 +10657,7 @@ pub const PackageManager = struct {
         const updates: []UpdateRequest = if (manager.subcommand == .@"patch-commit" or manager.subcommand == .patch)
             &[_]UpdateRequest{}
         else
-            UpdateRequest.parse(ctx.allocator, manager, ctx.log, manager.options.positionals[1..], &update_requests, manager.subcommand);
+            UpdateRequest.parse(ctx.allocator, ctx.log, manager.options.positionals[1..], &update_requests, manager.subcommand);
         try manager.updatePackageJSONAndInstallWithManagerWithUpdates(
             ctx,
             updates,
@@ -14523,8 +14480,17 @@
             &resolver,
             Features.main,
         );
-        const mapping = try manager.lockfile.allocator.alloc(PackageID, maybe_root.dependencies.len);
-        @memset(mapping, invalid_package_id);
+
+        try lockfile.packages.append(lockfile.allocator, maybe_root);
+
+        var dep_map = try std.ArrayList(DependencyID).initCapacity(manager.lockfile.allocator, maybe_root.dependencies.len);
+        dep_map.appendNTimesAssumeCapacity(invalid_dependency_id, maybe_root.dependencies.len);
+
+        // only used for workspaces
+        var pkg_map = try std.ArrayList(PackageID).initCapacity(manager.lockfile.allocator, manager.lockfile.workspace_paths.count() + 1);
+
+        // root is always 0
+        pkg_map.appendAssumeCapacity(0);
 
         manager.summary = try Package.Diff.generate(
             manager,
@@ -14532,43 +14498,59 @@
             manager.log,
             manager.lockfile,
             &lockfile,
-            &root,
-            &maybe_root,
+            0,
+            0,
             if (manager.to_update) manager.update_requests else null,
-            mapping,
+            &dep_map,
+            &pkg_map,
         );
 
-        had_any_diffs = manager.summary.hasDiffs();
+        had_any_diffs = manager.summary.hasDiffs() or manager.summary.satisfied_versions > 0;
 
         if (had_any_diffs) {
             var builder_ = manager.lockfile.stringBuilder();
             // ensure we use one pointer to reference it instead of creating new ones and potentially aliasing
             var builder = &builder_;
 
             // If you changed packages, we will copy over the new package from the new lockfile
-            const new_dependencies = maybe_root.dependencies.get(lockfile.buffers.dependencies.items);
-            for (new_dependencies) |new_dep| {
-                new_dep.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
+            const new_pkgs = lockfile.packages.slice();
+            const new_pkg_dependencies = new_pkgs.items(.dependencies);
+            const new_pkg_scripts = new_pkgs.items(.scripts);
+            const new_pkg_names: []String = new_pkgs.items(.name);
+            const new_pkg_name_hashes = new_pkgs.items(.name_hash);
+
+            var deps_off: DependencyID = @truncate(manager.lockfile.buffers.dependencies.items.len);
+            for (new_pkg_dependencies, 0..) |new_pkg_deps, _new_pkg_id| {
+                const new_pkg_id: PackageID = @truncate(_new_pkg_id);
+
+                for (new_pkg_deps.get(lockfile.buffers.dependencies.items)) |new_pkg_dep| {
+                    new_pkg_dep.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
+                }
+
+                new_pkg_scripts[new_pkg_id].count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
+                builder.count(new_pkg_names[new_pkg_id].slice(lockfile.buffers.string_bytes.items));
+
+                const existing_pkg_id = pkg_map.items[new_pkg_id];
+
+                manager.lockfile.packages.items(.dependencies)[existing_pkg_id] = .{ .off = deps_off, .len = new_pkg_deps.len };
+                manager.lockfile.packages.items(.resolutions)[existing_pkg_id] = .{ .off = deps_off, .len = new_pkg_deps.len };
+
+                deps_off += new_pkg_deps.len;
             }
 
+            const new_deps_len = deps_off - manager.lockfile.buffers.dependencies.items.len;
+            deps_off = @truncate(manager.lockfile.buffers.dependencies.items.len);
+
             for (lockfile.workspace_paths.values()) |path| builder.count(path.slice(lockfile.buffers.string_bytes.items));
             for (lockfile.workspace_versions.values()) |version| version.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
             for (lockfile.patched_dependencies.values()) |patch_dep| builder.count(patch_dep.path.slice(lockfile.buffers.string_bytes.items));
             lockfile.overrides.count(&lockfile, builder);
-            maybe_root.scripts.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
 
-            const off = @as(u32, @truncate(manager.lockfile.buffers.dependencies.items.len));
-            const len = @as(u32, @truncate(new_dependencies.len));
             var packages = manager.lockfile.packages.slice();
-            var dep_lists = packages.items(.dependencies);
-            var resolution_lists = packages.items(.resolutions);
-            const old_resolutions_list = resolution_lists[0];
-            dep_lists[0] = .{ .off = off, .len = len };
-            resolution_lists[0] = .{ .off = off, .len = len };
             try builder.allocate();
 
-            const all_name_hashes: []PackageNameHash = brk: {
+            const all_override_name_hashes: []PackageNameHash = brk: {
                 if (!manager.summary.overrides_changed) break :brk &.{};
                 const hashes_len = manager.lockfile.overrides.map.entries.len + lockfile.overrides.map.entries.len;
                 if (hashes_len == 0) break :brk &.{};
@@ -14587,41 +14569,64 @@
                 break :brk all_name_hashes;
             };
 
-            manager.lockfile.overrides = try lockfile.overrides.clone(manager, &lockfile, manager.lockfile, builder);
+            manager.lockfile.overrides = try lockfile.overrides.clone(&lockfile, manager.lockfile, builder);
 
             manager.lockfile.trusted_dependencies = if (lockfile.trusted_dependencies) |trusted_dependencies|
                 try trusted_dependencies.clone(manager.lockfile.allocator)
             else
                 null;
 
-            try manager.lockfile.buffers.dependencies.ensureUnusedCapacity(manager.lockfile.allocator, len);
-            try manager.lockfile.buffers.resolutions.ensureUnusedCapacity(manager.lockfile.allocator, len);
-
-            const old_resolutions = old_resolutions_list.get(manager.lockfile.buffers.resolutions.items);
+            {
+                // clone deps
+                try manager.lockfile.buffers.dependencies.ensureUnusedCapacity(manager.lockfile.allocator, new_deps_len);
+                try manager.lockfile.buffers.resolutions.ensureUnusedCapacity(manager.lockfile.allocator, new_deps_len);
+
+                var dependencies = manager.lockfile.buffers.dependencies.items.ptr[deps_off .. deps_off + new_deps_len];
+                var resolutions = manager.lockfile.buffers.resolutions.items.ptr[deps_off .. deps_off + new_deps_len];
+
+                // It is too easy to accidentally undefined memory
+                @memset(resolutions, invalid_package_id);
+                @memset(dependencies, Dependency{});
+
+                manager.lockfile.buffers.dependencies.items = manager.lockfile.buffers.dependencies.items.ptr[0 .. deps_off + new_deps_len];
+                manager.lockfile.buffers.resolutions.items = manager.lockfile.buffers.resolutions.items.ptr[0 .. deps_off + new_deps_len];
+
+                for (new_pkg_dependencies, 0..) |new_pkg_deps, _new_pkg_id| {
+                    const new_pkg_id: PackageID = @truncate(_new_pkg_id);
+                    const existing_pkg_id = pkg_map.items[new_pkg_id];
+
+                    for (new_pkg_deps.begin()..new_pkg_deps.end()) |_new_pkg_dep_id| {
+                        const new_pkg_dep_id: DependencyID = @truncate(_new_pkg_dep_id);
+                        dependencies[new_pkg_dep_id] = try lockfile.buffers.dependencies.items[new_pkg_dep_id].clone(
+                            lockfile.buffers.string_bytes.items,
+                            *Lockfile.StringBuilder,
+                            builder,
+                        );
 
-            var dependencies = manager.lockfile.buffers.dependencies.items.ptr[off .. off + len];
-            var resolutions = manager.lockfile.buffers.resolutions.items.ptr[off .. off + len];
+                        if (new_pkg_dep_id < dep_map.items.len) {
+                            const mapped_dep_id = dep_map.items[new_pkg_dep_id];
+                            if (mapped_dep_id != invalid_dependency_id) {
+                                resolutions[new_pkg_dep_id] = manager.lockfile.buffers.resolutions.items[mapped_dep_id];
+                            }
+                        }
+                    }
 
-            // It is too easy to accidentally undefined memory
-            @memset(resolutions, invalid_package_id);
-            @memset(dependencies, Dependency{});
+                    packages.items(.scripts)[existing_pkg_id] = new_pkg_scripts[new_pkg_id].clone(
+                        lockfile.buffers.string_bytes.items,
+                        *Lockfile.StringBuilder,
+                        builder,
+                    );
 
-            manager.lockfile.buffers.dependencies.items = manager.lockfile.buffers.dependencies.items.ptr[0 .. off + len];
-            manager.lockfile.buffers.resolutions.items = manager.lockfile.buffers.resolutions.items.ptr[0 .. off + len];
+                    packages.items(.name)[existing_pkg_id] = builder.appendWithHash(
+                        String,
+                        new_pkg_names[new_pkg_id].slice(lockfile.buffers.string_bytes.items),
+                        new_pkg_name_hashes[new_pkg_id],
+                    );
 
-            for (new_dependencies, 0..) |new_dep, i| {
-                dependencies[i] = try new_dep.clone(manager, lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder);
-                if (mapping[i] != invalid_package_id) {
-                    resolutions[i] = old_resolutions[mapping[i]];
+                    packages.items(.name_hash)[existing_pkg_id] = new_pkg_name_hashes[new_pkg_id];
                 }
             }
 
-            manager.lockfile.packages.items(.scripts)[0] = maybe_root.scripts.clone(
-                lockfile.buffers.string_bytes.items,
-                *Lockfile.StringBuilder,
-                builder,
-            );
-
             // Update workspace paths
             try manager.lockfile.workspace_paths.ensureTotalCapacity(manager.lockfile.allocator, lockfile.workspace_paths.entries.len);
             {
@@ -14692,9 +14697,9 @@ pub const PackageManager = struct {
 
             builder.clamp();
 
-            if (manager.summary.overrides_changed and all_name_hashes.len > 0) {
+            if (manager.summary.overrides_changed and all_override_name_hashes.len > 0) {
                 for (manager.lockfile.buffers.dependencies.items, 0..) |*dependency, dependency_i| {
-                    if (std.mem.indexOfScalar(PackageNameHash, all_name_hashes, dependency.name_hash)) |_| {
+                    if (std.mem.indexOfScalar(PackageNameHash, all_override_name_hashes, dependency.name_hash)) |_| {
                         manager.lockfile.buffers.resolutions.items[dependency_i] = invalid_package_id;
                         try manager.enqueueDependencyWithMain(
                             @truncate(dependency_i),
@@ -14708,15 +14713,15 @@
 
             // Split this into two passes because the below may allocate memory or invalidate pointers
             if (manager.summary.add > 0 or manager.summary.update > 0) {
-                const changes = @as(PackageID, @truncate(mapping.len));
-                var counter_i: PackageID = 0;
+                const changes: DependencyID = @truncate(dep_map.items.len);
+                var counter_i: DependencyID = 0;
 
                 _ = manager.getCacheDirectory();
                 _ = manager.getTemporaryDirectory();
 
                 while (counter_i < changes) : (counter_i += 1) {
-                    if (mapping[counter_i] == invalid_package_id) {
-                        const dependency_i = counter_i + off;
+                    if (counter_i < dep_map.items.len and dep_map.items[counter_i] == invalid_dependency_id) {
+                        const dependency_i = counter_i + deps_off;
                         const dependency = manager.lockfile.buffers.dependencies.items[dependency_i];
                         try manager.enqueueDependencyWithMain(
                             dependency_i,
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 969a31c1dd136b..e159d331516df4 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -157,6 +157,8 @@ trusted_dependencies: ?TrustedDependenciesSet = null,
 patched_dependencies: PatchedDependenciesMap = .{},
 overrides: OverrideMap = .{},
 
+folder_resolutions: FolderResolution.Map = .{},
+
 const Stream = std.io.FixedBufferStream([]u8);
 pub const default_filename = "bun.lockb";
 
@@ -342,7 +344,7 @@ pub fn loadFromDir(
                 };
             };
 
-            TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| {
+            TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log) catch |err| {
                 switch (err) {
                     error.OutOfMemory => bun.outOfMemory(),
                     else => {
@@ -397,7 +399,7 @@ pub fn loadFromDir(
                     Output.panic("failed to print valid json from binary lockfile: {s}", .{@errorName(err)});
                 };
 
-                TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| {
+                TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log) catch |err| {
                     Output.panic("failed to parse text lockfile converted from binary lockfile: {s}", .{@errorName(err)});
                 };
 
@@ -420,6 +422,7 @@ pub fn loadFromBytes(this: *Lockfile, pm: ?*PackageManager, buf: []u8, allocator
     this.workspace_versions = .{};
     this.overrides = .{};
     this.patched_dependencies = .{};
+    this.folder_resolutions = .{};
 
     const load_result = Lockfile.Serializer.load(this, &stream, allocator, log, pm) catch |err| {
         return LoadResult{ .err = .{ .step = .parse_file, .value = err, .lockfile_path = "bun.lockb", .format = .binary } };
@@ -679,11 +682,13 @@ pub const Tree = struct {
     pub fn Builder(comptime method: BuilderMethod) type {
         return struct {
             allocator: Allocator,
-            name_hashes: []const PackageNameHash,
+            pkg_names: []const String,
+            pkg_metas: []const Package.Meta,
+            pkg_resolutions: []const Resolution,
+            pkg_resolution_lists: []const Lockfile.DependencyIDSlice,
             list: bun.MultiArrayList(Entry) = .{},
             resolutions: []const PackageID,
             dependencies: []const Dependency,
-            resolution_lists: []const Lockfile.DependencyIDSlice,
             queue: Lockfile.TreeFiller,
             log: *logger.Log,
             lockfile: *const Lockfile,
@@ -775,7 +780,7 @@ pub const Tree = struct {
             root_dep_id => 0,
             else => |id| builder.resolutions[id],
         };
-        const resolution_list = builder.resolution_lists[parent_pkg_id];
+        const resolution_list = builder.pkg_resolution_lists[parent_pkg_id];
 
         if (resolution_list.len == 0) return;
 
@@ -792,13 +797,7 @@ pub const Tree = struct {
         const trees = list_slice.items(.tree);
         const dependency_lists = list_slice.items(.dependencies);
         const next: *Tree = &trees[builder.list.len - 1];
-        const name_hashes: []const PackageNameHash = builder.name_hashes;
-        const max_package_id = @as(PackageID, @truncate(name_hashes.len));
-
-        const pkgs = builder.lockfile.packages.slice();
-        const pkg_resolutions = pkgs.items(.resolution);
-        const pkg_metas = pkgs.items(.meta);
-        const pkg_names = pkgs.items(.name);
+        const max_package_id = @as(PackageID, @truncate(builder.lockfile.packages.len));
 
         builder.sort_buf.clearRetainingCapacity();
         try builder.sort_buf.ensureUnusedCapacity(builder.allocator, resolution_list.len);
@@ -843,15 +842,15 @@
             if (comptime method == .filter) {
                 if (builder.lockfile.isResolvedDependencyDisabled(
                     dep_id,
-                    switch (pkg_resolutions[parent_pkg_id].tag) {
+                    switch (builder.pkg_resolutions[parent_pkg_id].tag) {
                         .root, .workspace, .folder => builder.manager.options.local_package_features,
                         else => builder.manager.options.remote_package_features,
                     },
-                    &pkg_metas[pkg_id],
+                    &builder.pkg_metas[pkg_id],
                 )) {
                     if (log_level.isVerbose()) {
-                        const meta = &pkg_metas[pkg_id];
-                        const name = builder.lockfile.str(&pkg_names[pkg_id]);
+                        const meta = &builder.pkg_metas[pkg_id];
+                        const name = builder.lockfile.str(&builder.pkg_names[pkg_id]);
                         if (!meta.os.isMatch() and !meta.arch.isMatch()) {
                             Output.prettyErrorln("Skip installing '{s}' cpu & os mismatch", .{name});
                         } else if (!meta.os.isMatch()) {
@@ -882,10 +881,10 @@
                     const res_id = builder.resolutions[dep_id];
                     const pattern, const path_or_name = switch (workspace_filter) {
-                        .name => |pattern| .{ pattern, pkg_names[res_id].slice(builder.buf()) },
+                        .name => |pattern| .{ pattern, builder.pkg_names[res_id].slice(builder.buf()) },
 
                         .path => |pattern| path: {
-                            const res = &pkg_resolutions[res_id];
+                            const res = &builder.pkg_resolutions[res_id];
                             if (res.tag != .workspace) {
                                 break :dont_skip;
                             }
@@ -939,14 +938,14 @@
             }
 
             const hoisted: HoistDependencyResult = hoisted: {
-                const dependency = builder.dependencies[dep_id];
+                const dep = builder.dependencies[dep_id];
 
                 // don't hoist if it's a folder dependency or a bundled dependency.
-                if (dependency.behavior.isBundled()) {
+                if (dep.behavior.isBundled()) {
                     break :hoisted .{ .placement = .{ .id = next.id, .bundled = true } };
                 }
 
-                if (pkg_resolutions[pkg_id].tag == .folder) {
+                if (builder.pkg_resolutions[pkg_id].tag == .folder) {
                     break :hoisted .{ .placement = .{ .id = next.id } };
                 }
@@ -954,7 +953,8 @@
                     true,
                     hoist_root_id,
                     pkg_id,
-                    &dependency,
+                    dep_id,
+                    &dep,
                     dependency_lists,
                     trees,
                     method,
@@ -965,9 +965,9 @@
             switch (hoisted) {
                 .dependency_loop, .hoisted => continue,
                 .placement => |dest| {
-                    dependency_lists[dest.id].append(builder.allocator, dep_id) catch bun.outOfMemory();
+                    try dependency_lists[dest.id].append(builder.allocator, dep_id);
                     trees[dest.id].dependencies.len += 1;
-                    if (builder.resolution_lists[pkg_id].len > 0) {
+                    if (builder.pkg_resolution_lists[pkg_id].len > 0) {
                         try builder.queue.writeItem(.{
                             .tree_id = dest.id,
                             .dependency_id = dep_id,
@@ -994,8 +994,9 @@
         this: *Tree,
         comptime as_defined: bool,
         hoist_root_id: Id,
-        package_id: PackageID,
-        dependency: *const Dependency,
+        pkg_id: PackageID,
+        target_dep_id: DependencyID,
+        target_dep: *const Dependency,
         dependency_lists: []Lockfile.DependencyIDList,
         trees: []Tree,
         comptime method: BuilderMethod,
@@ -1005,15 +1006,15 @@
         for (0..this_dependencies.len) |i| {
             const dep_id = this_dependencies[i];
             const dep = builder.dependencies[dep_id];
-            if (dep.name_hash != dependency.name_hash) continue;
+            if (dep.name_hash != target_dep.name_hash) continue;
 
-            if (builder.resolutions[dep_id] == package_id) {
+            if (builder.resolutions[dep_id] == pkg_id) {
                 // this dependency is the same package as the other, hoist
                 return .hoisted; // 1
             }
 
             if (comptime as_defined) {
-                if (dep.behavior.isDev() != dependency.behavior.isDev()) {
+                if (dep.behavior.isDev() != target_dep.behavior.isDev()) {
                     // will only happen in workspaces and root package because
                     // dev dependencies won't be included in other types of
                     // dependencies
@@ -1024,15 +1025,16 @@
             // now we either keep the dependency at this place in the tree,
             // or hoist if peer version allows it
-            if (dependency.behavior.isPeer()) {
-                if (dependency.version.tag == .npm) {
-                    const resolution: Resolution = builder.lockfile.packages.items(.resolution)[builder.resolutions[dep_id]];
-                    const version = dependency.version.value.npm.version;
-                    if (resolution.tag == .npm and version.satisfies(resolution.value.npm.version, builder.buf(), builder.buf())) {
-                        return .hoisted; // 1
-                    }
+            const target_res = builder.pkg_resolutions[builder.resolutions[target_dep_id]];
+            const existing_res = builder.pkg_resolutions[builder.resolutions[dep_id]];
+
+            if (target_res.tag == .npm and existing_res.tag == .npm) {
+                if (target_dep.version.tag == .npm and target_dep.version.value.npm.version.satisfies(existing_res.value.npm.version, builder.buf(), builder.buf())) {
+                    return .hoisted; // 1
                 }
+            }
 
+            if (target_dep.behavior.isPeer()) {
                 // Root dependencies are manually chosen by the user. Allow them
                 // to hoist other peers even if they don't satisfy the version
                 if (builder.lockfile.isWorkspaceRootDependency(dep_id)) {
@@ -1043,12 +1045,12 @@
             if (as_defined and !dep.behavior.isPeer()) {
                 builder.maybeReportError("Package \"{}@{}\" has a dependency loop\n  Resolution: \"{}@{}\"\n  Dependency: \"{}@{}\"", .{
-                    builder.packageName(package_id),
-                    builder.packageVersion(package_id),
+                    builder.packageName(pkg_id),
+                    builder.packageVersion(pkg_id),
                     builder.packageName(builder.resolutions[dep_id]),
                     builder.packageVersion(builder.resolutions[dep_id]),
-                    dependency.name.fmt(builder.buf()),
-                    dependency.version.literal.fmt(builder.buf()),
+                    target_dep.name.fmt(builder.buf()),
+                    target_dep.version.literal.fmt(builder.buf()),
                 });
                 return error.DependencyLoop;
             }
@@ -1061,8 +1063,9 @@
             const id = trees[this.parent].hoistDependency(
                 false,
                 hoist_root_id,
-                package_id,
-                dependency,
+                pkg_id,
+                target_dep_id,
+                target_dep,
                 dependency_lists,
                 trees,
                 method,
@@ -1211,7 +1214,6 @@ fn preprocessUpdateRequests(old: *Lockfile, manager: *PackageManager, updates: [
                     sliced.slice,
                     &sliced,
                     null,
-                    manager,
                 ) orelse Dependency.Version{};
             }
         }
@@ -1337,7 +1339,7 @@ pub fn cleanWithLogger(
         var builder = new.stringBuilder();
         old.overrides.count(old, &builder);
         try builder.allocate();
-        new.overrides = try old.overrides.clone(manager, old, new, &builder);
+        new.overrides = try old.overrides.clone(old, new, &builder);
     }
 
     // Step 1. Recreate the lockfile with only the packages that are still alive
@@ -1360,7 +1362,7 @@ pub fn cleanWithLogger(
     };
 
     // try clone_queue.ensureUnusedCapacity(root.dependencies.len);
-    _ = try root.clone(manager, old, new, package_id_mapping, &cloner);
+    _ = try root.clone(old, new, package_id_mapping, &cloner);
 
     // Clone workspace_paths and workspace_versions at the end.
     if (old.workspace_paths.count() > 0 or old.workspace_versions.count() > 0) {
@@ -1542,7 +1544,6 @@ const Cloner = struct {
         const old_package = this.old.packages.get(to_clone.old_resolution);
 
         this.lockfile.buffers.resolutions.items[to_clone.resolve_id] = try old_package.clone(
-            this.manager,
             this.old,
             this.lockfile,
             this.mapping,
@@ -1592,14 +1593,16 @@ pub fn hoist(
     workspace_filters: if (method == .filter) []const WorkspaceFilter else void,
 ) Tree.SubtreeError!void {
     const allocator = lockfile.allocator;
-    var slice = lockfile.packages.slice();
+    var pkgs = lockfile.packages.slice();
 
     var path_buf: bun.PathBuffer = undefined;
 
     var builder = Tree.Builder(method){
-        .name_hashes = slice.items(.name_hash),
+        .pkg_names = pkgs.items(.name),
+        .pkg_metas = pkgs.items(.meta),
+        .pkg_resolutions = pkgs.items(.resolution),
         .queue = TreeFiller.init(allocator),
-        .resolution_lists = slice.items(.resolutions),
+        .pkg_resolution_lists = pkgs.items(.resolutions),
         .resolutions = lockfile.buffers.resolutions.items,
         .allocator = allocator,
         .dependencies = lockfile.buffers.dependencies.items,
@@ -2591,23 +2594,17 @@ pub fn initEmpty(this: *Lockfile, allocator: Allocator) void {
         .workspace_versions = .{},
         .overrides = .{},
         .meta_hash = zero_hash,
+        .folder_resolutions = .{},
     };
 }
 
 pub fn getPackageID(
     this: *Lockfile,
     name_hash: u64,
-    // If non-null, attempt to use an existing package
-    // that satisfies this version range.
-    version: ?Dependency.Version,
     resolution: *const Resolution,
 ) ?PackageID {
     const entry = this.package_index.get(name_hash) orelse return null;
     const resolutions: []const Resolution = this.packages.items(.resolution);
-    const npm_version = if (version) |v| switch (v.tag) {
-        .npm => v.value.npm.version,
-        else => null,
-    } else null;
     const buf = this.buffers.string_bytes.items;
 
     switch (entry) {
@@ -2617,10 +2614,6 @@ pub fn getPackageID(
             if (resolutions[id].eql(resolution, buf, buf)) {
                 return id;
             }
-
-            if (resolutions[id].tag == .npm and npm_version != null) {
-                if (npm_version.?.satisfies(resolutions[id].value.npm.version, buf, buf)) return id;
-            }
         },
         .ids => |ids| {
             for (ids.items) |id| {
@@ -2629,10 +2622,6 @@
                 if (resolutions[id].eql(resolution, buf, buf)) {
                     return id;
                 }
-
-                if (resolutions[id].tag == .npm and npm_version != null) {
-                    if (npm_version.?.satisfies(resolutions[id].value.npm.version, buf, buf)) return id;
-                }
             }
         },
     }
@@ -2760,7 +2749,7 @@ pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) OOM!Lockfile.P
 fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) OOM!Lockfile.Package {
     defer {
         if (comptime Environment.allow_assert) {
-            assert(this.getPackageID(package_.name_hash, null, &package_.resolution) != null);
+            assert(this.getPackageID(package_.name_hash, &package_.resolution) != null);
         }
     }
     var package = package_;
@@ -2979,14 +2968,14 @@ pub const OverrideMap = struct {
         }
     }
 
-    pub fn clone(this: *OverrideMap, pm: *PackageManager, old_lockfile: *Lockfile, new_lockfile: *Lockfile, new_builder: *Lockfile.StringBuilder) !OverrideMap {
+    pub fn clone(this: *OverrideMap, old_lockfile: *Lockfile, new_lockfile: *Lockfile, new_builder: *Lockfile.StringBuilder) !OverrideMap {
         var new = OverrideMap{};
         try new.map.ensureTotalCapacity(new_lockfile.allocator, this.map.entries.len);
 
         for (this.map.keys(), this.map.values()) |k, v| {
             new.map.putAssumeCapacity(
                 k,
-                try v.clone(pm, old_lockfile.buffers.string_bytes.items, @TypeOf(new_builder), new_builder),
+                try v.clone(old_lockfile.buffers.string_bytes.items, @TypeOf(new_builder), new_builder),
             );
         }
 
@@ -3036,7 +3025,6 @@
     /// It is assumed the input map is uninitialized (zero entries)
     pub fn parseAppend(
         this: *OverrideMap,
-        pm: *PackageManager,
         lockfile: *Lockfile,
         root_package: *Lockfile.Package,
         log: *logger.Log,
@@ -3048,9 +3036,9 @@
             assert(this.map.entries.len == 0); // only call parse once
         }
         if (expr.asProperty("overrides")) |overrides| {
-            try this.parseFromOverrides(pm, lockfile, root_package, json_source, log, overrides.expr, builder);
+            try this.parseFromOverrides(lockfile, root_package, json_source, log, overrides.expr, builder);
         } else if (expr.asProperty("resolutions")) |resolutions| {
-            try this.parseFromResolutions(pm, lockfile, root_package, json_source, log, resolutions.expr, builder);
+            try this.parseFromResolutions(lockfile, root_package, json_source, log, resolutions.expr, builder);
         }
         debug("parsed {d} overrides", .{this.map.entries.len});
     }
@@ -3058,7 +3046,6 @@
     /// https://docs.npmjs.com/cli/v9/configuring-npm/package-json#overrides
     pub fn parseFromOverrides(
         this: *OverrideMap,
-        pm: *PackageManager,
         lockfile: *Lockfile,
         root_package: *Lockfile.Package,
         source: logger.Source,
@@ -3118,7 +3105,6 @@
                 if (try parseOverrideValue(
                     "override",
                     lockfile,
-                    pm,
                     root_package,
                     source,
                     value.loc,
@@ -3136,7 +3122,6 @@ pub const OverrideMap = struct {
     /// yarn berry: https://yarnpkg.com/configuration/manifest#resolutions
     pub fn parseFromResolutions(
         this: *OverrideMap,
-        pm: *PackageManager,
         lockfile: *Lockfile,
         root_package: *Lockfile.Package,
         source: logger.Source,
@@ -3190,7 +3175,6 @@
                 if (try parseOverrideValue(
                     "resolution",
                     lockfile,
-                    pm,
                     root_package,
                     source,
                     value.loc,
@@ -3208,7 +3192,6 @@
     pub fn parseOverrideValue(
         comptime field: []const u8,
         lockfile: *Lockfile,
-        package_manager: *PackageManager,
         root_package: *Lockfile.Package,
         source: logger.Source,
         loc: logger.Loc,
@@ -3256,7 +3239,6 @@
             literalSliced.slice,
             &literalSliced,
             log,
-            package_manager,
         ) orelse {
             try log.addWarningFmt(&source, loc, lockfile.allocator, "Invalid " ++ field ++ " value \"{s}\"", .{value});
             return null;
@@ -3757,7 +3739,6 @@ pub const Package = extern struct {
     pub fn clone(
         this: *const Lockfile.Package,
-        pm: *PackageManager,
         old: *Lockfile,
         new: *Lockfile,
         package_id_mapping: []PackageID,
@@ -3854,7 +3835,6 @@
         for (old_dependencies, dependencies) |old_dep, *new_dep| {
             new_dep.* = try old_dep.clone(
-                pm,
                 old_string_buf,
                 *Lockfile.StringBuilder,
                 builder,
@@ -3888,7 +3868,6 @@
     pub fn fromPackageJSON(
         lockfile: *Lockfile,
-        pm: *PackageManager,
         package_json: *PackageJSON,
         comptime features: Features,
     ) !Lockfile.Package {
@@ -3948,7 +3927,7 @@
         for (package_dependencies) |dep| {
             if (!dep.behavior.isEnabled(features)) continue;
-            dependencies[0] = try dep.clone(pm, source_buf, @TypeOf(&string_builder), &string_builder);
+            dependencies[0] = try dep.clone(source_buf, @TypeOf(&string_builder), &string_builder);
             dependencies = dependencies[1..];
             if (dependencies.len == 0) break;
         }
@@ -3978,7 +3957,6 @@
     }
 
     pub fn fromNPM(
-        pm: *PackageManager,
         allocator: Allocator,
         lockfile: *Lockfile,
         log: *logger.Log,
@@ -4142,7 +4120,6 @@
                     sliced.slice,
                     &sliced,
                     log,
-                    pm,
                 ) orelse Dependency.Version{},
             };
 
@@ -4210,6 +4187,10 @@
         update: u32 = 0,
         overrides_changed: bool = false,
 
+        // Dependency version literal changed, but it still
+        // satisfies the version of the package in the lockfile.
+        satisfied_versions: u32 = 0,
+
         // bool for if this dependency should be added to lockfile trusted dependencies.
         // it is false when the new trusted dependency is coming from the default list.
         added_trusted_dependencies: std.ArrayHashMapUnmanaged(TruncatedPackageNameHash, bool, ArrayIdentityContext, false) = .{},
@@ -4235,18 +4216,22 @@
             pm: *PackageManager,
             allocator: Allocator,
             log: *logger.Log,
-            from_lockfile: *Lockfile,
+            from_lockfile: *const Lockfile,
             to_lockfile: *Lockfile,
-            from: *Lockfile.Package,
-            to: *Lockfile.Package,
+            from_pkg_id: PackageID,
+            to_pkg_id: PackageID,
             update_requests: ?[]PackageManager.UpdateRequest,
-            id_mapping: ?[]PackageID,
+            dep_map: *std.ArrayList(DependencyID),
+            pkg_map: *std.ArrayList(PackageID),
         ) !Summary {
             var summary = Summary{};
-            var to_deps = to.dependencies.get(to_lockfile.buffers.dependencies.items);
-            const from_deps = from.dependencies.get(from_lockfile.buffers.dependencies.items);
-            const from_resolutions = from.resolutions.get(from_lockfile.buffers.resolutions.items);
-            var to_i: usize = 0;
+
+            const from_pkgs = from_lockfile.packages.slice();
+            const from_pkg_resolutions = from_pkgs.items(.resolution);
+            const from_pkg_dependencies = from_pkgs.items(.dependencies);
+            const from_pkg_scripts = from_pkgs.items(.scripts);
+
+            const from_deps = from_pkg_dependencies[from_pkg_id];
 
             if (from_lockfile.overrides.map.count() != to_lockfile.overrides.map.count()) {
                 summary.overrides_changed = true;
@@ -4391,21 +4376,32 @@
                 break :patched_dependencies_changed false;
             };
 
-            for (from_deps, 0..) |*from_dep, i| {
+            const to_deps_off: DependencyID, const to_deps_len = to_deps: {
+                const to_deps = to_lockfile.packages.items(.dependencies)[to_pkg_id];
+                break :to_deps .{ to_deps.off, to_deps.len };
+            };
+            const to_deps_end = to_deps_off + to_deps_len;
+
+            var to_dep_id = to_deps_off;
+
+            for (from_deps.begin()..from_deps.end()) |_from_dep_id| {
+                const from_dep_id: DependencyID = @truncate(_from_dep_id);
+                const from_dep = from_lockfile.buffers.dependencies.items[from_dep_id];
+
                 found: {
-                    const prev_i = to_i;
+                    const prev_i = to_dep_id;
 
                     // common case, dependency is present in both versions:
                     // - in the same position
                     // - shifted by a constant offset
-                    while (to_i < to_deps.len) : (to_i += 1) {
-                        if (from_dep.name_hash == to_deps[to_i].name_hash) break :found;
+                    while (to_dep_id < to_deps_end) : (to_dep_id += 1) {
+                        if (from_dep.name_hash == to_lockfile.buffers.dependencies.items[to_dep_id].name_hash) break :found;
                     }
 
                     // less common, o(n^2) case
-                    to_i = 0;
-                    while (to_i < prev_i) : (to_i += 1) {
-                        if (from_dep.name_hash == to_deps[to_i].name_hash) break :found;
+                    to_dep_id = to_deps_off;
+                    while (to_dep_id < prev_i) : (to_dep_id += 1) {
+                        if (from_dep.name_hash == to_lockfile.buffers.dependencies.items[to_dep_id].name_hash) break :found;
                     }
 
                     // We found a removed dependency!
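[Editor's note] The next hunk adds the `match` logic that feeds the new `summary.satisfied_versions` counter declared above, which is also why `had_any_diffs` earlier becomes `hasDiffs() or satisfied_versions > 0`. A hedged sketch of the rule, with `to_dep`, `from_res`, `to_buf`, and `from_buf` as stand-in locals:

    // If the package.json version literal changed but the npm resolution
    // already recorded in the lockfile still satisfies the new range, keep
    // the resolution and only count the literal change, so the lockfile text
    // is rewritten without re-resolving the package.
    if (to_dep.version.tag == .npm and from_res.tag == .npm) {
        const satisfies = to_dep.version.value.npm.version.satisfies(
            from_res.value.npm.version,
            to_buf, // string bytes of the new (to) lockfile
            from_buf, // string bytes of the existing (from) lockfile
        );
        summary.satisfied_versions += @intFromBool(satisfies);
        // satisfies == true means "treat as matched": no new resolution task
        // is enqueued, but the diff still triggers a lockfile save.
    }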
@@ -4414,9 +4410,49 @@ pub const Package = extern struct { summary.remove += 1; continue; } - defer to_i += 1; + defer to_dep_id += 1; + + const match = match: { + const to_dep = to_lockfile.buffers.dependencies.items[to_dep_id]; + eql: { + if (to_dep.version.tag != .npm) { + break :eql; + } + if (from_dep_id >= from_lockfile.buffers.resolutions.items.len) { + break :eql; + } + const from_dep_pkg_id = from_lockfile.buffers.resolutions.items[from_dep_id]; + if (from_dep_pkg_id >= from_lockfile.packages.len) { + break :eql; + } + const from_res = from_pkg_resolutions[from_dep_pkg_id]; + if (from_res.tag != .npm) { + break :eql; + } + + const equals = to_dep.eql(&from_dep, to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items); - if (to_deps[to_i].eql(from_dep, to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items)) { + if (equals) { + break :match true; + } + + // if it satisfies we should not update the resolution + const satisfies = to_dep.version.value.npm.version.satisfies( + from_res.value.npm.version, + to_lockfile.buffers.string_bytes.items, + from_lockfile.buffers.string_bytes.items, + ); + + // but we want the version to update in the lockfile + summary.satisfied_versions += @intFromBool(satisfies); + + break :match satisfies; + } + + break :match to_dep.eql(&from_dep, to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items); + }; + + if (match) { if (update_requests) |updates| { if (updates.len == 0 or brk: { for (updates) |request| { @@ -4430,49 +4466,48 @@ pub const Package = extern struct { } } - if (id_mapping) |mapping| { - const version = to_deps[to_i].version; - const update_mapping = switch (version.tag) { - .workspace => if (to_lockfile.workspace_paths.getPtr(from_dep.name_hash)) |path_ptr| brk: { + { + const is_workspace_only = to_lockfile.buffers.dependencies.items[to_dep_id].behavior.isWorkspaceOnly(); + const is_workspace_version = to_lockfile.buffers.dependencies.items[to_dep_id].version.tag == .workspace; + const update_mapping = !is_workspace_only or !is_workspace_version or update_mapping: { + const path_ptr = to_lockfile.workspace_paths.getPtr(from_dep.name_hash) orelse { + break :update_mapping false; + }; + + { const path = to_lockfile.str(path_ptr); var local_buf: bun.PathBuffer = undefined; - const package_json_path = Path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &local_buf, &.{ path, "package.json" }, .auto); - - const source = bun.sys.File.toSource(package_json_path, allocator).unwrap() catch { - // Can't guarantee this workspace still exists - break :brk false; - }; - var workspace = Package{}; + const workspace_path = Path.joinAbsStringBuf( + FileSystem.instance.top_level_dir, + &local_buf, + &[_]string{path}, + .auto, + ); - const json = pm.workspace_package_json_cache.getWithSource(bun.default_allocator, log, source, .{}).unwrap() catch break :brk false; + // version isn't used for workspaces + const dummy_version: Dependency.Version = .{}; + const workspace_to_pkg_id = switch (FolderResolution.getOrPut(.{ .relative = .workspace }, dummy_version, workspace_path, pm, to_lockfile)) { + .err => break :update_mapping false, + .new_package_id, .package_id => |pkg_id| pkg_id, + }; - var resolver: void = {}; - try workspace.parseWithJSON( - to_lockfile, - pm, - allocator, - log, - source, - json.root, - void, - &resolver, - Features.workspace, - ); + to_lockfile.buffers.resolutions.items[to_dep_id] = workspace_to_pkg_id; + const workspace_from_pkg_id = 
from_lockfile.buffers.resolutions.items[from_dep_id]; - to_deps = to.dependencies.get(to_lockfile.buffers.dependencies.items); + try pkg_map.append(workspace_from_pkg_id); - var from_pkg = from_lockfile.packages.get(from_resolutions[i]); const diff = try generate( pm, allocator, log, from_lockfile, to_lockfile, - &from_pkg, - &workspace, + workspace_from_pkg_id, + workspace_to_pkg_id, update_requests, - null, + dep_map, + pkg_map, ); if (PackageManager.verbose_install and (diff.add + diff.remove + diff.update) > 0) { @@ -4484,17 +4519,17 @@ pub const Package = extern struct { }); } - break :brk !diff.hasDiffs(); - } else false, - else => true, + summary.satisfied_versions += diff.satisfied_versions; + + break :update_mapping true; + } }; if (update_mapping) { - mapping[to_i] = @truncate(i); + try dep_map.appendNTimes(invalid_dependency_id, (to_dep_id + 1) -| dep_map.items.len); + dep_map.items[to_dep_id] = from_dep_id; continue; } - } else { - continue; } } @@ -4505,11 +4540,13 @@ pub const Package = extern struct { // Use saturating arithmetic here because a migrated // package-lock.json could be out of sync with the package.json, so the // number of from_deps could be greater than to_deps. - summary.add = @truncate((to_deps.len) -| (from_deps.len -| summary.remove)); + summary.add = @truncate((to_deps_len) -| (from_deps.len -| summary.remove)); + const to_scripts = to_lockfile.packages.items(.scripts)[to_pkg_id]; + const from_scripts = from_pkg_scripts[from_pkg_id]; inline for (Lockfile.Scripts.names) |hook| { - if (!@field(to.scripts, hook).eql( - @field(from.scripts, hook), + if (!@field(to_scripts, hook).eql( + @field(from_scripts, hook), to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items, )) { @@ -4562,7 +4599,6 @@ pub const Package = extern struct { fn parseDependency( lockfile: *Lockfile, - pm: *PackageManager, allocator: Allocator, log: *logger.Log, source: logger.Source, @@ -4612,7 +4648,6 @@ pub const Package = extern struct { tag, &sliced, log, - pm, ) orelse Dependency.Version{}; var workspace_range: ?Semver.Query.Group = null; const name_hash = switch (dependency_version.tag) { @@ -4623,14 +4658,14 @@ pub const Package = extern struct { if (trimmed.len != 1 or (trimmed[0] != '*' and trimmed[0] != '^' and trimmed[0] != '~')) { const at = strings.lastIndexOfChar(input, '@') orelse 0; if (at > 0) { - workspace_range = Semver.Query.parse(allocator, input[at + 1 ..], sliced) catch |err| { + workspace_range = Semver.Query.parse(allocator, sliced.sub(input[at + 1 ..])) catch |err| { switch (err) { error.OutOfMemory => bun.outOfMemory(), } }; break :brk String.Builder.stringHash(input[0..at]); } - workspace_range = Semver.Query.parse(allocator, input, sliced) catch |err| { + workspace_range = Semver.Query.parse(allocator, sliced.sub(input)) catch |err| { switch (err) { error.OutOfMemory => bun.outOfMemory(), } @@ -4683,7 +4718,6 @@ pub const Package = extern struct { .workspace, &path, log, - pm, )) |dep| { found_workspace = true; dependency_version = dep; @@ -4716,7 +4750,8 @@ pub const Package = extern struct { } // important to trim before len == 0 check. 
`workspace:foo@ ` should install successfully - const version_literal = strings.trim(range.input, &strings.whitespace_chars); + const range_input = range.input.slice(lockfile.buffers.string_bytes.items); + const version_literal = strings.trim(range_input, &strings.whitespace_chars); if (version_literal.len == 0 or range.@"is *"() or Semver.Version.isTaggedVersionOnly(version_literal)) { dependency_version.literal = path; dependency_version.value.workspace = path; @@ -5787,7 +5822,6 @@ pub const Package = extern struct { if (try parseDependency( lockfile, - pm, allocator, log, source, @@ -5830,7 +5864,6 @@ pub const Package = extern struct { if (try parseDependency( lockfile, - pm, allocator, log, source, @@ -5887,7 +5920,7 @@ pub const Package = extern struct { // This function depends on package.dependencies being set, so it is done at the very end. if (comptime features.is_main) { - try lockfile.overrides.parseAppend(pm, lockfile, package, log, source, json, &string_builder); + try lockfile.overrides.parseAppend(lockfile, package, log, source, json, &string_builder); } string_builder.clamp(); @@ -6492,7 +6525,6 @@ const Buffers = struct { .log = log, .allocator = allocator, .buffer = string_buf, - .package_manager = pm_, }; this.dependencies.expandToCapacity(); @@ -6888,7 +6920,6 @@ pub const Serializer = struct { .allocator = allocator, .log = log, .buffer = lockfile.buffers.string_bytes.items, - .package_manager = manager, }; for (overrides_name_hashes.items, override_versions_external.items) |name, value| { map.putAssumeCapacity(name, Dependency.toDependency(value, context)); diff --git a/src/install/migration.zig b/src/install/migration.zig index 71ecc89e8fe7db..9d170b5190febe 100644 --- a/src/install/migration.zig +++ b/src/install/migration.zig @@ -719,7 +719,6 @@ pub fn migrateNPMLockfile( sliced.slice, &sliced, log, - manager, ) orelse { return error.InvalidNPMLockfile; }; @@ -802,7 +801,6 @@ pub fn migrateNPMLockfile( tag, &dep_resolved_sliced, log, - manager, ) orelse return error.InvalidNPMLockfile; break :dep_resolved dep_resolved; @@ -1010,7 +1008,7 @@ pub fn migrateNPMLockfile( // but after we write all the data, there is no excuse for this to fail. // // If this is hit, it means getOrPutID was not called on this package id. 
Look for where 'resolution[i]' is set - bun.assert(this.getPackageID(this.packages.items(.name_hash)[i], null, &r) != null); + bun.assert(this.getPackageID(this.packages.items(.name_hash)[i], &r) != null); } } if (is_missing_resolutions) { diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig index dceff73f4bccff..17584446e2ce0a 100644 --- a/src/install/resolvers/folder_resolver.zig +++ b/src/install/resolvers/folder_resolver.zig @@ -169,8 +169,8 @@ pub const FolderResolution = union(Tag) { fn readPackageJSONFromDisk( manager: *PackageManager, + lockfile: *Lockfile, abs: stringZ, - version: Dependency.Version, comptime features: Features, comptime ResolverType: type, resolver: *ResolverType, @@ -184,7 +184,7 @@ pub const FolderResolution = union(Tag) { const json = try manager.workspace_package_json_cache.getWithPath(manager.allocator, manager.log, abs, .{}).unwrap(); try package.parseWithJSON( - manager.lockfile, + lockfile, manager, manager.allocator, manager.log, @@ -215,7 +215,7 @@ pub const FolderResolution = union(Tag) { }; try package.parse( - manager.lockfile, + lockfile, manager, manager.allocator, manager.log, @@ -238,13 +238,13 @@ pub const FolderResolution = union(Tag) { package.meta.setHasInstallScript(has_scripts); - if (manager.lockfile.getPackageID(package.name_hash, version, &package.resolution)) |existing_id| { + if (lockfile.getPackageID(package.name_hash, &package.resolution)) |existing_id| { package.meta.id = existing_id; manager.lockfile.packages.set(existing_id, package); return manager.lockfile.packages.get(existing_id); } - return manager.lockfile.appendPackage(package); + return lockfile.appendPackage(package); } pub const GlobalOrRelative = union(enum) { @@ -253,7 +253,7 @@ pub const FolderResolution = union(Tag) { cache_folder: []const u8, }; - pub fn getOrPut(global_or_relative: GlobalOrRelative, version: Dependency.Version, non_normalized_path: string, manager: *PackageManager) FolderResolution { + pub fn getOrPut(global_or_relative: GlobalOrRelative, version: Dependency.Version, non_normalized_path: string, manager: *PackageManager, lockfile: *Lockfile) FolderResolution { var joined: bun.PathBuffer = undefined; const paths = normalizePackageJSONPath(global_or_relative, &joined, non_normalized_path); const abs = paths.abs; @@ -266,7 +266,7 @@ pub const FolderResolution = union(Tag) { } const abs_hash = hash(abs); - const entry = manager.folders.getOrPut(manager.allocator, abs_hash) catch unreachable; + const entry = lockfile.folder_resolutions.getOrPut(lockfile.allocator, abs_hash) catch unreachable; if (entry.found_existing) return entry.value_ptr.*; const package: Lockfile.Package = switch (global_or_relative) { @@ -278,8 +278,8 @@ pub const FolderResolution = union(Tag) { }; break :global readPackageJSONFromDisk( manager, + lockfile, abs, - version, Features.link, SymlinkResolver, &resolver, @@ -292,8 +292,8 @@ pub const FolderResolution = union(Tag) { }; break :folder readPackageJSONFromDisk( manager, + lockfile, abs, - version, Features.folder, Resolver, &resolver, @@ -305,8 +305,8 @@ pub const FolderResolution = union(Tag) { }; break :workspace readPackageJSONFromDisk( manager, + lockfile, abs, - version, Features.workspace, WorkspaceResolver, &resolver, @@ -320,8 +320,8 @@ pub const FolderResolution = union(Tag) { }; break :cache_folder readPackageJSONFromDisk( manager, + lockfile, abs, - version, Features.npm, CacheFolderResolver, &resolver, diff --git a/src/install/semver.zig b/src/install/semver.zig index 
a3f9ca2efd4cb3..c6c21eefd1b10a 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -2112,7 +2112,7 @@ pub const Query = struct { head: List = List{}, tail: ?*List = null, allocator: Allocator, - input: string = "", + input: String = .{}, flags: FlagsBitSet = FlagsBitSet.initEmpty(), pub const Flags = struct { @@ -2538,14 +2538,14 @@ pub const Query = struct { pub fn parse( allocator: Allocator, - input: string, sliced: SlicedString, ) bun.OOM!Group { var i: usize = 0; var list = Group{ .allocator = allocator, - .input = input, + .input = sliced.value(), }; + const input = sliced.slice; var token = Token{}; var prev_token = Token{}; @@ -2872,7 +2872,6 @@ pub const SemverObject = struct { const right_group = try Query.parse( allocator, - right.slice(), SlicedString.init(right.slice(), right.slice()), ); defer right_group.deinit(); diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index c3435c06cb5929..f9effecdf4cc3d 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -861,7 +861,6 @@ pub const PackageJSON = struct { .npm, &sliced, r.log, - pm, )) |dependency_version| { if (dependency_version.value.npm.version.isExact()) { if (pm.lockfile.resolvePackageFromNameAndVersion(package_json.name, dependency_version)) |resolved| { @@ -983,7 +982,6 @@ pub const PackageJSON = struct { version_str, &sliced_str, r.log, - r.package_manager, )) |dependency_version| { const dependency = Dependency{ .name = name, diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index d427747c82c518..5dd59fff63c33a 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -1873,7 +1873,6 @@ pub const Resolver = struct { esm.version, &sliced_string, r.log, - manager, ) orelse break :load_module_from_cache; } @@ -2198,7 +2197,6 @@ pub const Resolver = struct { if (package_json_) |package_json| { package = Package.fromPackageJSON( pm.lockfile, - pm, package_json, Install.Features{ .dev_dependencies = true, diff --git a/test/cli/install/__snapshots__/bun-lock.test.ts.snap b/test/cli/install/__snapshots__/bun-lock.test.ts.snap index 4f15c6c30c47f0..27be26bfbb89d4 100644 --- a/test/cli/install/__snapshots__/bun-lock.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-lock.test.ts.snap @@ -1,26 +1,81 @@ // Bun Snapshot v1, https://goo.gl/fbAQLP -exports[`should escape names 1`] = ` +exports[`should update dependency version literal when no updates are necessary 1`] = ` "{ "lockfileVersion": 0, "workspaces": { "": { - "name": "quote-in-dependency-name", + "dependencies": { + "no-deps": "1.0.0", + }, }, - "packages/\\"": { - "name": "\\"", + "packages/pkg1": { + "name": "pkg1", + "dependencies": { + "a-dep": "1.0.1", + }, + }, + }, + "packages": { + "a-dep": ["a-dep@1.0.1", "http://localhost:1234/a-dep/-/a-dep-1.0.1.tgz", {}, "sha512-6nmTaPgO2U/uOODqOhbjbnaB4xHuZ+UB7AjKUA3g2dT4WRWeNxgp0dC8Db4swXSnO5/uLLUdFmUJKINNBO/3wg=="], + + "no-deps": ["no-deps@1.0.0", "http://localhost:1234/no-deps/-/no-deps-1.0.0.tgz", {}, "sha512-v4w12JRjUGvfHDUP8vFDwu0gUWu04j0cv9hLb1Abf9VdaXu4XcrddYFTMVBVvmldKViGWH7jrb6xPJRF0wq6gw=="], + + "pkg1": ["pkg1@workspace:packages/pkg1", { "dependencies": { "a-dep": "1.0.1" } }], + } +} +" +`; + +exports[`should update dependency version literal when no updates are necessary 2`] = ` +"{ + "lockfileVersion": 0, + "workspaces": { + "": { + "dependencies": { + "no-deps": "^1.0.0", + }, }, "packages/pkg1": { "name": "pkg1", "dependencies": { - "\\"": "*", + "a-dep": "1.0.1", }, }, }, "packages": { - "\\"": 
["\\"@workspace:packages/\\"", {}], + "a-dep": ["a-dep@1.0.1", "http://localhost:1234/a-dep/-/a-dep-1.0.1.tgz", {}, "sha512-6nmTaPgO2U/uOODqOhbjbnaB4xHuZ+UB7AjKUA3g2dT4WRWeNxgp0dC8Db4swXSnO5/uLLUdFmUJKINNBO/3wg=="], - "pkg1": ["pkg1@workspace:packages/pkg1", { "dependencies": { "\\"": "*" } }], + "no-deps": ["no-deps@1.0.0", "http://localhost:1234/no-deps/-/no-deps-1.0.0.tgz", {}, "sha512-v4w12JRjUGvfHDUP8vFDwu0gUWu04j0cv9hLb1Abf9VdaXu4XcrddYFTMVBVvmldKViGWH7jrb6xPJRF0wq6gw=="], + + "pkg1": ["pkg1@workspace:packages/pkg1", { "dependencies": { "a-dep": "1.0.1" } }], + } +} +" +`; + +exports[`should update dependency version literal when no updates are necessary 3`] = ` +"{ + "lockfileVersion": 0, + "workspaces": { + "": { + "dependencies": { + "no-deps": "^1.0.0", + }, + }, + "packages/pkg1": { + "name": "pkg1", + "dependencies": { + "a-dep": "^1.0.1", + }, + }, + }, + "packages": { + "a-dep": ["a-dep@1.0.1", "http://localhost:1234/a-dep/-/a-dep-1.0.1.tgz", {}, "sha512-6nmTaPgO2U/uOODqOhbjbnaB4xHuZ+UB7AjKUA3g2dT4WRWeNxgp0dC8Db4swXSnO5/uLLUdFmUJKINNBO/3wg=="], + + "no-deps": ["no-deps@1.0.0", "http://localhost:1234/no-deps/-/no-deps-1.0.0.tgz", {}, "sha512-v4w12JRjUGvfHDUP8vFDwu0gUWu04j0cv9hLb1Abf9VdaXu4XcrddYFTMVBVvmldKViGWH7jrb6xPJRF0wq6gw=="], + + "pkg1": ["pkg1@workspace:packages/pkg1", { "dependencies": { "a-dep": "^1.0.1" } }], } } " @@ -43,3 +98,29 @@ exports[`should write plaintext lockfiles 1`] = ` } " `; + +exports[`should escape names 1`] = ` +"{ + "lockfileVersion": 0, + "workspaces": { + "": { + "name": "quote-in-dependency-name", + }, + "packages/\\"": { + "name": "\\"", + }, + "packages/pkg1": { + "name": "pkg1", + "dependencies": { + "\\"": "*", + }, + }, + }, + "packages": { + "\\"": ["\\"@workspace:packages/\\"", {}], + + "pkg1": ["pkg1@workspace:packages/pkg1", { "dependencies": { "\\"": "*" } }], + } +} +" +`; diff --git a/test/cli/install/bun-install-registry.test.ts b/test/cli/install/bun-install-registry.test.ts index 99a3089e502c49..49cccbc92e497e 100644 --- a/test/cli/install/bun-install-registry.test.ts +++ b/test/cli/install/bun-install-registry.test.ts @@ -11,7 +11,6 @@ import { mergeWindowEnvs, runBunInstall, runBunUpdate, - pack, tempDirWithFiles, tmpdirSync, toBeValidBin, @@ -20,8 +19,6 @@ import { writeShebangScript, stderrForInstall, tls, - isFlaky, - isMacOS, readdirSorted, VerdaccioRegistry, } from "harness"; @@ -42,8 +39,6 @@ var packageDir: string; /** packageJson = join(packageDir, "package.json"); */ var packageJson: string; -let users: Record = {}; - beforeAll(async () => { setDefaultTimeout(1000 * 60 * 5); verdaccio = new VerdaccioRegistry(); @@ -52,15 +47,11 @@ beforeAll(async () => { }); afterAll(async () => { - await Bun.$`rm -f ${import.meta.dir}/htpasswd`.throws(false); verdaccio.stop(); }); beforeEach(async () => { ({ packageDir, packageJson } = await verdaccio.createTestDir()); - await Bun.$`rm -f ${import.meta.dir}/htpasswd`.throws(false); - await Bun.$`rm -rf ${import.meta.dir}/packages/private-pkg-dont-touch`.throws(false); - users = {}; env.BUN_INSTALL_CACHE_DIR = join(packageDir, ".bun-cache"); env.BUN_TMPDIR = env.TMPDIR = env.TEMP = join(packageDir, ".bun-tmp"); }); @@ -69,37 +60,6 @@ function registryUrl() { return verdaccio.registryUrl(); } -/** - * Returns auth token - */ -async function generateRegistryUser(username: string, password: string): Promise { - if (users[username]) { - throw new Error("that user already exists"); - } else users[username] = password; - - const url = 
-  const user = {
-    name: username,
-    password: password,
-    email: `${username}@example.com`,
-  };
-
-  const response = await fetch(url, {
-    method: "PUT",
-    headers: {
-      "Content-Type": "application/json",
-    },
-    body: JSON.stringify(user),
-  });
-
-  if (response.ok) {
-    const data = await response.json();
-    return data.token;
-  } else {
-    throw new Error("Failed to create user:", response.statusText);
-  }
-}
-
 describe("npmrc", async () => {
   const isBase64Encoded = (opt: string) => opt === "_auth" || opt === "_password";
@@ -330,7 +290,7 @@ ${iniInner.join("\n")}
     const ini = /* ini */ `
 registry = http://localhost:${port}/
 @needs-auth:registry=http://localhost:${port}/
-//localhost:${port}/:_authToken=${await generateRegistryUser("bilbo_swaggins", "verysecure")}
+//localhost:${port}/:_authToken=${await verdaccio.generateUser("bilbo_swaggins", "verysecure")}
 `;
     await Bun.$`echo ${ini} > ${packageDir}/.npmrc`;
@@ -415,21 +375,21 @@ ${Object.keys(opts)
   }
   registryConfigOptionTest("_authToken", async () => ({
-    "_authToken": await generateRegistryUser("bilbo_baggins", "verysecure"),
+    "_authToken": await verdaccio.generateUser("bilbo_baggins", "verysecure"),
   }));
   registryConfigOptionTest(
     "_authToken with env variable value",
     async () => ({ _authToken: "${SUPER_SECRET_TOKEN}" }),
-    async () => ({ SUPER_SECRET_TOKEN: await generateRegistryUser("bilbo_baggins420", "verysecure") }),
+    async () => ({ SUPER_SECRET_TOKEN: await verdaccio.generateUser("bilbo_baggins420", "verysecure") }),
   );
   registryConfigOptionTest("username and password", async () => {
-    await generateRegistryUser("gandalf429", "verysecure");
+    await verdaccio.generateUser("gandalf429", "verysecure");
     return { username: "gandalf429", _password: "verysecure" };
   });
   registryConfigOptionTest(
     "username and password with env variable password",
     async () => {
-      await generateRegistryUser("gandalf422", "verysecure");
+      await verdaccio.generateUser("gandalf422", "verysecure");
       return { username: "gandalf422", _password: "${SUPER_SECRET_PASSWORD}" };
     },
     {
@@ -439,7 +399,7 @@ ${Object.keys(opts)
   registryConfigOptionTest(
     "username and password with .env variable password",
     async () => {
-      await generateRegistryUser("gandalf421", "verysecure");
+      await verdaccio.generateUser("gandalf421", "verysecure");
       return { username: "gandalf421", _password: "${SUPER_SECRET_PASSWORD}" };
     },
     {
@@ -448,7 +408,7 @@ ${Object.keys(opts)
   );
   registryConfigOptionTest("_auth", async () => {
-    await generateRegistryUser("linus", "verysecure");
+    await verdaccio.generateUser("linus", "verysecure");
     const _auth = "linus:verysecure";
     return { _auth };
   });
@@ -456,7 +416,7 @@ ${Object.keys(opts)
   registryConfigOptionTest(
     "_auth from .env variable",
     async () => {
-      await generateRegistryUser("zack", "verysecure");
+      await verdaccio.generateUser("zack", "verysecure");
       return { _auth: "${SECRET_AUTH}" };
     },
     {
@@ -467,7 +427,7 @@ ${Object.keys(opts)
   registryConfigOptionTest(
     "_auth from .env variable with no value",
     async () => {
-      await generateRegistryUser("zack420", "verysecure");
+      await verdaccio.generateUser("zack420", "verysecure");
       return { _auth: "${SECRET_AUTH}" };
     },
     {
@@ -610,995 +570,135 @@ describe("certificate authority", () => {
     expect(await exited).toBe(0);
   });
   test(`non-existent --cafile`, async () => {
-    await write(packageJson, JSON.stringify({ name: "foo", version: "1.0.0", "dependencies": { "no-deps": "1.1.1" } }));
-
-    const { stdout, stderr, exited } = spawn({
"install", "--cafile", "does-not-exist"], - cwd: packageDir, - stderr: "pipe", - stdout: "pipe", - env, - }); - const out = await Bun.readableStreamToText(stdout); - expect(out).not.toContain("no-deps"); - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain(`HTTPThread: could not find CA file: '${join(packageDir, "does-not-exist")}'`); - expect(await exited).toBe(1); - }); - - test("non-existent --cafile (absolute path)", async () => { - await write(packageJson, JSON.stringify({ name: "foo", version: "1.0.0", "dependencies": { "no-deps": "1.1.1" } })); - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install", "--cafile", "/does/not/exist"], - cwd: packageDir, - stderr: "pipe", - stdout: "pipe", - env, - }); - const out = await Bun.readableStreamToText(stdout); - expect(out).not.toContain("no-deps"); - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain(`HTTPThread: could not find CA file: '/does/not/exist'`); - expect(await exited).toBe(1); - }); - - test("cafile from bunfig does not exist", async () => { - await Promise.all([ - write( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "no-deps": "1.1.1", - }, - }), - ), - write( - join(packageDir, "bunfig.toml"), - ` - [install] - cache = false - registry = "http://localhost:${port}/" - cafile = "does-not-exist"`, - ), - ]); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stderr: "pipe", - stdout: "pipe", - env, - }); - - const out = await Bun.readableStreamToText(stdout); - expect(out).not.toContain("no-deps"); - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain(`HTTPThread: could not find CA file: '${join(packageDir, "does-not-exist")}'`); - expect(await exited).toBe(1); - }); - test("invalid cafile", async () => { - await Promise.all([ - write( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "no-deps": "1.1.1", - }, - }), - ), - write( - join(packageDir, "invalid-cafile"), - `-----BEGIN CERTIFICATE----- -jlwkjekfjwlejlgldjfljlkwjef ------END CERTIFICATE----- ------BEGIN CERTIFICATE----- -ljelkjwelkgjw;lekj;lkejflkj ------END CERTIFICATE-----`, - ), - ]); - - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install", "--cafile", join(packageDir, "invalid-cafile")], - cwd: packageDir, - stderr: "pipe", - stdout: "pipe", - env, - }); - - const out = await Bun.readableStreamToText(stdout); - expect(out).not.toContain("no-deps"); - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain(`HTTPThread: invalid CA file: '${join(packageDir, "invalid-cafile")}'`); - expect(await exited).toBe(1); - }); - test("invalid --ca", async () => { - await write( - packageJson, - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "no-deps": "1.1.1", - }, - }), - ); - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install", "--ca", "not-valid"], - cwd: packageDir, - stderr: "pipe", - stdout: "pipe", - env, - }); - - const out = await Bun.readableStreamToText(stdout); - expect(out).not.toContain("no-deps"); - const err = await Bun.readableStreamToText(stderr); - expect(err).toContain("HTTPThread: the CA is invalid"); - expect(await exited).toBe(1); - }); -}); - -export async function publish( - env: any, - cwd: string, - ...args: string[] -): Promise<{ out: string; err: string; exitCode: number }> { - const { stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "publish", 
-    cmd: [bunExe(), "publish", ...args],
-    cwd,
-    stdout: "pipe",
-    stderr: "pipe",
-    env,
-  });
-
-  const out = await Bun.readableStreamToText(stdout);
-  const err = stderrForInstall(await Bun.readableStreamToText(stderr));
-  const exitCode = await exited;
-  return { out, err, exitCode };
-}
-
-async function authBunfig(user: string) {
-  const authToken = await generateRegistryUser(user, user);
-  return `
-  [install]
-  cache = false
-  registry = { url = "http://localhost:${port}/", token = "${authToken}" }
-  `;
-}
-
-describe("whoami", async () => {
-  test("can get username", async () => {
-    const bunfig = await authBunfig("whoami");
-    await Promise.all([
-      write(
-        packageJson,
-        JSON.stringify({
-          name: "whoami-pkg",
-          version: "1.1.1",
-        }),
-      ),
-      write(join(packageDir, "bunfig.toml"), bunfig),
-    ]);
-
-    const { stdout, stderr, exited } = spawn({
-      cmd: [bunExe(), "pm", "whoami"],
-      cwd: packageDir,
-      stdout: "pipe",
-      stderr: "pipe",
-      env,
-    });
-
-    const out = await Bun.readableStreamToText(stdout);
-    expect(out).toBe("whoami\n");
-    const err = await Bun.readableStreamToText(stderr);
-    expect(err).not.toContain("error:");
-    expect(await exited).toBe(0);
-  });
-  test("username from .npmrc", async () => {
-    // It should report the username from npmrc, even without an account
-    const bunfig = `
-    [install]
-    cache = false
-    registry = "http://localhost:${port}/"`;
-    const npmrc = `
-    //localhost:${port}/:username=whoami-npmrc
-    //localhost:${port}/:_password=123456
-    `;
-    await Promise.all([
-      write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })),
-      write(join(packageDir, "bunfig.toml"), bunfig),
-      write(join(packageDir, ".npmrc"), npmrc),
-    ]);
-
-    const { stdout, stderr, exited } = spawn({
-      cmd: [bunExe(), "pm", "whoami"],
-      cwd: packageDir,
-      stdout: "pipe",
-      stderr: "pipe",
-      env,
-    });
-
-    const out = await Bun.readableStreamToText(stdout);
-    expect(out).toBe("whoami-npmrc\n");
-    const err = await Bun.readableStreamToText(stderr);
-    expect(err).not.toContain("error:");
-    expect(await exited).toBe(0);
-  });
-  test("only .npmrc", async () => {
-    const token = await generateRegistryUser("whoami-npmrc", "whoami-npmrc");
-    const npmrc = `
-    //localhost:${port}/:_authToken=${token}
-    registry=http://localhost:${port}/`;
-    await Promise.all([
-      write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })),
-      write(join(packageDir, ".npmrc"), npmrc),
-    ]);
-    const { stdout, stderr, exited } = spawn({
-      cmd: [bunExe(), "pm", "whoami"],
-      cwd: packageDir,
-      stdout: "pipe",
-      stderr: "pipe",
-      env,
-    });
-    const out = await Bun.readableStreamToText(stdout);
-    expect(out).toBe("whoami-npmrc\n");
-    const err = await Bun.readableStreamToText(stderr);
-    expect(err).not.toContain("error:");
-    expect(await exited).toBe(0);
-  });
-  test("two .npmrc", async () => {
-    const token = await generateRegistryUser("whoami-two-npmrc", "whoami-two-npmrc");
-    const packageNpmrc = `registry=http://localhost:${port}/`;
-    const homeNpmrc = `//localhost:${port}/:_authToken=${token}`;
-    const homeDir = `${packageDir}/home_dir`;
-    await Bun.$`mkdir -p ${homeDir}`;
-    await Promise.all([
-      write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })),
-      write(join(packageDir, ".npmrc"), packageNpmrc),
-      write(join(homeDir, ".npmrc"), homeNpmrc),
-    ]);
-    const { stdout, stderr, exited } = spawn({
-      cmd: [bunExe(), "pm", "whoami"],
-      cwd: packageDir,
-      stdout: "pipe",
-      stderr: "pipe",
-      env: {
-        ...env,
-        XDG_CONFIG_HOME: `${homeDir}`,
-      },
-    });
-    const out = await Bun.readableStreamToText(stdout);
-    expect(out).toBe("whoami-two-npmrc\n");
-    const err = await Bun.readableStreamToText(stderr);
-    expect(err).not.toContain("error:");
-    expect(await exited).toBe(0);
-  });
-  test("not logged in", async () => {
-    await write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" }));
-    const { stdout, stderr, exited } = spawn({
-      cmd: [bunExe(), "pm", "whoami"],
-      cwd: packageDir,
-      env,
-      stdout: "pipe",
-      stderr: "pipe",
-    });
-    const out = await Bun.readableStreamToText(stdout);
-    expect(out).toBeEmpty();
-    const err = await Bun.readableStreamToText(stderr);
-    expect(err).toBe("error: missing authentication (run `bunx npm login`)\n");
-    expect(await exited).toBe(1);
-  });
-  test("invalid token", async () => {
-    // create the user and provide an invalid token
-    const token = await generateRegistryUser("invalid-token", "invalid-token");
-    const bunfig = `
-    [install]
-    cache = false
-    registry = { url = "http://localhost:${port}/", token = "1234567" }`;
-    await Promise.all([
-      write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })),
-      write(join(packageDir, "bunfig.toml"), bunfig),
-    ]);
-    const { stdout, stderr, exited } = spawn({
-      cmd: [bunExe(), "pm", "whoami"],
-      cwd: packageDir,
-      env,
-      stdout: "pipe",
-      stderr: "pipe",
-    });
-    const out = await Bun.readableStreamToText(stdout);
-    expect(out).toBeEmpty();
-    const err = await Bun.readableStreamToText(stderr);
-    expect(err).toBe(`error: failed to authenticate with registry 'http://localhost:${port}/'\n`);
-    expect(await exited).toBe(1);
-  });
-});
-
-describe("publish", async () => {
-  describe("otp", async () => {
-    const mockRegistryFetch = function (opts: {
-      token: string;
-      setAuthHeader?: boolean;
-      otpFail?: boolean;
-      npmNotice?: boolean;
-      xLocalCache?: boolean;
-      expectedCI?: string;
-    }) {
-      return async function (req: Request) {
-        const { token, setAuthHeader = true, otpFail = false, npmNotice = false, xLocalCache = false } = opts;
-        if (req.url.includes("otp-pkg")) {
-          if (opts.expectedCI) {
-            expect(req.headers.get("user-agent")).toContain("ci/" + opts.expectedCI);
-          }
-          if (req.headers.get("npm-otp") === token) {
-            if (otpFail) {
-              return new Response(
-                JSON.stringify({
-                  error: "You must provide a one-time pass. Upgrade your client to npm@latest in order to use 2FA.",
-                }),
-                { status: 401 },
-              );
-            } else {
-              return new Response("OK", { status: 200 });
-            }
-          } else {
-            const headers = new Headers();
-            if (setAuthHeader) headers.set("www-authenticate", "OTP");
-
-            // `bun publish` won't request a url from a message in the npm-notice header, but we
-            // can test that it's displayed
-            if (npmNotice) headers.set("npm-notice", `visit http://localhost:${this.port}/auth to login`);
-
-            // npm-notice will be ignored
-            if (xLocalCache) headers.set("x-local-cache", "true");
-
-            return new Response(
-              JSON.stringify({
-                // this isn't accurate, but we just want to check that finding this string works
"" : "one-time password", - - authUrl: `http://localhost:${this.port}/auth`, - doneUrl: `http://localhost:${this.port}/done`, - }), - { - status: 401, - headers, - }, - ); - } - } else if (req.url.endsWith("auth")) { - expect.unreachable("url given to user, bun publish should not request"); - } else if (req.url.endsWith("done")) { - // send a fake response saying the user has authenticated successfully with the auth url - return new Response(JSON.stringify({ token: token }), { status: 200 }); - } - - expect.unreachable("unexpected url"); - }; - }; - - for (const setAuthHeader of [true, false]) { - test("mock web login" + (setAuthHeader ? "" : " (without auth header)"), async () => { - const token = await generateRegistryUser("otp" + (setAuthHeader ? "" : "noheader"), "otp"); - - using mockRegistry = Bun.serve({ - port: 0, - fetch: mockRegistryFetch({ token }), - }); - - const bunfig = ` - [install] - cache = false - registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; - await Promise.all([ - rm(join(verdaccio.packagesPath, "otp-pkg-1"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "otp-pkg-1", - version: "2.2.2", - dependencies: { - "otp-pkg-1": "2.2.2", - }, - }), - ), - ]); - - const { out, err, exitCode } = await publish(env, packageDir); - expect(exitCode).toBe(0); - }); - } - - test("otp failure", async () => { - const token = await generateRegistryUser("otp-fail", "otp"); - using mockRegistry = Bun.serve({ - port: 0, - fetch: mockRegistryFetch({ token, otpFail: true }), - }); - - const bunfig = ` - [install] - cache = false - registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; - - await Promise.all([ - rm(join(verdaccio.packagesPath, "otp-pkg-2"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "otp-pkg-2", - version: "1.1.1", - dependencies: { - "otp-pkg-2": "1.1.1", - }, - }), - ), - ]); - - const { out, err, exitCode } = await publish(env, packageDir); - expect(exitCode).toBe(1); - expect(err).toContain(" - Received invalid OTP"); - }); - - for (const shouldIgnoreNotice of [false, true]) { - test(`npm-notice with login url${shouldIgnoreNotice ? " (ignored)" : ""}`, async () => { - // Situation: user has 2FA enabled account with faceid sign-in. - // They run `bun publish` with --auth-type=legacy, prompting them - // to enter their OTP. Because they have faceid sign-in, they don't - // have a code to enter, so npm sends a message in the npm-notice - // header with a url for logging in. - const token = await generateRegistryUser(`otp-notice${shouldIgnoreNotice ? 
"-ignore" : ""}`, "otp"); - using mockRegistry = Bun.serve({ - port: 0, - fetch: mockRegistryFetch({ token, npmNotice: true, xLocalCache: shouldIgnoreNotice }), - }); - - const bunfig = ` - [install] - cache = false - registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; - - await Promise.all([ - rm(join(verdaccio.packagesPath, "otp-pkg-3"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "otp-pkg-3", - version: "3.3.3", - dependencies: { - "otp-pkg-3": "3.3.3", - }, - }), - ), - ]); - - const { out, err, exitCode } = await publish(env, packageDir); - expect(exitCode).toBe(0); - if (shouldIgnoreNotice) { - expect(err).not.toContain(`note: visit http://localhost:${mockRegistry.port}/auth to login`); - } else { - expect(err).toContain(`note: visit http://localhost:${mockRegistry.port}/auth to login`); - } - }); - } - - const fakeCIEnvs = [ - { ci: "expo-application-services", envs: { EAS_BUILD: "hi" } }, - { ci: "codemagic", envs: { CM_BUILD_ID: "hi" } }, - { ci: "vercel", envs: { "NOW_BUILDER": "hi" } }, - ]; - for (const envInfo of fakeCIEnvs) { - test(`CI user agent name: ${envInfo.ci}`, async () => { - const token = await generateRegistryUser(`otp-${envInfo.ci}`, "otp"); - using mockRegistry = Bun.serve({ - port: 0, - fetch: mockRegistryFetch({ token, expectedCI: envInfo.ci }), - }); - - const bunfig = ` - [install] - cache = false - registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; - - await Promise.all([ - rm(join(verdaccio.packagesPath, "otp-pkg-4"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "otp-pkg-4", - version: "4.4.4", - dependencies: { - "otp-pkg-4": "4.4.4", - }, - }), - ), - ]); - - const { out, err, exitCode } = await publish( - { ...env, ...envInfo.envs, ...{ BUILDKITE: undefined, GITHUB_ACTIONS: undefined } }, - packageDir, - ); - expect(exitCode).toBe(0); - }); - } - }); - - test("can publish a package then install it", async () => { - const bunfig = await authBunfig("basic"); - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-1"), { recursive: true, force: true }), - write( - packageJson, - JSON.stringify({ - name: "publish-pkg-1", - version: "1.1.1", - dependencies: { - "publish-pkg-1": "1.1.1", - }, - }), - ), - write(join(packageDir, "bunfig.toml"), bunfig), - ]); - - const { out, err, exitCode } = await publish(env, packageDir); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(exitCode).toBe(0); - - await runBunInstall(env, packageDir); - expect(await exists(join(packageDir, "node_modules", "publish-pkg-1", "package.json"))).toBeTrue(); - }); - test("can publish from a tarball", async () => { - const bunfig = await authBunfig("tarball"); - const json = { - name: "publish-pkg-2", - version: "2.2.2", - dependencies: { - "publish-pkg-2": "2.2.2", - }, - }; - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-2"), { recursive: true, force: true }), - write(packageJson, JSON.stringify(json)), - write(join(packageDir, "bunfig.toml"), bunfig), - ]); - - await pack(packageDir, env); - - let { out, err, exitCode } = await publish(env, packageDir, "./publish-pkg-2-2.2.2.tgz"); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(exitCode).toBe(0); - - await runBunInstall(env, packageDir); - expect(await exists(join(packageDir, 
"node_modules", "publish-pkg-2", "package.json"))).toBeTrue(); - - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-2"), { recursive: true, force: true }), - rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }), - rm(join(packageDir, "node_modules"), { recursive: true, force: true }), - ]); - - // now with an absoute path - ({ out, err, exitCode } = await publish(env, packageDir, join(packageDir, "publish-pkg-2-2.2.2.tgz"))); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(exitCode).toBe(0); - - await runBunInstall(env, packageDir); - expect(await file(join(packageDir, "node_modules", "publish-pkg-2", "package.json")).json()).toEqual(json); - }); - - for (const info of [ - { user: "bin1", bin: "bin1.js" }, - { user: "bin2", bin: { bin1: "bin1.js", bin2: "bin2.js" } }, - { user: "bin3", directories: { bin: "bins" } }, - ]) { - test(`can publish and install binaries with ${JSON.stringify(info)}`, async () => { - const publishDir = tmpdirSync(); - const bunfig = await authBunfig("binaries-" + info.user); - console.log({ packageDir, publishDir }); - - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-bins"), { recursive: true, force: true }), - write( - join(publishDir, "package.json"), - JSON.stringify({ - name: "publish-pkg-bins", - version: "1.1.1", - ...info, - }), - ), - write(join(publishDir, "bunfig.toml"), bunfig), - write(join(publishDir, "bin1.js"), `#!/usr/bin/env bun\nconsole.log("bin1!")`), - write(join(publishDir, "bin2.js"), `#!/usr/bin/env bun\nconsole.log("bin2!")`), - write(join(publishDir, "bins", "bin3.js"), `#!/usr/bin/env bun\nconsole.log("bin3!")`), - write(join(publishDir, "bins", "moredir", "bin4.js"), `#!/usr/bin/env bun\nconsole.log("bin4!")`), - - write( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "publish-pkg-bins": "1.1.1", - }, - }), - ), - ]); - - const { out, err, exitCode } = await publish(env, publishDir); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(out).toContain("+ publish-pkg-bins@1.1.1"); - expect(exitCode).toBe(0); - - await runBunInstall(env, packageDir); - - const results = await Promise.all([ - exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin1.bunx" : "bin1")), - exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin2.bunx" : "bin2")), - exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin3.js.bunx" : "bin3.js")), - exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin4.js.bunx" : "bin4.js")), - exists(join(packageDir, "node_modules", ".bin", isWindows ? "moredir" : "moredir/bin4.js")), - exists(join(packageDir, "node_modules", ".bin", isWindows ? 
"publish-pkg-bins.bunx" : "publish-pkg-bins")), - ]); - - switch (info.user) { - case "bin1": { - expect(results).toEqual([false, false, false, false, false, true]); - break; - } - case "bin2": { - expect(results).toEqual([true, true, false, false, false, false]); - break; - } - case "bin3": { - expect(results).toEqual([false, false, true, true, !isWindows, false]); - break; - } - } - }); - } - - test("dependencies are installed", async () => { - const publishDir = tmpdirSync(); - const bunfig = await authBunfig("manydeps"); - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-deps"), { recursive: true, force: true }), - write( - join(publishDir, "package.json"), - JSON.stringify( - { - name: "publish-pkg-deps", - version: "1.1.1", - dependencies: { - "no-deps": "1.0.0", - }, - peerDependencies: { - "a-dep": "1.0.1", - }, - optionalDependencies: { - "basic-1": "1.0.0", - }, - }, - null, - 2, - ), - ), - write(join(publishDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "foo", - dependencies: { - "publish-pkg-deps": "1.1.1", - }, - }), - ), - ]); - - let { out, err, exitCode } = await publish(env, publishDir); - expect(err).not.toContain("error:"); - expect(err).not.toContain("warn:"); - expect(out).toContain("+ publish-pkg-deps@1.1.1"); - expect(exitCode).toBe(0); - - await runBunInstall(env, packageDir); - - const results = await Promise.all([ - exists(join(packageDir, "node_modules", "no-deps", "package.json")), - exists(join(packageDir, "node_modules", "a-dep", "package.json")), - exists(join(packageDir, "node_modules", "basic-1", "package.json")), - ]); - - expect(results).toEqual([true, true, true]); - }); - - test("can publish workspace package", async () => { - const bunfig = await authBunfig("workspace"); - const pkgJson = { - name: "publish-pkg-3", - version: "3.3.3", - dependencies: { - "publish-pkg-3": "3.3.3", - }, - }; - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-3"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "root", - workspaces: ["packages/*"], - }), - ), - write(join(packageDir, "packages", "publish-pkg-3", "package.json"), JSON.stringify(pkgJson)), - ]); - - await publish(env, join(packageDir, "packages", "publish-pkg-3")); - - await write(packageJson, JSON.stringify({ name: "root", "dependencies": { "publish-pkg-3": "3.3.3" } })); - - await runBunInstall(env, packageDir); - - expect(await file(join(packageDir, "node_modules", "publish-pkg-3", "package.json")).json()).toEqual(pkgJson); - }); - - describe("--dry-run", async () => { - test("does not publish", async () => { - const bunfig = await authBunfig("dryrun"); - await Promise.all([ - rm(join(verdaccio.packagesPath, "dry-run-1"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "dry-run-1", - version: "1.1.1", - dependencies: { - "dry-run-1": "1.1.1", - }, - }), - ), - ]); - - const { out, err, exitCode } = await publish(env, packageDir, "--dry-run"); - expect(exitCode).toBe(0); - - expect(await exists(join(verdaccio.packagesPath, "dry-run-1"))).toBeFalse(); - }); - test("does not publish from tarball path", async () => { - const bunfig = await authBunfig("dryruntarball"); - await Promise.all([ - rm(join(verdaccio.packagesPath, "dry-run-2"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - 
name: "dry-run-2", - version: "2.2.2", - dependencies: { - "dry-run-2": "2.2.2", - }, - }), - ), - ]); - - await pack(packageDir, env); - - const { out, err, exitCode } = await publish(env, packageDir, "./dry-run-2-2.2.2.tgz", "--dry-run"); - expect(exitCode).toBe(0); - - expect(await exists(join(verdaccio.packagesPath, "dry-run-2"))).toBeFalse(); - }); - }); - - describe("lifecycle scripts", async () => { - const script = `const fs = require("fs"); - fs.writeFileSync(process.argv[2] + ".txt", \` -prepublishOnly: \${fs.existsSync("prepublishOnly.txt")} -publish: \${fs.existsSync("publish.txt")} -postpublish: \${fs.existsSync("postpublish.txt")} -prepack: \${fs.existsSync("prepack.txt")} -prepare: \${fs.existsSync("prepare.txt")} -postpack: \${fs.existsSync("postpack.txt")}\`)`; - const json = { - name: "publish-pkg-4", - version: "4.4.4", - scripts: { - // should happen in this order - "prepublishOnly": `${bunExe()} script.js prepublishOnly`, - "prepack": `${bunExe()} script.js prepack`, - "prepare": `${bunExe()} script.js prepare`, - "postpack": `${bunExe()} script.js postpack`, - "publish": `${bunExe()} script.js publish`, - "postpublish": `${bunExe()} script.js postpublish`, - }, - dependencies: { - "publish-pkg-4": "4.4.4", - }, - }; - - for (const arg of ["", "--dry-run"]) { - test(`should run in order${arg ? " (--dry-run)" : ""}`, async () => { - const bunfig = await authBunfig("lifecycle" + (arg ? "dry" : "")); - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-4"), { recursive: true, force: true }), - write(packageJson, JSON.stringify(json)), - write(join(packageDir, "script.js"), script), - write(join(packageDir, "bunfig.toml"), bunfig), - ]); - - const { out, err, exitCode } = await publish(env, packageDir, arg); - expect(exitCode).toBe(0); - - const results = await Promise.all([ - file(join(packageDir, "prepublishOnly.txt")).text(), - file(join(packageDir, "prepack.txt")).text(), - file(join(packageDir, "prepare.txt")).text(), - file(join(packageDir, "postpack.txt")).text(), - file(join(packageDir, "publish.txt")).text(), - file(join(packageDir, "postpublish.txt")).text(), - ]); - - expect(results).toEqual([ - "\nprepublishOnly: false\npublish: false\npostpublish: false\nprepack: false\nprepare: false\npostpack: false", - "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: false\nprepare: false\npostpack: false", - "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: true\nprepare: false\npostpack: false", - "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: true\nprepare: true\npostpack: false", - "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: true\nprepare: true\npostpack: true", - "\nprepublishOnly: true\npublish: true\npostpublish: false\nprepack: true\nprepare: true\npostpack: true", - ]); - }); - } - - test("--ignore-scripts", async () => { - const bunfig = await authBunfig("ignorescripts"); - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-5"), { recursive: true, force: true }), - write(packageJson, JSON.stringify(json)), - write(join(packageDir, "script.js"), script), - write(join(packageDir, "bunfig.toml"), bunfig), - ]); - - const { out, err, exitCode } = await publish(env, packageDir, "--ignore-scripts"); - expect(exitCode).toBe(0); + await write(packageJson, JSON.stringify({ name: "foo", version: "1.0.0", "dependencies": { "no-deps": "1.1.1" } })); - const results = await Promise.all([ - exists(join(packageDir, "prepublishOnly.txt")), - 
exists(join(packageDir, "prepack.txt")), - exists(join(packageDir, "prepare.txt")), - exists(join(packageDir, "postpack.txt")), - exists(join(packageDir, "publish.txt")), - exists(join(packageDir, "postpublish.txt")), - ]); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--cafile", "does-not-exist"], + cwd: packageDir, + stderr: "pipe", + stdout: "pipe", + env, + }); + const out = await Bun.readableStreamToText(stdout); + expect(out).not.toContain("no-deps"); + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain(`HTTPThread: could not find CA file: '${join(packageDir, "does-not-exist")}'`); + expect(await exited).toBe(1); + }); - expect(results).toEqual([false, false, false, false, false, false]); + test("non-existent --cafile (absolute path)", async () => { + await write(packageJson, JSON.stringify({ name: "foo", version: "1.0.0", "dependencies": { "no-deps": "1.1.1" } })); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--cafile", "/does/not/exist"], + cwd: packageDir, + stderr: "pipe", + stdout: "pipe", + env, }); + const out = await Bun.readableStreamToText(stdout); + expect(out).not.toContain("no-deps"); + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain(`HTTPThread: could not find CA file: '/does/not/exist'`); + expect(await exited).toBe(1); }); - test("attempting to publish a private package should fail", async () => { - const bunfig = await authBunfig("privatepackage"); + test("cafile from bunfig does not exist", async () => { await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-6"), { recursive: true, force: true }), write( packageJson, JSON.stringify({ - name: "publish-pkg-6", - version: "6.6.6", - private: true, + name: "foo", + version: "1.0.0", dependencies: { - "publish-pkg-6": "6.6.6", + "no-deps": "1.1.1", }, }), ), - write(join(packageDir, "bunfig.toml"), bunfig), + write( + join(packageDir, "bunfig.toml"), + ` + [install] + cache = false + registry = "http://localhost:${port}/" + cafile = "does-not-exist"`, + ), ]); - // should fail - let { out, err, exitCode } = await publish(env, packageDir); - expect(exitCode).toBe(1); - expect(err).toContain("error: attempted to publish a private package"); - expect(await exists(join(verdaccio.packagesPath, "publish-pkg-6-6.6.6.tgz"))).toBeFalse(); - - // try tarball - await pack(packageDir, env); - ({ out, err, exitCode } = await publish(env, packageDir, "./publish-pkg-6-6.6.6.tgz")); - expect(exitCode).toBe(1); - expect(err).toContain("error: attempted to publish a private package"); - expect(await exists(join(packageDir, "publish-pkg-6-6.6.6.tgz"))).toBeTrue(); - }); - - describe("access", async () => { - test("--access", async () => { - const bunfig = await authBunfig("accessflag"); - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-7"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write( - packageJson, - JSON.stringify({ - name: "publish-pkg-7", - version: "7.7.7", - }), - ), - ]); - - // should fail - let { out, err, exitCode } = await publish(env, packageDir, "--access", "restricted"); - expect(exitCode).toBe(1); - expect(err).toContain("error: unable to restrict access to unscoped package"); - - ({ out, err, exitCode } = await publish(env, packageDir, "--access", "public")); - expect(exitCode).toBe(0); - - expect(await exists(join(verdaccio.packagesPath, "publish-pkg-7"))).toBeTrue(); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), 
"install"], + cwd: packageDir, + stderr: "pipe", + stdout: "pipe", + env, }); - for (const access of ["restricted", "public"]) { - test(`access ${access}`, async () => { - const bunfig = await authBunfig("access" + access); - - const pkgJson = { - name: "@secret/publish-pkg-8", - version: "8.8.8", + const out = await Bun.readableStreamToText(stdout); + expect(out).not.toContain("no-deps"); + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain(`HTTPThread: could not find CA file: '${join(packageDir, "does-not-exist")}'`); + expect(await exited).toBe(1); + }); + test("invalid cafile", async () => { + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", dependencies: { - "@secret/publish-pkg-8": "8.8.8", - }, - publishConfig: { - access, + "no-deps": "1.1.1", }, - }; - - await Promise.all([ - rm(join(verdaccio.packagesPath, "@secret", "publish-pkg-8"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write(packageJson, JSON.stringify(pkgJson)), - ]); - - let { out, err, exitCode } = await publish(env, packageDir); - expect(exitCode).toBe(0); + }), + ), + write( + join(packageDir, "invalid-cafile"), + `-----BEGIN CERTIFICATE----- +jlwkjekfjwlejlgldjfljlkwjef +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +ljelkjwelkgjw;lekj;lkejflkj +-----END CERTIFICATE-----`, + ), + ]); - await runBunInstall(env, packageDir); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--cafile", join(packageDir, "invalid-cafile")], + cwd: packageDir, + stderr: "pipe", + stdout: "pipe", + env, + }); - expect(await file(join(packageDir, "node_modules", "@secret", "publish-pkg-8", "package.json")).json()).toEqual( - pkgJson, - ); - }); - } + const out = await Bun.readableStreamToText(stdout); + expect(out).not.toContain("no-deps"); + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain(`HTTPThread: invalid CA file: '${join(packageDir, "invalid-cafile")}'`); + expect(await exited).toBe(1); }); - - describe("tag", async () => { - test("can publish with a tag", async () => { - const bunfig = await authBunfig("simpletag"); - const pkgJson = { - name: "publish-pkg-9", - version: "9.9.9", + test("invalid --ca", async () => { + await write( + packageJson, + JSON.stringify({ + name: "foo", + version: "1.0.0", dependencies: { - "publish-pkg-9": "simpletag", + "no-deps": "1.1.1", }, - }; - await Promise.all([ - rm(join(verdaccio.packagesPath, "publish-pkg-9"), { recursive: true, force: true }), - write(join(packageDir, "bunfig.toml"), bunfig), - write(packageJson, JSON.stringify(pkgJson)), - ]); - - let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); - expect(exitCode).toBe(0); - - await runBunInstall(env, packageDir); - expect(await file(join(packageDir, "node_modules", "publish-pkg-9", "package.json")).json()).toEqual(pkgJson); + }), + ); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--ca", "not-valid"], + cwd: packageDir, + stderr: "pipe", + stdout: "pipe", + env, }); + + const out = await Bun.readableStreamToText(stdout); + expect(out).not.toContain("no-deps"); + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain("HTTPThread: the CA is invalid"); + expect(await exited).toBe(1); }); }); @@ -3264,19 +2364,15 @@ test("package added after install", async () => { "", expect.stringContaining("+ no-deps@1.0.0"), "", - "2 packages installed", + "1 package installed", ]); - expect(await 
-  expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({
-    name: "no-deps",
-    version: "1.0.0",
-  } as any);
-  expect(
-    await file(join(packageDir, "node_modules", "one-range-dep", "node_modules", "no-deps", "package.json")).json(),
-  ).toEqual({
-    name: "no-deps",
-    version: "1.1.0",
-  } as any);
   expect(await exited).toBe(0);
+  expect(
+    await Promise.all([
+      file(join(packageDir, "node_modules", "no-deps", "package.json")).json(),
+      exists(join(packageDir, "node_modules", "one-range-dep", "node_modules")),
+    ]),
+  ).toEqual([{ name: "no-deps", version: "1.0.0" }, false]);
   assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
   await rm(join(packageDir, "node_modules"), { recursive: true, force: true });
@@ -3301,7 +2397,7 @@ test("package added after install", async () => {
     expect.stringContaining("+ no-deps@1.0.0"),
     "+ one-range-dep@1.0.0",
     "",
-    "3 packages installed",
+    "2 packages installed",
   ]);
   expect(await exited).toBe(0);
   assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
@@ -4697,87 +3793,84 @@ describe("hoisting", async () => {
     },
   ];
   for (const { dependencies, expected, situation } of peerTests) {
-    test.todoIf(isFlaky && isMacOS && situation === "peer ^1.0.2")(
-      `it should hoist ${expected} when ${situation}`,
-      async () => {
-        await writeFile(
-          packageJson,
-          JSON.stringify({
-            name: "foo",
-            dependencies,
-          }),
-        );
-
-        var { stdout, stderr, exited } = spawn({
-          cmd: [bunExe(), "install"],
-          cwd: packageDir,
-          stdout: "pipe",
-          stdin: "pipe",
-          stderr: "pipe",
-          env,
-        });
+    test(`it should hoist ${expected} when ${situation}`, async () => {
+      await writeFile(
+        packageJson,
+        JSON.stringify({
+          name: "foo",
+          dependencies,
+        }),
+      );
-        var err = await new Response(stderr).text();
-        var out = await new Response(stdout).text();
-        expect(err).toContain("Saved lockfile");
-        expect(err).not.toContain("not found");
-        expect(err).not.toContain("error:");
-        for (const dep of Object.keys(dependencies)) {
-          expect(out).toContain(`+ ${dep}@${dependencies[dep]}`);
-        }
-        expect(await exited).toBe(0);
-        assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
+      var { stdout, stderr, exited } = spawn({
+        cmd: [bunExe(), "install"],
+        cwd: packageDir,
+        stdout: "pipe",
+        stdin: "pipe",
+        stderr: "pipe",
+        env,
+      });
-        expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected);
-
-        await rm(join(packageDir, "bun.lockb"));
-
-        ({ stdout, stderr, exited } = spawn({
-          cmd: [bunExe(), "install"],
-          cwd: packageDir,
-          stdout: "pipe",
-          stdin: "pipe",
-          stderr: "pipe",
-          env,
-        }));
-
-        err = await new Response(stderr).text();
-        out = await new Response(stdout).text();
-        expect(err).toContain("Saved lockfile");
-        expect(err).not.toContain("not found");
-        expect(err).not.toContain("error:");
-        if (out.includes("installed")) {
-          console.log("stdout:", out);
-        }
-        expect(out).not.toContain("package installed");
-        expect(await exited).toBe(0);
-        assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
+      var err = await new Response(stderr).text();
+      var out = await new Response(stdout).text();
+      expect(err).toContain("Saved lockfile");
+      expect(err).not.toContain("not found");
+      expect(err).not.toContain("error:");
+      for (const dep of Object.keys(dependencies)) {
+        expect(out).toContain(`+ ${dep}@${dependencies[dep]}`);
+      }
+      expect(await exited).toBe(0);
+      assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - err = await new Response(stderr).text(); - out = await new Response(stdout).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out).not.toContain("package installed"); - expect(await exited).toBe(0); - assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); - expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); - }, - ); + await rm(join(packageDir, "bun.lockb")); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + if (out.includes("installed")) { + console.log("stdout:", out); + } + expect(out).not.toContain("package installed"); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out).not.toContain("package installed"); + expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); + }); } }); @@ -5692,7 +4785,11 @@ describe("transitive file dependencies", () => { "", "+ @another-scope/file-dep@1.0.0", "+ @scoped/file-dep@1.0.0", - "+ aliased-file-dep@1.0.1", + // 'aliased-file-dep' is hoisted to the root, because + // it coming from the registry, and since this + // install is from the workspace, it won't be included + // in the terminal output + // "+ aliased-file-dep@1.0.1", "+ dep-file-dep@1.0.0", expect.stringContaining("+ file-dep@1.0.0"), "+ missing-file-dep@1.0.0", @@ -5723,7 +4820,7 @@ describe("transitive file dependencies", () => { "", "+ @another-scope/file-dep@1.0.0", "+ @scoped/file-dep@1.0.0", - "+ aliased-file-dep@1.0.1", + // "+ aliased-file-dep@1.0.1", "+ dep-file-dep@1.0.0", expect.stringContaining("+ file-dep@1.0.0"), "+ missing-file-dep@1.0.0", @@ -6868,6 +5965,40 @@ describe("update", () => { expect(files).toMatchObject([{ version: "2.0.0" }, { dependencies: { "no-deps": "2.0.0" } }]); }); + + test("updating a dependency will deduplicate it if possible", async () => { + await write( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "zzz-1": "1.0.89", + "zzz-2": 
"1.0.89", + }, + }), + ); + + const { exited } = spawn({ + cmd: [bunExe(), "install", "--save-text-lockfile"], + cwd: packageDir, + env, + }); + expect(await exited).toBe(0); + + expect(await file(join(packageDir, "node_modules", "zzz-1", "package.json")).json()).toEqual({ + name: "zzz-1", + version: "1.0.89", + }); + + await runBunUpdate(env, packageDir, ["--latest"]); + + expect( + await Promise.all([ + file(join(packageDir, "node_modules", "zzz-1", "package.json")).json(), + exists(join(packageDir, "node_modules", "zzz-2", "node_modules")), + ]), + ).toEqual([{ name: "zzz-1", version: "1.0.90" }, false]); + }); }); test("packages dependening on each other with aliases does not infinitely loop", async () => { @@ -9662,79 +8793,3 @@ registry = "http://localhost:${port}/" }); } }); - -it("$npm_command is accurate during publish", async () => { - await write( - packageJson, - JSON.stringify({ - name: "publish-pkg-10", - version: "1.0.0", - scripts: { - publish: "echo $npm_command", - }, - }), - ); - await write(join(packageDir, "bunfig.toml"), await authBunfig("npm_command")); - await rm(join(verdaccio.packagesPath, "publish-pkg-10"), { recursive: true, force: true }); - let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); - expect(err).toBe(`$ echo $npm_command\n`); - expect(out.split("\n")).toEqual([ - `bun publish ${Bun.version_with_sha}`, - ``, - `packed 95B package.json`, - ``, - `Total files: 1`, - expect.stringContaining(`Shasum: `), - expect.stringContaining(`Integrity: sha512-`), - `Unpacked size: 95B`, - expect.stringContaining(`Packed size: `), - `Tag: simpletag`, - `Access: default`, - `Registry: http://localhost:${port}/`, - ``, - ` + publish-pkg-10@1.0.0`, - `publish`, - ``, - ]); - expect(exitCode).toBe(0); -}); - -it("$npm_lifecycle_event is accurate during publish", async () => { - await write( - packageJson, - `{ - "name": "publish-pkg-11", - "version": "1.0.0", - "scripts": { - "prepublish": "echo 1 $npm_lifecycle_event", - "publish": "echo 2 $npm_lifecycle_event", - "postpublish": "echo 3 $npm_lifecycle_event", - }, - } - `, - ); - await write(join(packageDir, "bunfig.toml"), await authBunfig("npm_lifecycle_event")); - await rm(join(verdaccio.packagesPath, "publish-pkg-11"), { recursive: true, force: true }); - let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); - expect(err).toBe(`$ echo 2 $npm_lifecycle_event\n$ echo 3 $npm_lifecycle_event\n`); - expect(out.split("\n")).toEqual([ - `bun publish ${Bun.version_with_sha}`, - ``, - `packed 256B package.json`, - ``, - `Total files: 1`, - expect.stringContaining(`Shasum: `), - expect.stringContaining(`Integrity: sha512-`), - `Unpacked size: 256B`, - expect.stringContaining(`Packed size: `), - `Tag: simpletag`, - `Access: default`, - `Registry: http://localhost:${port}/`, - ``, - ` + publish-pkg-11@1.0.0`, - `2 publish`, - `3 postpublish`, - ``, - ]); - expect(exitCode).toBe(0); -}); diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts index fb4a1d7c40e2ff..a2fb4c07ada076 100644 --- a/test/cli/install/bun-install.test.ts +++ b/test/cli/install/bun-install.test.ts @@ -2978,8 +2978,13 @@ it("should get npm alias with matching version", async () => { "2 packages installed", ]); expect(await exited).toBe(0); - expect(urls.sort()).toEqual([`${root_url}/baz`, `${root_url}/baz-0.0.5.tgz`]); - expect(requested).toBe(2); + expect(urls.sort()).toEqual([ + `${root_url}/baz`, + `${root_url}/baz-0.0.5.tgz`, + `${root_url}/boba`, + 
+    `${root_url}/boba-0.0.5.tgz`,
+  ]);
+  expect(requested).toBe(4);
   expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "boba", "moo"]);
   expect(await file(join(package_dir, "node_modules", "boba", "package.json")).json()).toEqual({
     name: "baz",
diff --git a/test/cli/install/bun-lock.test.ts b/test/cli/install/bun-lock.test.ts
index f60725be5f1236..8e8892909563e9 100644
--- a/test/cli/install/bun-lock.test.ts
+++ b/test/cli/install/bun-lock.test.ts
@@ -1,9 +1,130 @@
 import { spawn, write, file } from "bun";
-import { expect, it } from "bun:test";
+import { expect, it, beforeAll, beforeEach, afterAll } from "bun:test";
 import { access, copyFile, open, writeFile } from "fs/promises";
-import { bunExe, bunEnv as env, isWindows, tmpdirSync } from "harness";
+import { bunExe, bunEnv as env, isWindows, tmpdirSync, VerdaccioRegistry } from "harness";
 import { join } from "path";
+
+var verdaccio: VerdaccioRegistry;
+var packageDir: string;
+var packageJson: string;
+
+beforeAll(async () => {
+  verdaccio = new VerdaccioRegistry();
+  await verdaccio.start();
+});
+
+afterAll(() => {
+  verdaccio.stop();
+});
+
+beforeEach(async () => {
+  ({ packageDir, packageJson } = await verdaccio.createTestDir());
+  env.BUN_INSTALL_CACHE_DIR = join(packageDir, ".bun-cache");
+  env.BUN_TMPDIR = env.TMPDIR = env.TEMP = join(packageDir, ".bun-tmp");
+});
+
+it("should update dependency version literal when no updates are necessary", async () => {
+  await Promise.all([
+    write(
+      packageJson,
+      JSON.stringify({
+        workspaces: ["packages/*"],
+        dependencies: {
+          "no-deps": "1.0.0",
+        },
+      }),
+    ),
+    write(
+      join(packageDir, "packages", "pkg1", "package.json"),
+      JSON.stringify({
+        name: "pkg1",
+        dependencies: {
+          "a-dep": "1.0.1",
+        },
+      }),
+    ),
+  ]);
+
+  let { exited } = spawn({
+    cmd: [bunExe(), "install", "--save-text-lockfile"],
+    cwd: packageDir,
+    env,
+  });
+
+  expect(await exited).toBe(0);
+
+  const firstLockfile = (await file(join(packageDir, "bun.lock")).text()).replaceAll(
+    /localhost:\d+/g,
+    "localhost:1234",
+  );
+  expect(firstLockfile).toMatchSnapshot();
+
+  // "no-deps" is updated, but the version still satisfies the resolved
+  // package in the lockfile. no install should happen, but the dependency
+  // string in the lockfile should be updated.
+  await write(
+    packageJson,
+    JSON.stringify({
+      workspaces: ["packages/*"],
+      dependencies: {
+        "no-deps": "^1.0.0",
+      },
+    }),
+  );
+
+  ({ exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: packageDir,
+    env,
+  }));
+
+  expect(await exited).toBe(0);
+
+  expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({
+    name: "no-deps",
+    version: "1.0.0",
+  });
+
+  const secondLockfile = (await file(join(packageDir, "bun.lock")).text()).replaceAll(
+    /localhost:\d+/g,
+    "localhost:1234",
+  );
+  expect(firstLockfile).not.toBe(secondLockfile);
+  expect(secondLockfile).toMatchSnapshot();
+
+  // now the same with "a-dep" in the workspace
+  await write(
+    join(packageDir, "packages", "pkg1", "package.json"),
+    JSON.stringify({
+      name: "pkg1",
+      dependencies: {
+        "a-dep": "^1.0.1",
+      },
+    }),
+  );
+
+  ({ exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: packageDir,
+    env,
+  }));
+
+  expect(await exited).toBe(0);
+
+  expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).json()).toEqual({
+    name: "a-dep",
+    version: "1.0.1",
+  });
+
+  const thirdLockfile = (await file(join(packageDir, "bun.lock")).text()).replaceAll(
+    /localhost:\d+/g,
+    "localhost:1234",
+  );
+  expect(thirdLockfile).not.toBe(secondLockfile);
+  expect(thirdLockfile).not.toBe(firstLockfile);
+  expect(thirdLockfile).toMatchSnapshot();
+});
+
 it("should write plaintext lockfiles", async () => {
   const package_dir = tmpdirSync();
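Editor's note on the test above: it pins down the intended behavior that when only the version *range* in package.json changes but the already-resolved version still satisfies it, bun should rewrite the stored literal in bun.lock without reinstalling. A toy illustration of that check in Zig, using std.SemanticVersion in place of bun's full Semver.Query range logic (the caret handling here is deliberately simplified and hypothetical):

```zig
const std = @import("std");

// A toy "does ^range still cover the resolved version" check, standing in
// for bun's real range parser, which handles full semver range syntax.
fn caretSatisfies(resolved: std.SemanticVersion, base: std.SemanticVersion) bool {
    // ^x.y.z: same major, and resolved >= base.
    return resolved.major == base.major and resolved.order(base) != .lt;
}

pub fn main() !void {
    const resolved = try std.SemanticVersion.parse("1.0.0"); // from the lockfile
    const range_base = try std.SemanticVersion.parse("1.0.0"); // "^1.0.0" in package.json

    if (caretSatisfies(resolved, range_base)) {
        // No network work or reinstall needed; only the stored version
        // literal in bun.lock changes from "1.0.0" to "^1.0.0".
        std.debug.print("update lockfile literal only\n", .{});
    } else {
        std.debug.print("re-resolve and install\n", .{});
    }
}
```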
diff --git a/test/cli/install/bun-publish.test.ts b/test/cli/install/bun-publish.test.ts
new file mode 100644
index 00000000000000..5829524bd787f2
--- /dev/null
+++ b/test/cli/install/bun-publish.test.ts
@@ -0,0 +1,809 @@
+import { describe, test, expect, beforeEach, beforeAll, afterAll, it } from "bun:test";
+import { spawn, file, write } from "bun";
+import { rm, exists } from "fs/promises";
+import {
+  VerdaccioRegistry,
+  bunEnv as env,
+  stderrForInstall,
+  bunExe,
+  runBunInstall,
+  pack,
+  tmpdirSync,
+  isWindows,
+} from "harness";
+import { join } from "path";
+
+var verdaccio: VerdaccioRegistry;
+var packageDir: string;
+var packageJson: string;
+
+beforeAll(async () => {
+  verdaccio = new VerdaccioRegistry();
+  await verdaccio.start();
+});
+
+afterAll(() => {
+  verdaccio.stop();
+});
+
+beforeEach(async () => {
+  ({ packageDir, packageJson } = await verdaccio.createTestDir());
+  env.BUN_INSTALL_CACHE_DIR = join(packageDir, ".bun-cache");
+  env.BUN_TMPDIR = env.TMPDIR = env.TEMP = join(packageDir, ".bun-tmp");
+});
+
+export async function publish(
+  env: any,
+  cwd: string,
+  ...args: string[]
+): Promise<{ out: string; err: string; exitCode: number }> {
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "publish", ...args],
+    cwd,
+    stdout: "pipe",
+    stderr: "pipe",
+    env,
+  });
+
+  const out = await Bun.readableStreamToText(stdout);
+  const err = stderrForInstall(await Bun.readableStreamToText(stderr));
+  const exitCode = await exited;
+  return { out, err, exitCode };
+}
+
+describe("otp", async () => {
+  const mockRegistryFetch = function (opts: {
+    token: string;
+    setAuthHeader?: boolean;
+    otpFail?: boolean;
+    npmNotice?: boolean;
+    xLocalCache?: boolean;
+    expectedCI?: string;
+  }) {
+    return async function (req: Request) {
+      const { token, setAuthHeader = true, otpFail = false, npmNotice = false, xLocalCache = false } = opts;
+      if (req.url.includes("otp-pkg")) {
+        if (opts.expectedCI) {
+          expect(req.headers.get("user-agent")).toContain("ci/" + opts.expectedCI);
+        }
+        if (req.headers.get("npm-otp") === token) {
+          if (otpFail) {
+            return new Response(
+              JSON.stringify({
+                error: "You must provide a one-time pass. Upgrade your client to npm@latest in order to use 2FA.",
+              }),
+              { status: 401 },
+            );
+          } else {
+            return new Response("OK", { status: 200 });
+          }
+        } else {
+          const headers = new Headers();
+          if (setAuthHeader) headers.set("www-authenticate", "OTP");
+
+          // `bun publish` won't request a url from a message in the npm-notice header, but we
+          // can test that it's displayed
+          if (npmNotice) headers.set("npm-notice", `visit http://localhost:${this.port}/auth to login`);
+
+          // npm-notice will be ignored
+          if (xLocalCache) headers.set("x-local-cache", "true");
+
+          return new Response(
+            JSON.stringify({
+              // this isn't accurate, but we just want to check that finding this string works
+              mock: setAuthHeader ? "" : "one-time password",
+
+              authUrl: `http://localhost:${this.port}/auth`,
+              doneUrl: `http://localhost:${this.port}/done`,
+            }),
+            {
+              status: 401,
+              headers,
+            },
+          );
+        }
+      } else if (req.url.endsWith("auth")) {
+        expect.unreachable("url given to user, bun publish should not request");
+      } else if (req.url.endsWith("done")) {
+        // send a fake response saying the user has authenticated successfully with the auth url
+        return new Response(JSON.stringify({ token: token }), { status: 200 });
+      }
+
+      expect.unreachable("unexpected url");
+    };
+  };
+
+  for (const setAuthHeader of [true, false]) {
+    test("mock web login" + (setAuthHeader ? "" : " (without auth header)"), async () => {
+      const token = await verdaccio.generateUser("otp" + (setAuthHeader ? "" : "noheader"), "otp");
+
+      using mockRegistry = Bun.serve({
+        port: 0,
+        fetch: mockRegistryFetch({ token }),
+      });
+
+      const bunfig = `
+      [install]
+      cache = false
+      registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`;
+      await Promise.all([
+        rm(join(verdaccio.packagesPath, "otp-pkg-1"), { recursive: true, force: true }),
+        write(join(packageDir, "bunfig.toml"), bunfig),
+        write(
+          packageJson,
+          JSON.stringify({
+            name: "otp-pkg-1",
+            version: "2.2.2",
+            dependencies: {
+              "otp-pkg-1": "2.2.2",
+            },
+          }),
+        ),
+      ]);
+
+      const { out, err, exitCode } = await publish(env, packageDir);
+      expect(exitCode).toBe(0);
+    });
+  }
+
+  test("otp failure", async () => {
+    const token = await verdaccio.generateUser("otp-fail", "otp");
+    using mockRegistry = Bun.serve({
+      port: 0,
+      fetch: mockRegistryFetch({ token, otpFail: true }),
+    });
+
+    const bunfig = `
+    [install]
+    cache = false
+    registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`;
+
+    await Promise.all([
+      rm(join(verdaccio.packagesPath, "otp-pkg-2"), { recursive: true, force: true }),
+      write(join(packageDir, "bunfig.toml"), bunfig),
+      write(
+        packageJson,
+        JSON.stringify({
+          name: "otp-pkg-2",
+          version: "1.1.1",
+          dependencies: {
+            "otp-pkg-2": "1.1.1",
+          },
+        }),
+      ),
+    ]);
+
+    const { out, err, exitCode } = await publish(env, packageDir);
+    expect(exitCode).toBe(1);
+    expect(err).toContain(" - Received invalid OTP");
+  });
+
+  for (const shouldIgnoreNotice of [false, true]) {
+    test(`npm-notice with login url${shouldIgnoreNotice ? " (ignored)" : ""}`, async () => {
+      // Situation: user has 2FA enabled account with faceid sign-in.
+      // They run `bun publish` with --auth-type=legacy, prompting them
+      // to enter their OTP. Because they have faceid sign-in, they don't
+      // have a code to enter, so npm sends a message in the npm-notice
+      // header with a url for logging in.
+ const token = await verdaccio.generateUser(`otp-notice${shouldIgnoreNotice ? "-ignore" : ""}`, "otp"); + using mockRegistry = Bun.serve({ + port: 0, + fetch: mockRegistryFetch({ token, npmNotice: true, xLocalCache: shouldIgnoreNotice }), + }); + + const bunfig = ` + [install] + cache = false + registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; + + await Promise.all([ + rm(join(verdaccio.packagesPath, "otp-pkg-3"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write( + packageJson, + JSON.stringify({ + name: "otp-pkg-3", + version: "3.3.3", + dependencies: { + "otp-pkg-3": "3.3.3", + }, + }), + ), + ]); + + const { out, err, exitCode } = await publish(env, packageDir); + expect(exitCode).toBe(0); + if (shouldIgnoreNotice) { + expect(err).not.toContain(`note: visit http://localhost:${mockRegistry.port}/auth to login`); + } else { + expect(err).toContain(`note: visit http://localhost:${mockRegistry.port}/auth to login`); + } + }); + } + + const fakeCIEnvs = [ + { ci: "expo-application-services", envs: { EAS_BUILD: "hi" } }, + { ci: "codemagic", envs: { CM_BUILD_ID: "hi" } }, + { ci: "vercel", envs: { "NOW_BUILDER": "hi" } }, + ]; + for (const envInfo of fakeCIEnvs) { + test(`CI user agent name: ${envInfo.ci}`, async () => { + const token = await verdaccio.generateUser(`otp-${envInfo.ci}`, "otp"); + using mockRegistry = Bun.serve({ + port: 0, + fetch: mockRegistryFetch({ token, expectedCI: envInfo.ci }), + }); + + const bunfig = ` + [install] + cache = false + registry = { url = "http://localhost:${mockRegistry.port}", token = "${token}" }`; + + await Promise.all([ + rm(join(verdaccio.packagesPath, "otp-pkg-4"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write( + packageJson, + JSON.stringify({ + name: "otp-pkg-4", + version: "4.4.4", + dependencies: { + "otp-pkg-4": "4.4.4", + }, + }), + ), + ]); + + const { out, err, exitCode } = await publish( + { ...env, ...envInfo.envs, ...{ BUILDKITE: undefined, GITHUB_ACTIONS: undefined } }, + packageDir, + ); + expect(exitCode).toBe(0); + }); + } +}); + +test("can publish a package then install it", async () => { + const bunfig = await verdaccio.authBunfig("basic"); + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-1"), { recursive: true, force: true }), + write( + packageJson, + JSON.stringify({ + name: "publish-pkg-1", + version: "1.1.1", + dependencies: { + "publish-pkg-1": "1.1.1", + }, + }), + ), + write(join(packageDir, "bunfig.toml"), bunfig), + ]); + + const { out, err, exitCode } = await publish(env, packageDir); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(exitCode).toBe(0); + + await runBunInstall(env, packageDir); + expect(await exists(join(packageDir, "node_modules", "publish-pkg-1", "package.json"))).toBeTrue(); +}); +test("can publish from a tarball", async () => { + const bunfig = await verdaccio.authBunfig("tarball"); + const json = { + name: "publish-pkg-2", + version: "2.2.2", + dependencies: { + "publish-pkg-2": "2.2.2", + }, + }; + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-2"), { recursive: true, force: true }), + write(packageJson, JSON.stringify(json)), + write(join(packageDir, "bunfig.toml"), bunfig), + ]); + + await pack(packageDir, env); + + let { out, err, exitCode } = await publish(env, packageDir, "./publish-pkg-2-2.2.2.tgz"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + 
expect(exitCode).toBe(0); + + await runBunInstall(env, packageDir); + expect(await exists(join(packageDir, "node_modules", "publish-pkg-2", "package.json"))).toBeTrue(); + + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-2"), { recursive: true, force: true }), + rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }), + rm(join(packageDir, "node_modules"), { recursive: true, force: true }), + ]); + + // now with an absolute path + ({ out, err, exitCode } = await publish(env, packageDir, join(packageDir, "publish-pkg-2-2.2.2.tgz"))); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(exitCode).toBe(0); + + await runBunInstall(env, packageDir); + expect(await file(join(packageDir, "node_modules", "publish-pkg-2", "package.json")).json()).toEqual(json); +}); + +for (const info of [ + { user: "bin1", bin: "bin1.js" }, + { user: "bin2", bin: { bin1: "bin1.js", bin2: "bin2.js" } }, + { user: "bin3", directories: { bin: "bins" } }, +]) { + test(`can publish and install binaries with ${JSON.stringify(info)}`, async () => { + const publishDir = tmpdirSync(); + const bunfig = await verdaccio.authBunfig("binaries-" + info.user); + + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-bins"), { recursive: true, force: true }), + write( + join(publishDir, "package.json"), + JSON.stringify({ + name: "publish-pkg-bins", + version: "1.1.1", + ...info, + }), + ), + write(join(publishDir, "bunfig.toml"), bunfig), + write(join(publishDir, "bin1.js"), `#!/usr/bin/env bun\nconsole.log("bin1!")`), + write(join(publishDir, "bin2.js"), `#!/usr/bin/env bun\nconsole.log("bin2!")`), + write(join(publishDir, "bins", "bin3.js"), `#!/usr/bin/env bun\nconsole.log("bin3!")`), + write(join(publishDir, "bins", "moredir", "bin4.js"), `#!/usr/bin/env bun\nconsole.log("bin4!")`), + + write( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "publish-pkg-bins": "1.1.1", + }, + }), + ), + ]); + + const { out, err, exitCode } = await publish(env, publishDir); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(out).toContain("+ publish-pkg-bins@1.1.1"); + expect(exitCode).toBe(0); + + await runBunInstall(env, packageDir); + + const results = await Promise.all([ + exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin1.bunx" : "bin1")), + exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin2.bunx" : "bin2")), + exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin3.js.bunx" : "bin3.js")), + exists(join(packageDir, "node_modules", ".bin", isWindows ? "bin4.js.bunx" : "bin4.js")), + exists(join(packageDir, "node_modules", ".bin", isWindows ? "moredir" : "moredir/bin4.js")), + exists(join(packageDir, "node_modules", ".bin", isWindows ?
"publish-pkg-bins.bunx" : "publish-pkg-bins")), + ]); + + switch (info.user) { + case "bin1": { + expect(results).toEqual([false, false, false, false, false, true]); + break; + } + case "bin2": { + expect(results).toEqual([true, true, false, false, false, false]); + break; + } + case "bin3": { + expect(results).toEqual([false, false, true, true, !isWindows, false]); + break; + } + } + }); +} + +test("dependencies are installed", async () => { + const publishDir = tmpdirSync(); + const bunfig = await verdaccio.authBunfig("manydeps"); + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-deps"), { recursive: true, force: true }), + write( + join(publishDir, "package.json"), + JSON.stringify( + { + name: "publish-pkg-deps", + version: "1.1.1", + dependencies: { + "no-deps": "1.0.0", + }, + peerDependencies: { + "a-dep": "1.0.1", + }, + optionalDependencies: { + "basic-1": "1.0.0", + }, + }, + null, + 2, + ), + ), + write(join(publishDir, "bunfig.toml"), bunfig), + write( + packageJson, + JSON.stringify({ + name: "foo", + dependencies: { + "publish-pkg-deps": "1.1.1", + }, + }), + ), + ]); + + let { out, err, exitCode } = await publish(env, publishDir); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + expect(out).toContain("+ publish-pkg-deps@1.1.1"); + expect(exitCode).toBe(0); + + await runBunInstall(env, packageDir); + + const results = await Promise.all([ + exists(join(packageDir, "node_modules", "no-deps", "package.json")), + exists(join(packageDir, "node_modules", "a-dep", "package.json")), + exists(join(packageDir, "node_modules", "basic-1", "package.json")), + ]); + + expect(results).toEqual([true, true, true]); +}); + +test("can publish workspace package", async () => { + const bunfig = await verdaccio.authBunfig("workspace"); + const pkgJson = { + name: "publish-pkg-3", + version: "3.3.3", + dependencies: { + "publish-pkg-3": "3.3.3", + }, + }; + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-3"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write( + packageJson, + JSON.stringify({ + name: "root", + workspaces: ["packages/*"], + }), + ), + write(join(packageDir, "packages", "publish-pkg-3", "package.json"), JSON.stringify(pkgJson)), + ]); + + await publish(env, join(packageDir, "packages", "publish-pkg-3")); + + await write(packageJson, JSON.stringify({ name: "root", "dependencies": { "publish-pkg-3": "3.3.3" } })); + + await runBunInstall(env, packageDir); + + expect(await file(join(packageDir, "node_modules", "publish-pkg-3", "package.json")).json()).toEqual(pkgJson); +}); + +describe("--dry-run", async () => { + test("does not publish", async () => { + const bunfig = await verdaccio.authBunfig("dryrun"); + await Promise.all([ + rm(join(verdaccio.packagesPath, "dry-run-1"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write( + packageJson, + JSON.stringify({ + name: "dry-run-1", + version: "1.1.1", + dependencies: { + "dry-run-1": "1.1.1", + }, + }), + ), + ]); + + const { out, err, exitCode } = await publish(env, packageDir, "--dry-run"); + expect(exitCode).toBe(0); + + expect(await exists(join(verdaccio.packagesPath, "dry-run-1"))).toBeFalse(); + }); + test("does not publish from tarball path", async () => { + const bunfig = await verdaccio.authBunfig("dryruntarball"); + await Promise.all([ + rm(join(verdaccio.packagesPath, "dry-run-2"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write( + 
packageJson, + JSON.stringify({ + name: "dry-run-2", + version: "2.2.2", + dependencies: { + "dry-run-2": "2.2.2", + }, + }), + ), + ]); + + await pack(packageDir, env); + + const { out, err, exitCode } = await publish(env, packageDir, "./dry-run-2-2.2.2.tgz", "--dry-run"); + expect(exitCode).toBe(0); + + expect(await exists(join(verdaccio.packagesPath, "dry-run-2"))).toBeFalse(); + }); +}); + +describe("lifecycle scripts", async () => { + const script = `const fs = require("fs"); + fs.writeFileSync(process.argv[2] + ".txt", \` +prepublishOnly: \${fs.existsSync("prepublishOnly.txt")} +publish: \${fs.existsSync("publish.txt")} +postpublish: \${fs.existsSync("postpublish.txt")} +prepack: \${fs.existsSync("prepack.txt")} +prepare: \${fs.existsSync("prepare.txt")} +postpack: \${fs.existsSync("postpack.txt")}\`)`; + const json = { + name: "publish-pkg-4", + version: "4.4.4", + scripts: { + // should happen in this order + "prepublishOnly": `${bunExe()} script.js prepublishOnly`, + "prepack": `${bunExe()} script.js prepack`, + "prepare": `${bunExe()} script.js prepare`, + "postpack": `${bunExe()} script.js postpack`, + "publish": `${bunExe()} script.js publish`, + "postpublish": `${bunExe()} script.js postpublish`, + }, + dependencies: { + "publish-pkg-4": "4.4.4", + }, + }; + + for (const arg of ["", "--dry-run"]) { + test(`should run in order${arg ? " (--dry-run)" : ""}`, async () => { + const bunfig = await verdaccio.authBunfig("lifecycle" + (arg ? "dry" : "")); + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-4"), { recursive: true, force: true }), + write(packageJson, JSON.stringify(json)), + write(join(packageDir, "script.js"), script), + write(join(packageDir, "bunfig.toml"), bunfig), + ]); + + const { out, err, exitCode } = await publish(env, packageDir, arg); + expect(exitCode).toBe(0); + + const results = await Promise.all([ + file(join(packageDir, "prepublishOnly.txt")).text(), + file(join(packageDir, "prepack.txt")).text(), + file(join(packageDir, "prepare.txt")).text(), + file(join(packageDir, "postpack.txt")).text(), + file(join(packageDir, "publish.txt")).text(), + file(join(packageDir, "postpublish.txt")).text(), + ]); + + expect(results).toEqual([ + "\nprepublishOnly: false\npublish: false\npostpublish: false\nprepack: false\nprepare: false\npostpack: false", + "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: false\nprepare: false\npostpack: false", + "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: true\nprepare: false\npostpack: false", + "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: true\nprepare: true\npostpack: false", + "\nprepublishOnly: true\npublish: false\npostpublish: false\nprepack: true\nprepare: true\npostpack: true", + "\nprepublishOnly: true\npublish: true\npostpublish: false\nprepack: true\nprepare: true\npostpack: true", + ]); + }); + } + + test("--ignore-scripts", async () => { + const bunfig = await verdaccio.authBunfig("ignorescripts"); + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-5"), { recursive: true, force: true }), + write(packageJson, JSON.stringify(json)), + write(join(packageDir, "script.js"), script), + write(join(packageDir, "bunfig.toml"), bunfig), + ]); + + const { out, err, exitCode } = await publish(env, packageDir, "--ignore-scripts"); + expect(exitCode).toBe(0); + + const results = await Promise.all([ + exists(join(packageDir, "prepublishOnly.txt")), + exists(join(packageDir, "prepack.txt")), + exists(join(packageDir, "prepare.txt")), 
+ exists(join(packageDir, "postpack.txt")), + exists(join(packageDir, "publish.txt")), + exists(join(packageDir, "postpublish.txt")), + ]); + + expect(results).toEqual([false, false, false, false, false, false]); + }); +}); + +test("attempting to publish a private package should fail", async () => { + const bunfig = await verdaccio.authBunfig("privatepackage"); + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-6"), { recursive: true, force: true }), + write( + packageJson, + JSON.stringify({ + name: "publish-pkg-6", + version: "6.6.6", + private: true, + dependencies: { + "publish-pkg-6": "6.6.6", + }, + }), + ), + write(join(packageDir, "bunfig.toml"), bunfig), + ]); + + // should fail + let { out, err, exitCode } = await publish(env, packageDir); + expect(exitCode).toBe(1); + expect(err).toContain("error: attempted to publish a private package"); + expect(await exists(join(verdaccio.packagesPath, "publish-pkg-6-6.6.6.tgz"))).toBeFalse(); + + // try tarball + await pack(packageDir, env); + ({ out, err, exitCode } = await publish(env, packageDir, "./publish-pkg-6-6.6.6.tgz")); + expect(exitCode).toBe(1); + expect(err).toContain("error: attempted to publish a private package"); + expect(await exists(join(packageDir, "publish-pkg-6-6.6.6.tgz"))).toBeTrue(); +}); + +describe("access", async () => { + test("--access", async () => { + const bunfig = await verdaccio.authBunfig("accessflag"); + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-7"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write( + packageJson, + JSON.stringify({ + name: "publish-pkg-7", + version: "7.7.7", + }), + ), + ]); + + // should fail + let { out, err, exitCode } = await publish(env, packageDir, "--access", "restricted"); + expect(exitCode).toBe(1); + expect(err).toContain("error: unable to restrict access to unscoped package"); + + ({ out, err, exitCode } = await publish(env, packageDir, "--access", "public")); + expect(exitCode).toBe(0); + + expect(await exists(join(verdaccio.packagesPath, "publish-pkg-7"))).toBeTrue(); + }); + + for (const access of ["restricted", "public"]) { + test(`access ${access}`, async () => { + const bunfig = await verdaccio.authBunfig("access" + access); + + const pkgJson = { + name: "@secret/publish-pkg-8", + version: "8.8.8", + dependencies: { + "@secret/publish-pkg-8": "8.8.8", + }, + publishConfig: { + access, + }, + }; + + await Promise.all([ + rm(join(verdaccio.packagesPath, "@secret", "publish-pkg-8"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write(packageJson, JSON.stringify(pkgJson)), + ]); + + let { out, err, exitCode } = await publish(env, packageDir); + expect(exitCode).toBe(0); + + await runBunInstall(env, packageDir); + + expect(await file(join(packageDir, "node_modules", "@secret", "publish-pkg-8", "package.json")).json()).toEqual( + pkgJson, + ); + }); + } +}); + +describe("tag", async () => { + test("can publish with a tag", async () => { + const bunfig = await verdaccio.authBunfig("simpletag"); + const pkgJson = { + name: "publish-pkg-9", + version: "9.9.9", + dependencies: { + "publish-pkg-9": "simpletag", + }, + }; + await Promise.all([ + rm(join(verdaccio.packagesPath, "publish-pkg-9"), { recursive: true, force: true }), + write(join(packageDir, "bunfig.toml"), bunfig), + write(packageJson, JSON.stringify(pkgJson)), + ]); + + let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); + expect(exitCode).toBe(0); + + await 
runBunInstall(env, packageDir); + expect(await file(join(packageDir, "node_modules", "publish-pkg-9", "package.json")).json()).toEqual(pkgJson); + }); +}); + +it("$npm_command is accurate during publish", async () => { + await write( + packageJson, + JSON.stringify({ + name: "publish-pkg-10", + version: "1.0.0", + scripts: { + publish: "echo $npm_command", + }, + }), + ); + await write(join(packageDir, "bunfig.toml"), await verdaccio.authBunfig("npm_command")); + await rm(join(verdaccio.packagesPath, "publish-pkg-10"), { recursive: true, force: true }); + let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); + expect(err).toBe(`$ echo $npm_command\n`); + expect(out.split("\n")).toEqual([ + `bun publish ${Bun.version_with_sha}`, + ``, + `packed 95B package.json`, + ``, + `Total files: 1`, + expect.stringContaining(`Shasum: `), + expect.stringContaining(`Integrity: sha512-`), + `Unpacked size: 95B`, + expect.stringContaining(`Packed size: `), + `Tag: simpletag`, + `Access: default`, + `Registry: http://localhost:${verdaccio.port}/`, + ``, + ` + publish-pkg-10@1.0.0`, + `publish`, + ``, + ]); + expect(exitCode).toBe(0); +}); + +it("$npm_lifecycle_event is accurate during publish", async () => { + await write( + packageJson, + `{ + "name": "publish-pkg-11", + "version": "1.0.0", + "scripts": { + "prepublish": "echo 1 $npm_lifecycle_event", + "publish": "echo 2 $npm_lifecycle_event", + "postpublish": "echo 3 $npm_lifecycle_event", + }, + } + `, + ); + await write(join(packageDir, "bunfig.toml"), await verdaccio.authBunfig("npm_lifecycle_event")); + await rm(join(verdaccio.packagesPath, "publish-pkg-11"), { recursive: true, force: true }); + let { out, err, exitCode } = await publish(env, packageDir, "--tag", "simpletag"); + expect(err).toBe(`$ echo 2 $npm_lifecycle_event\n$ echo 3 $npm_lifecycle_event\n`); + expect(out.split("\n")).toEqual([ + `bun publish ${Bun.version_with_sha}`, + ``, + `packed 256B package.json`, + ``, + `Total files: 1`, + expect.stringContaining(`Shasum: `), + expect.stringContaining(`Integrity: sha512-`), + `Unpacked size: 256B`, + expect.stringContaining(`Packed size: `), + `Tag: simpletag`, + `Access: default`, + `Registry: http://localhost:${verdaccio.port}/`, + ``, + ` + publish-pkg-11@1.0.0`, + `2 publish`, + `3 postpublish`, + ``, + ]); + expect(exitCode).toBe(0); +}); diff --git a/test/cli/install/bun-whoami.test.ts b/test/cli/install/bun-whoami.test.ts new file mode 100644 index 00000000000000..bc83c0e552146e --- /dev/null +++ b/test/cli/install/bun-whoami.test.ts @@ -0,0 +1,169 @@ +import { test, expect, beforeAll, beforeEach, afterAll } from "bun:test"; +import { join } from "path"; +import { bunExe, bunEnv as env, VerdaccioRegistry } from "harness"; +import { spawn, write } from "bun"; + +var verdaccio: VerdaccioRegistry; +var packageDir: string; +var packageJson: string; + +beforeAll(async () => { + verdaccio = new VerdaccioRegistry(); + await verdaccio.start(); +}); + +afterAll(() => { + verdaccio.stop(); +}); + +beforeEach(async () => { + ({ packageDir, packageJson } = await verdaccio.createTestDir()); + env.BUN_INSTALL_CACHE_DIR = join(packageDir, ".bun-cache"); + env.BUN_TMPDIR = env.TMPDIR = env.TEMP = join(packageDir, ".bun-tmp"); +}); + +test("can get username", async () => { + const bunfig = await verdaccio.authBunfig("whoami"); + await Promise.all([ + write( + packageJson, + JSON.stringify({ + name: "whoami-pkg", + version: "1.1.1", + }), + ), + write(join(packageDir, "bunfig.toml"), bunfig), + ]); + + const { stdout, 
stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "whoami"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + }); + + const out = await Bun.readableStreamToText(stdout); + expect(out).toBe("whoami\n"); + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + expect(await exited).toBe(0); +}); +test("username from .npmrc", async () => { + // It should report the username from npmrc, even without an account + const bunfig = ` + [install] + cache = false + registry = "http://localhost:${verdaccio.port}/"`; + const npmrc = ` + //localhost:${verdaccio.port}/:username=whoami-npmrc + //localhost:${verdaccio.port}/:_password=123456 + `; + await Promise.all([ + write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })), + write(join(packageDir, "bunfig.toml"), bunfig), + write(join(packageDir, ".npmrc"), npmrc), + ]); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "whoami"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + }); + + const out = await Bun.readableStreamToText(stdout); + expect(out).toBe("whoami-npmrc\n"); + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + expect(await exited).toBe(0); +}); +test("only .npmrc", async () => { + const token = await verdaccio.generateUser("whoami-npmrc", "whoami-npmrc"); + const npmrc = ` + //localhost:${verdaccio.port}/:_authToken=${token} + registry=http://localhost:${verdaccio.port}/`; + await Promise.all([ + write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })), + write(join(packageDir, ".npmrc"), npmrc), + ]); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "whoami"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + }); + const out = await Bun.readableStreamToText(stdout); + expect(out).toBe("whoami-npmrc\n"); + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + expect(await exited).toBe(0); +}); +test("two .npmrc", async () => { + const token = await verdaccio.generateUser("whoami-two-npmrc", "whoami-two-npmrc"); + const packageNpmrc = `registry=http://localhost:${verdaccio.port}/`; + const homeNpmrc = `//localhost:${verdaccio.port}/:_authToken=${token}`; + const homeDir = `${packageDir}/home_dir`; + await Bun.$`mkdir -p ${homeDir}`; + await Promise.all([ + write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })), + write(join(packageDir, ".npmrc"), packageNpmrc), + write(join(homeDir, ".npmrc"), homeNpmrc), + ]); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "whoami"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: { + ...env, + XDG_CONFIG_HOME: `${homeDir}`, + }, + }); + const out = await Bun.readableStreamToText(stdout); + expect(out).toBe("whoami-two-npmrc\n"); + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + expect(await exited).toBe(0); +}); +test("not logged in", async () => { + await write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "whoami"], + cwd: packageDir, + env, + stdout: "pipe", + stderr: "pipe", + }); + const out = await Bun.readableStreamToText(stdout); + expect(out).toBeEmpty(); + const err = await Bun.readableStreamToText(stderr); + expect(err).toBe("error: missing authentication (run `bunx npm login`)\n"); + expect(await exited).toBe(1); +}); +test("invalid token", async () => { + 
// create the user and provide an invalid token + const token = await verdaccio.generateUser("invalid-token", "invalid-token"); + const bunfig = ` + [install] + cache = false + registry = { url = "http://localhost:${verdaccio.port}/", token = "1234567" }`; + await Promise.all([ + write(packageJson, JSON.stringify({ name: "whoami-pkg", version: "1.1.1" })), + write(join(packageDir, "bunfig.toml"), bunfig), + ]); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "whoami"], + cwd: packageDir, + env, + stdout: "pipe", + stderr: "pipe", + }); + const out = await Bun.readableStreamToText(stdout); + expect(out).toBeEmpty(); + const err = await Bun.readableStreamToText(stderr); + expect(err).toBe(`error: failed to authenticate with registry 'http://localhost:${verdaccio.port}/'\n`); + expect(await exited).toBe(1); +}); diff --git a/test/cli/install/registry/packages/zzz-1/package.json b/test/cli/install/registry/packages/zzz-1/package.json new file mode 100644 index 00000000000000..a80425f2e91156 --- /dev/null +++ b/test/cli/install/registry/packages/zzz-1/package.json @@ -0,0 +1,56 @@ +{ + "name": "zzz-1", + "versions": { + "1.0.89": { + "name": "zzz-1", + "version": "1.0.89", + "_id": "zzz-1@1.0.89", + "_nodeVersion": "23.5.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-EPmDE1AhS+Dfwa/KYhcIqkBF66nrEA8qQI924xddQOaRvqZGkBqk4AXHG0gIe8KNqllVYFhs5exY5tKi/MaCDQ==", + "shasum": "ea3eba357864416df74721d989750ce112c4b112", + "tarball": "http://localhost:4873/zzz-1/-/zzz-1-1.0.89.tgz" + }, + "contributors": [] + }, + "1.0.90": { + "name": "zzz-1", + "version": "1.0.90", + "_id": "zzz-1@1.0.90", + "_nodeVersion": "23.5.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-UaWGWc1xwYMtgxAdZbS2jj5s2UWwdcgEzYT0kAlTQR2pHPi48bmrI6ZKRySEEROcHh1sD1HWqqopg1HRIcWXKw==", + "shasum": "65bfff389d760c43a339f34499fb3bc52fcf345e", + "tarball": "http://localhost:4873/zzz-1/-/zzz-1-1.0.90.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-01-09T00:53:52.021Z", + "created": "2025-01-09T00:50:38.582Z", + "1.0.89": "2025-01-09T00:50:38.582Z", + "1.0.90": "2025-01-09T00:53:52.021Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.90" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "zzz-1-1.0.89.tgz": { + "shasum": "ea3eba357864416df74721d989750ce112c4b112", + "version": "1.0.89" + }, + "zzz-1-1.0.90.tgz": { + "shasum": "65bfff389d760c43a339f34499fb3bc52fcf345e", + "version": "1.0.90" + } + }, + "_rev": "", + "_id": "zzz-1", + "readme": "ERROR: No README data found!" 
+} \ No newline at end of file diff --git a/test/cli/install/registry/packages/zzz-1/zzz-1-1.0.89.tgz b/test/cli/install/registry/packages/zzz-1/zzz-1-1.0.89.tgz new file mode 100644 index 00000000000000..7c7267155cd14b Binary files /dev/null and b/test/cli/install/registry/packages/zzz-1/zzz-1-1.0.89.tgz differ diff --git a/test/cli/install/registry/packages/zzz-1/zzz-1-1.0.90.tgz b/test/cli/install/registry/packages/zzz-1/zzz-1-1.0.90.tgz new file mode 100644 index 00000000000000..6ccf97a0c1173d Binary files /dev/null and b/test/cli/install/registry/packages/zzz-1/zzz-1-1.0.90.tgz differ diff --git a/test/cli/install/registry/packages/zzz-2/package.json b/test/cli/install/registry/packages/zzz-2/package.json new file mode 100644 index 00000000000000..32c1802b9d443e --- /dev/null +++ b/test/cli/install/registry/packages/zzz-2/package.json @@ -0,0 +1,41 @@ +{ + "name": "zzz-2", + "versions": { + "1.0.89": { + "name": "zzz-2", + "version": "1.0.89", + "dependencies": { + "zzz-1": "^1.0.89" + }, + "_id": "zzz-2@1.0.89", + "_nodeVersion": "23.5.0", + "_npmVersion": "10.9.2", + "dist": { + "integrity": "sha512-AiBHP+QdotgUq83hsFKLYIu33c9rUP2tXaCZs66QqrkoSO/X09I6RRRV0IUzMlemBCp+WedWvLSOVs8E7DgIag==", + "shasum": "69dbe7e7dd5c3b465977ded8864f34e6539b7468", + "tarball": "http://localhost:4873/zzz-2/-/zzz-2-1.0.89.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2025-01-09T00:51:28.532Z", + "created": "2025-01-09T00:51:28.532Z", + "1.0.89": "2025-01-09T00:51:28.532Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.89" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "zzz-2-1.0.89.tgz": { + "shasum": "69dbe7e7dd5c3b465977ded8864f34e6539b7468", + "version": "1.0.89" + } + }, + "_rev": "", + "_id": "zzz-2", + "readme": "ERROR: No README data found!" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/zzz-2/zzz-2-1.0.89.tgz b/test/cli/install/registry/packages/zzz-2/zzz-2-1.0.89.tgz new file mode 100644 index 00000000000000..cb144d026738ca Binary files /dev/null and b/test/cli/install/registry/packages/zzz-2/zzz-2-1.0.89.tgz differ diff --git a/test/harness.ts b/test/harness.ts index 5fe4cf1a18727a..423cb37623c986 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -551,12 +551,7 @@ Received ${JSON.stringify({ name: onDisk.name, version: onDisk.version })}`, case "npm": const name = dep.is_alias ? 
dep.npm.name : dep.name; if (!Bun.deepMatch({ name, version: pkg.resolution.value }, resolved)) { - if (dep.literal === "*") { - // allow any version, just needs to be resolvable - continue; - } - if (dep.behavior.peer && dep.npm) { - // allow peer dependencies to not match exactly, but still satisfy + if (dep.npm) { if (Bun.semver.satisfies(pkg.resolution.value, dep.npm.version)) continue; } return { @@ -1441,6 +1436,7 @@ export class VerdaccioRegistry { process: ChildProcess | undefined; configPath: string; packagesPath: string; + users: Record<string, string> = {}; constructor(opts?: { configPath?: string; packagesPath?: string; verbose?: boolean }) { this.port = randomPort(); @@ -1493,7 +1489,50 @@ export class VerdaccioRegistry { this.process?.kill(); } + /** + * Creates the user on the registry and returns their auth token. + */ + async generateUser(username: string, password: string): Promise<string> { + if (this.users[username]) { + throw new Error(`User ${username} already exists`); + } else this.users[username] = password; + + const url = `http://localhost:${this.port}/-/user/org.couchdb.user:${username}`; + const user = { + name: username, + password: password, + email: `${username}@example.com`, + }; + + const response = await fetch(url, { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(user), + }); + + if (response.ok) { + const data = await response.json(); + return data.token; + } + + throw new Error(`Failed to create user: ${response.statusText}`); + } + + async authBunfig(user: string) { + const authToken = await this.generateUser(user, user); + return ` + [install] + cache = false + registry = { url = "http://localhost:${this.port}/", token = "${authToken}" } + `; + } + async createTestDir() { + await rm(join(dirname(this.configPath), "htpasswd"), { force: true }); + await rm(join(this.packagesPath, "private-pkg-dont-touch"), { force: true }); + const packageDir = tmpdirSync(); const packageJson = join(packageDir, "package.json"); await write( @@ -1504,7 +1543,7 @@ export class VerdaccioRegistry { registry = "${this.registryUrl()}" `, ); - + this.users = {}; return { packageDir, packageJson }; } } diff --git a/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap b/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap index b1bb52a16fd93d..f593e07e96843c 100644 --- a/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap @@ -22258,17 +22258,6 @@ exports[`ssr works for 100-ish requests 1`] = ` "id": 2, "path": "node_modules/next/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 863, - "package_id": 183, - }, - }, - "depth": 1, - "id": 3, - "path": "node_modules/@types/ws/node_modules", - }, { "dependencies": { "debug": { @@ -22277,7 +22266,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 4, + "id": 3, "path": "node_modules/eslint-import-resolver-node/node_modules", }, { @@ -22292,7 +22281,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 5, + "id": 4, "path": "node_modules/eslint-plugin-import/node_modules", }, { @@ -22307,7 +22296,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 6, + "id": 5, "path": "node_modules/eslint-plugin-react/node_modules", }, { @@ -22322,7 +22311,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 7, + "id": 6, "path": "node_modules/@puppeteer/browsers/node_modules", }, { @@
-22333,7 +22322,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 8, + "id": 7, "path": "node_modules/chokidar/node_modules", }, { @@ -22344,7 +22333,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 9, + "id": 8, "path": "node_modules/fast-glob/node_modules", }, { @@ -22355,7 +22344,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 10, + "id": 9, "path": "node_modules/postcss-load-config/node_modules", }, { @@ -22366,7 +22355,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 11, + "id": 10, "path": "node_modules/glob/node_modules", }, { @@ -22381,7 +22370,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 12, + "id": 11, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules", }, { @@ -22392,7 +22381,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 13, + "id": 12, "path": "node_modules/eslint-module-utils/node_modules", }, { @@ -22403,7 +22392,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 2, - "id": 14, + "id": 13, "path": "node_modules/@puppeteer/browsers/node_modules/semver/node_modules", }, { @@ -22414,7 +22403,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 15, + "id": 14, "path": "node_modules/rimraf/node_modules", }, { @@ -22425,7 +22414,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 2, - "id": 16, + "id": 15, "path": "node_modules/glob/node_modules/minimatch/node_modules", }, { @@ -22436,7 +22425,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 17, + "id": 16, "path": "node_modules/path-scurry/node_modules", }, { @@ -22447,7 +22436,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 2, - "id": 18, + "id": 17, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch/node_modules", }, { @@ -22458,20 +22447,9 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 2, - "id": 19, + "id": 18, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/semver/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 254, - "package_id": 183, - }, - }, - "depth": 1, - "id": 20, - "path": "node_modules/@types/yauzl/node_modules", - }, { "dependencies": { "emoji-regex": { @@ -22480,7 +22458,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 21, + "id": 19, "path": "node_modules/string-width/node_modules", }, { @@ -22499,7 +22477,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 22, + "id": 20, "path": "node_modules/@isaacs/cliui/node_modules", }, { @@ -22510,7 +22488,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 23, + "id": 21, "path": "node_modules/@babel/highlight/node_modules", }, { @@ -22521,7 +22499,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 1, - "id": 24, + "id": 22, "path": "node_modules/string-width-cjs/node_modules", }, { @@ -22532,7 +22510,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 2, - "id": 25, + "id": 23, "path": "node_modules/@isaacs/cliui/node_modules/strip-ansi/node_modules", }, { @@ -22543,7 +22521,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 2, - "id": 26, + "id": 24, "path": "node_modules/@isaacs/cliui/node_modules/wrap-ansi/node_modules", }, { @@ -22562,7 +22540,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 2, - "id": 27, + 
"id": 25, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules", }, { @@ -22573,7 +22551,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 3, - "id": 28, + "id": 26, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules", }, { @@ -22584,7 +22562,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 3, - "id": 29, + "id": 27, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/supports-color/node_modules", }, { @@ -22595,7 +22573,7 @@ exports[`ssr works for 100-ish requests 1`] = ` }, }, "depth": 4, - "id": 30, + "id": 28, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules/color-convert/node_modules", }, ], diff --git a/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap b/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap index 6bacb22ab5647a..2f0b2a02685de8 100644 --- a/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap @@ -22258,17 +22258,6 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` "id": 2, "path": "node_modules/next/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 863, - "package_id": 183, - }, - }, - "depth": 1, - "id": 3, - "path": "node_modules/@types/ws/node_modules", - }, { "dependencies": { "debug": { @@ -22277,7 +22266,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 4, + "id": 3, "path": "node_modules/eslint-import-resolver-node/node_modules", }, { @@ -22292,7 +22281,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 5, + "id": 4, "path": "node_modules/eslint-plugin-import/node_modules", }, { @@ -22307,7 +22296,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 6, + "id": 5, "path": "node_modules/eslint-plugin-react/node_modules", }, { @@ -22322,7 +22311,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 7, + "id": 6, "path": "node_modules/@puppeteer/browsers/node_modules", }, { @@ -22333,7 +22322,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 8, + "id": 7, "path": "node_modules/chokidar/node_modules", }, { @@ -22344,7 +22333,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 9, + "id": 8, "path": "node_modules/fast-glob/node_modules", }, { @@ -22355,7 +22344,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 10, + "id": 9, "path": "node_modules/postcss-load-config/node_modules", }, { @@ -22366,7 +22355,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 11, + "id": 10, "path": "node_modules/glob/node_modules", }, { @@ -22381,7 +22370,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 12, + "id": 11, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules", }, { @@ -22392,7 +22381,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 13, + "id": 12, "path": "node_modules/eslint-module-utils/node_modules", }, { @@ -22403,7 +22392,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 2, - "id": 14, + "id": 13, "path": 
"node_modules/@puppeteer/browsers/node_modules/semver/node_modules", }, { @@ -22414,7 +22403,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 15, + "id": 14, "path": "node_modules/rimraf/node_modules", }, { @@ -22425,7 +22414,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 2, - "id": 16, + "id": 15, "path": "node_modules/glob/node_modules/minimatch/node_modules", }, { @@ -22436,7 +22425,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 17, + "id": 16, "path": "node_modules/path-scurry/node_modules", }, { @@ -22447,7 +22436,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 2, - "id": 18, + "id": 17, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch/node_modules", }, { @@ -22458,20 +22447,9 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 2, - "id": 19, + "id": 18, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/semver/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 254, - "package_id": 183, - }, - }, - "depth": 1, - "id": 20, - "path": "node_modules/@types/yauzl/node_modules", - }, { "dependencies": { "emoji-regex": { @@ -22480,7 +22458,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 21, + "id": 19, "path": "node_modules/string-width/node_modules", }, { @@ -22499,7 +22477,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 22, + "id": 20, "path": "node_modules/@isaacs/cliui/node_modules", }, { @@ -22510,7 +22488,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 23, + "id": 21, "path": "node_modules/@babel/highlight/node_modules", }, { @@ -22521,7 +22499,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 1, - "id": 24, + "id": 22, "path": "node_modules/string-width-cjs/node_modules", }, { @@ -22532,7 +22510,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 2, - "id": 25, + "id": 23, "path": "node_modules/@isaacs/cliui/node_modules/strip-ansi/node_modules", }, { @@ -22543,7 +22521,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 2, - "id": 26, + "id": 24, "path": "node_modules/@isaacs/cliui/node_modules/wrap-ansi/node_modules", }, { @@ -22562,7 +22540,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 2, - "id": 27, + "id": 25, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules", }, { @@ -22573,7 +22551,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 3, - "id": 28, + "id": 26, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules", }, { @@ -22584,7 +22562,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 3, - "id": 29, + "id": 27, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/supports-color/node_modules", }, { @@ -22595,7 +22573,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` }, }, "depth": 4, - "id": 30, + "id": 28, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules/color-convert/node_modules", }, ], diff --git a/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap 
b/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap index fe8eb9c7d8d7dd..ac2c1fb92b528a 100644 --- a/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap @@ -22258,17 +22258,6 @@ exports[`next build works: bun 1`] = ` "id": 2, "path": "node_modules/next/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 863, - "package_id": 183, - }, - }, - "depth": 1, - "id": 3, - "path": "node_modules/@types/ws/node_modules", - }, { "dependencies": { "debug": { @@ -22277,7 +22266,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 4, + "id": 3, "path": "node_modules/eslint-import-resolver-node/node_modules", }, { @@ -22292,7 +22281,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 5, + "id": 4, "path": "node_modules/eslint-plugin-import/node_modules", }, { @@ -22307,7 +22296,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 6, + "id": 5, "path": "node_modules/eslint-plugin-react/node_modules", }, { @@ -22322,7 +22311,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 7, + "id": 6, "path": "node_modules/@puppeteer/browsers/node_modules", }, { @@ -22333,7 +22322,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 8, + "id": 7, "path": "node_modules/chokidar/node_modules", }, { @@ -22344,7 +22333,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 9, + "id": 8, "path": "node_modules/fast-glob/node_modules", }, { @@ -22355,7 +22344,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 10, + "id": 9, "path": "node_modules/postcss-load-config/node_modules", }, { @@ -22366,7 +22355,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 11, + "id": 10, "path": "node_modules/glob/node_modules", }, { @@ -22381,7 +22370,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 12, + "id": 11, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules", }, { @@ -22392,7 +22381,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 13, + "id": 12, "path": "node_modules/eslint-module-utils/node_modules", }, { @@ -22403,7 +22392,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 2, - "id": 14, + "id": 13, "path": "node_modules/@puppeteer/browsers/node_modules/semver/node_modules", }, { @@ -22414,7 +22403,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 15, + "id": 14, "path": "node_modules/rimraf/node_modules", }, { @@ -22425,7 +22414,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 2, - "id": 16, + "id": 15, "path": "node_modules/glob/node_modules/minimatch/node_modules", }, { @@ -22436,7 +22425,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 17, + "id": 16, "path": "node_modules/path-scurry/node_modules", }, { @@ -22447,7 +22436,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 2, - "id": 18, + "id": 17, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch/node_modules", }, { @@ -22458,20 +22447,9 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 2, - "id": 19, + "id": 18, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/semver/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 254, - "package_id": 183, - }, - }, - "depth": 1, - "id": 20, - "path": "node_modules/@types/yauzl/node_modules", - }, { "dependencies": { "emoji-regex": { @@ -22480,7 +22458,7 @@ 
exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 21, + "id": 19, "path": "node_modules/string-width/node_modules", }, { @@ -22499,7 +22477,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 22, + "id": 20, "path": "node_modules/@isaacs/cliui/node_modules", }, { @@ -22510,7 +22488,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 23, + "id": 21, "path": "node_modules/@babel/highlight/node_modules", }, { @@ -22521,7 +22499,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 1, - "id": 24, + "id": 22, "path": "node_modules/string-width-cjs/node_modules", }, { @@ -22532,7 +22510,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 2, - "id": 25, + "id": 23, "path": "node_modules/@isaacs/cliui/node_modules/strip-ansi/node_modules", }, { @@ -22543,7 +22521,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 2, - "id": 26, + "id": 24, "path": "node_modules/@isaacs/cliui/node_modules/wrap-ansi/node_modules", }, { @@ -22562,7 +22540,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 2, - "id": 27, + "id": 25, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules", }, { @@ -22573,7 +22551,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 3, - "id": 28, + "id": 26, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules", }, { @@ -22584,7 +22562,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 3, - "id": 29, + "id": 27, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/supports-color/node_modules", }, { @@ -22595,7 +22573,7 @@ exports[`next build works: bun 1`] = ` }, }, "depth": 4, - "id": 30, + "id": 28, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules/color-convert/node_modules", }, ], @@ -44862,17 +44840,6 @@ exports[`next build works: node 1`] = ` "id": 2, "path": "node_modules/next/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 863, - "package_id": 183, - }, - }, - "depth": 1, - "id": 3, - "path": "node_modules/@types/ws/node_modules", - }, { "dependencies": { "debug": { @@ -44881,7 +44848,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 4, + "id": 3, "path": "node_modules/eslint-import-resolver-node/node_modules", }, { @@ -44896,7 +44863,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 5, + "id": 4, "path": "node_modules/eslint-plugin-import/node_modules", }, { @@ -44911,7 +44878,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 6, + "id": 5, "path": "node_modules/eslint-plugin-react/node_modules", }, { @@ -44926,7 +44893,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 7, + "id": 6, "path": "node_modules/@puppeteer/browsers/node_modules", }, { @@ -44937,7 +44904,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 8, + "id": 7, "path": "node_modules/chokidar/node_modules", }, { @@ -44948,7 +44915,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 9, + "id": 8, "path": "node_modules/fast-glob/node_modules", }, { @@ -44959,7 +44926,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 10, + "id": 9, "path": "node_modules/postcss-load-config/node_modules", }, { @@ -44970,7 +44937,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 11, + "id": 10, "path": "node_modules/glob/node_modules", }, { @@ -44985,7 +44952,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 12, + "id": 
11, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules", }, { @@ -44996,7 +44963,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 13, + "id": 12, "path": "node_modules/eslint-module-utils/node_modules", }, { @@ -45007,7 +44974,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 2, - "id": 14, + "id": 13, "path": "node_modules/@puppeteer/browsers/node_modules/semver/node_modules", }, { @@ -45018,7 +44985,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 15, + "id": 14, "path": "node_modules/rimraf/node_modules", }, { @@ -45029,7 +44996,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 2, - "id": 16, + "id": 15, "path": "node_modules/glob/node_modules/minimatch/node_modules", }, { @@ -45040,7 +45007,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 17, + "id": 16, "path": "node_modules/path-scurry/node_modules", }, { @@ -45051,7 +45018,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 2, - "id": 18, + "id": 17, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch/node_modules", }, { @@ -45062,20 +45029,9 @@ exports[`next build works: node 1`] = ` }, }, "depth": 2, - "id": 19, + "id": 18, "path": "node_modules/@typescript-eslint/typescript-estree/node_modules/semver/node_modules", }, - { - "dependencies": { - "@types/node": { - "id": 254, - "package_id": 183, - }, - }, - "depth": 1, - "id": 20, - "path": "node_modules/@types/yauzl/node_modules", - }, { "dependencies": { "emoji-regex": { @@ -45084,7 +45040,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 21, + "id": 19, "path": "node_modules/string-width/node_modules", }, { @@ -45103,7 +45059,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 22, + "id": 20, "path": "node_modules/@isaacs/cliui/node_modules", }, { @@ -45114,7 +45070,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 23, + "id": 21, "path": "node_modules/@babel/highlight/node_modules", }, { @@ -45125,7 +45081,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 1, - "id": 24, + "id": 22, "path": "node_modules/string-width-cjs/node_modules", }, { @@ -45136,7 +45092,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 2, - "id": 25, + "id": 23, "path": "node_modules/@isaacs/cliui/node_modules/strip-ansi/node_modules", }, { @@ -45147,7 +45103,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 2, - "id": 26, + "id": 24, "path": "node_modules/@isaacs/cliui/node_modules/wrap-ansi/node_modules", }, { @@ -45166,7 +45122,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 2, - "id": 27, + "id": 25, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules", }, { @@ -45177,7 +45133,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 3, - "id": 28, + "id": 26, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules", }, { @@ -45188,7 +45144,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 3, - "id": 29, + "id": 27, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/supports-color/node_modules", }, { @@ -45199,7 +45155,7 @@ exports[`next build works: node 1`] = ` }, }, "depth": 4, - "id": 30, + "id": 28, "path": "node_modules/@babel/highlight/node_modules/chalk/node_modules/ansi-styles/node_modules/color-convert/node_modules", }, ],