Switch to std.HashMap for DirEntry store
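This change swaps the hand-rolled hash_map.StringHashMapUnmanaged behind DirEntry for std.StringHashMapUnmanaged. Two API differences account for most of the diff: std's iterator yields key_ptr/value_ptr pointers rather than key/value fields, and the old getWithHash helper is replaced by std's adapted-context lookups (getAdapted/containsAdapted). A minimal, standalone sketch of the iterator side, assuming nothing from bun itself (the Entry type and the inserted paths are illustrative, not bun's DirEntry store):

const std = @import("std");

// Illustrative value type, standing in for bun's DirEntry.
const Entry = struct {
    kind: enum { file, dir },
};

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    var map = std.StringHashMapUnmanaged(Entry){};
    defer map.deinit(allocator);

    try map.put(allocator, "index.js", .{ .kind = .file });
    try map.put(allocator, "src", .{ .kind = .dir });

    // std's iterator exposes key_ptr/value_ptr pointers, so values are read
    // through entry.value_ptr.* -- the change applied throughout this diff.
    var iter = map.iterator();
    while (iter.next()) |entry| {
        const value = entry.value_ptr.*;
        if (value.kind == .file) {
            std.debug.print("file: {s}\n", .{entry.key_ptr.*});
        }
    }
}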
@@ -392,17 +392,18 @@ pub const RunCommand = struct {
 var has_copied = false;
 var dir_slice: string = "";
 while (iter.next()) |entry| {
-    if (entry.value.kind(&this_bundler.fs.fs) == .file) {
+    const value = entry.value_ptr.*;
+    if (value.kind(&this_bundler.fs.fs) == .file) {
         if (!has_copied) {
-            std.mem.copy(u8, &path_buf, entry.value.dir);
-            dir_slice = path_buf[0..entry.value.dir.len];
-            if (!strings.endsWithChar(entry.value.dir, std.fs.path.sep)) {
-                dir_slice = path_buf[0 .. entry.value.dir.len + 1];
+            std.mem.copy(u8, &path_buf, value.dir);
+            dir_slice = path_buf[0..value.dir.len];
+            if (!strings.endsWithChar(value.dir, std.fs.path.sep)) {
+                dir_slice = path_buf[0 .. value.dir.len + 1];
             }
             has_copied = true;
         }

-        const base = entry.value.base();
+        const base = value.base();
         std.mem.copy(u8, path_buf[dir_slice.len..], base);
         path_buf[dir_slice.len + base.len] = 0;
         var slice = path_buf[0 .. dir_slice.len + base.len :0];
@@ -422,11 +423,12 @@ pub const RunCommand = struct {
 var iter = entries.data.iterator();

 while (iter.next()) |entry| {
-    const name = entry.value.base();
+    const value = entry.value_ptr.*;
+    const name = value.base();
     if (name[0] != '.' and this_bundler.options.loader(std.fs.path.extension(name)).isJavaScriptLike() and
         !strings.contains(name, ".config") and
         !strings.contains(name, ".d.ts") and
-        entry.value.kind(&this_bundler.fs.fs) == .file)
+        value.kind(&this_bundler.fs.fs) == .file)
     {
         _ = try results.getOrPut(this_bundler.fs.filename_store.append(@TypeOf(name), name) catch continue);
     }
@@ -140,7 +140,7 @@ const Scanner = struct {
         var iter = root.entries.data.iterator();
         const fd = root.entries.fd;
         while (iter.next()) |entry| {
-            this.next(entry.value, fd);
+            this.next(entry.value_ptr.*, fd);
         }
     }
 }
src/fs.zig
@@ -19,7 +19,6 @@ const Fs = @This();
 const path_handler = @import("./resolver/resolve_path.zig");
 const PathString = bun.PathString;
 const allocators = @import("./allocators.zig");
-const hash_map = @import("hash_map.zig");

 pub const MAX_PATH_BYTES = bun.MAX_PATH_BYTES;
 pub const PathBuffer = [bun.MAX_PATH_BYTES]u8;
@@ -183,7 +182,7 @@ pub const FileSystem = struct {
     }

     pub const DirEntry = struct {
-        pub const EntryMap = hash_map.StringHashMapUnmanaged(*Entry);
+        pub const EntryMap = std.StringHashMapUnmanaged(*Entry);
         pub const EntryStore = allocators.BSSList(Entry, Preallocate.Counts.files);
         dir: string,
         fd: StoredFileDescriptorType = 0,
@@ -299,9 +298,21 @@ pub const FileSystem = struct {
                 query[i] = std.ascii.toLower(c);
             };

-            const query_hashed = comptime DirEntry.EntryMap.getHash(&query);
+            const query_hashed = comptime std.hash_map.hashString(&query);

-            const result = entry.data.getWithHash(&query, query_hashed) orelse return null;
+            const result = entry.data.getAdapted(
+                @as([]const u8, &query),
+                struct {
+                    pub fn hash(_: @This(), _: []const u8) @TypeOf(query_hashed) {
+                        return query_hashed;
+                    }
+
+                    pub fn eql(_: @This(), _: []const u8, b: []const u8) bool {
+                        return strings.eqlComptime(b, query);
+                    }
+                }{},
+            ) orelse return null;
+
             const basename = result.base();

             if (!strings.eqlComptime(basename, comptime query[0..query_str.len])) {
@@ -324,9 +335,20 @@ pub const FileSystem = struct {
                 query[i] = std.ascii.toLower(c);
             };

-            const query_hashed = comptime DirEntry.EntryMap.getHash(&query);
+            const query_hashed = comptime std.hash_map.hashString(&query);

-            return entry.data.getWithHash(&query, query_hashed) != null;
+            return entry.data.containsAdapted(
+                @as([]const u8, &query),
+                struct {
+                    pub fn hash(_: @This(), _: []const u8) @TypeOf(query_hashed) {
+                        return query_hashed;
+                    }
+
+                    pub fn eql(_: @This(), _: []const u8, b: []const u8) bool {
+                        return strings.eqlComptime(b, query);
+                    }
+                }{},
+            );
         }
     };

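Both fs.zig lookup hunks above replace the old map's getWithHash with std's adapted-context API: the query's hash is computed once (at comptime, via std.hash_map.hashString) and a throwaway context hands that hash, plus an equality check against the fixed query, to the map. A standalone sketch of the same pattern, with an illustrative map and query rather than bun's DirEntry data:

const std = @import("std");

// Standalone sketch of the adapted-lookup pattern: the hash of a fixed,
// comptime-known query is computed once, then supplied to the std hash map
// through a one-off context. Map contents and the query are illustrative.
const query = "package.json";
const query_hashed = std.hash_map.hashString(query);

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    var map = std.StringHashMapUnmanaged(u32){};
    defer map.deinit(allocator);
    try map.put(allocator, "package.json", 1);
    try map.put(allocator, "tsconfig.json", 2);

    // getAdapted() probes with the precomputed hash instead of re-hashing the
    // key; eql() compares each stored candidate key against the fixed query.
    const result = map.getAdapted(
        @as([]const u8, query),
        struct {
            pub fn hash(_: @This(), _: []const u8) @TypeOf(query_hashed) {
                return query_hashed;
            }

            pub fn eql(_: @This(), _: []const u8, b: []const u8) bool {
                return std.mem.eql(u8, b, query);
            }
        }{},
    );

    if (result) |v| {
        std.debug.print("found: {d}\n", .{v});
    } else {
        std.debug.print("not found\n", .{});
    }
}

Reusing the precomputed hash for the probe mirrors what the removed getWithHash helper provided on the custom map; containsAdapted is the same idea when only existence matters.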
@@ -401,7 +401,7 @@ const RouteLoader = struct {
     if (root_dir_info.getEntriesConst()) |entries| {
         var iter = entries.data.iterator();
         outer: while (iter.next()) |entry_ptr| {
-            const entry = entry_ptr.value;
+            const entry = entry_ptr.value_ptr.*;
             if (entry.base()[0] == '.') {
                 continue :outer;
             }