feat: improve rules_js parity
This commit is contained in:
@@ -9,7 +9,10 @@ exports_files([
|
||||
"bun_install.bzl",
|
||||
"bun_script.bzl",
|
||||
"bun_test.bzl",
|
||||
"js_compat.bzl",
|
||||
"js_library.bzl",
|
||||
"js_run_devserver.bzl",
|
||||
"workspace.bzl",
|
||||
])
|
||||
|
||||
bzl_library(
|
||||
@@ -44,7 +47,32 @@ bzl_library(
|
||||
deps = [":js_library_bzl"],
|
||||
)
|
||||
|
||||
bzl_library(
|
||||
name = "js_compat_bzl",
|
||||
srcs = ["js_compat.bzl"],
|
||||
deps = [
|
||||
":bun_binary_bzl",
|
||||
":bun_test_bzl",
|
||||
":js_library_bzl",
|
||||
":js_run_devserver_bzl",
|
||||
],
|
||||
)
|
||||
|
||||
bzl_library(
|
||||
name = "js_library_bzl",
|
||||
srcs = ["js_library.bzl"],
|
||||
)
|
||||
|
||||
bzl_library(
|
||||
name = "js_run_devserver_bzl",
|
||||
srcs = ["js_run_devserver.bzl"],
|
||||
deps = [
|
||||
":js_library_bzl",
|
||||
":workspace_bzl",
|
||||
],
|
||||
)
|
||||
|
||||
bzl_library(
|
||||
name = "workspace_bzl",
|
||||
srcs = ["workspace.bzl"],
|
||||
)
|
||||
|
||||
@@ -1,67 +1,55 @@
|
||||
"""Rule for running JS/TS scripts with Bun."""
|
||||
|
||||
load("//internal:js_library.bzl", "collect_js_runfiles")
|
||||
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
|
||||
|
||||
def _shell_quote(value):
|
||||
return "'" + value.replace("'", "'\"'\"'") + "'"
|
||||
|
||||
def _bun_binary_impl(ctx):
|
||||
toolchain = ctx.toolchains["//bun:toolchain_type"]
|
||||
bun_bin = toolchain.bun.bun_bin
|
||||
entry_point = ctx.file.entry_point
|
||||
dep_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
|
||||
workspace_info = create_bun_workspace_info(
|
||||
ctx,
|
||||
extra_files = ctx.files.data + [bun_bin],
|
||||
primary_file = entry_point,
|
||||
)
|
||||
|
||||
command = """
|
||||
trap cleanup_runtime_workspace EXIT
|
||||
cd "${runtime_exec_dir}"
|
||||
exec "${bun_bin}" --bun run "${primary_source}" "$@"
|
||||
"""
|
||||
if ctx.attr.args:
|
||||
command = """
|
||||
trap cleanup_runtime_workspace EXIT
|
||||
cd "${runtime_exec_dir}"
|
||||
exec "${bun_bin}" --bun run "${primary_source}" __DEFAULT_ARGS__ "$@"
|
||||
""".replace("__DEFAULT_ARGS__", " ".join([_shell_quote(arg) for arg in ctx.attr.args]))
|
||||
|
||||
launcher = ctx.actions.declare_file(ctx.label.name)
|
||||
ctx.actions.write(
|
||||
output = launcher,
|
||||
is_executable = True,
|
||||
content = """#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
|
||||
workspace_root="${{runfiles_dir}}/_main"
|
||||
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
|
||||
entry_point="${{runfiles_dir}}/_main/{entry_short_path}"
|
||||
|
||||
resolve_entrypoint_workdir() {{
|
||||
local dir
|
||||
dir="$(dirname "${{entry_point}}")"
|
||||
while [[ "${{dir}}" == "${{workspace_root}}"* ]]; do
|
||||
if [[ -f "${{dir}}/.env" || -f "${{dir}}/package.json" ]]; then
|
||||
echo "${{dir}}"
|
||||
return 0
|
||||
fi
|
||||
if [[ "${{dir}}" == "${{workspace_root}}" ]]; then
|
||||
break
|
||||
fi
|
||||
dir="$(dirname "${{dir}}")"
|
||||
done
|
||||
echo "$(dirname "${{entry_point}}")"
|
||||
}}
|
||||
|
||||
working_dir="{working_dir}"
|
||||
if [[ "${{working_dir}}" == "entry_point" ]]; then
|
||||
cd "$(resolve_entrypoint_workdir)"
|
||||
else
|
||||
cd "${{workspace_root}}"
|
||||
fi
|
||||
|
||||
exec "${{bun_bin}}" --bun run "${{entry_point}}" "$@"
|
||||
""".format(
|
||||
content = render_workspace_setup(
|
||||
bun_short_path = bun_bin.short_path,
|
||||
entry_short_path = entry_point.short_path,
|
||||
working_dir = ctx.attr.working_dir,
|
||||
),
|
||||
)
|
||||
|
||||
transitive_files = []
|
||||
if ctx.attr.node_modules:
|
||||
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
|
||||
|
||||
runfiles = ctx.runfiles(
|
||||
files = [bun_bin, entry_point] + ctx.files.data,
|
||||
transitive_files = depset(transitive = transitive_files),
|
||||
primary_source_short_path = entry_point.short_path,
|
||||
working_dir_mode = ctx.attr.working_dir,
|
||||
) + command,
|
||||
)
|
||||
|
||||
return [
|
||||
workspace_info,
|
||||
DefaultInfo(
|
||||
executable = launcher,
|
||||
runfiles = runfiles,
|
||||
runfiles = workspace_runfiles(
|
||||
ctx,
|
||||
workspace_info,
|
||||
direct_files = [launcher],
|
||||
transitive_files = dep_runfiles,
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -85,6 +73,9 @@ Use this rule for non-test scripts and CLIs that should run via `bazel run`.
|
||||
allow_files = True,
|
||||
doc = "Additional runtime files required by the program.",
|
||||
),
|
||||
"deps": attr.label_list(
|
||||
doc = "Library dependencies required by the program.",
|
||||
),
|
||||
"working_dir": attr.string(
|
||||
default = "workspace",
|
||||
values = ["workspace", "entry_point"],
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Rule for bundling JS/TS sources with Bun."""
|
||||
|
||||
load("//internal:js_library.bzl", "BunSourcesInfo")
|
||||
load("//internal:js_library.bzl", "collect_js_sources")
|
||||
|
||||
|
||||
def _output_name(target_name, entry):
|
||||
@@ -16,10 +16,7 @@ def _bun_bundle_impl(ctx):
|
||||
if ctx.attr.node_modules:
|
||||
transitive_inputs.append(ctx.attr.node_modules[DefaultInfo].files)
|
||||
for dep in ctx.attr.deps:
|
||||
if BunSourcesInfo in dep:
|
||||
transitive_inputs.append(dep[BunSourcesInfo].transitive_sources)
|
||||
else:
|
||||
transitive_inputs.append(dep[DefaultInfo].files)
|
||||
transitive_inputs.append(collect_js_sources(dep))
|
||||
|
||||
outputs = []
|
||||
for entry in ctx.files.entry_points:
|
||||
|
||||
@@ -1,101 +1,76 @@
|
||||
"""Rule for running JS/TS scripts with Bun in watch mode for development."""
|
||||
|
||||
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
|
||||
|
||||
def _bun_dev_impl(ctx):
|
||||
toolchain = ctx.toolchains["//bun:toolchain_type"]
|
||||
bun_bin = toolchain.bun.bun_bin
|
||||
entry_point = ctx.file.entry_point
|
||||
workspace_info = create_bun_workspace_info(
|
||||
ctx,
|
||||
extra_files = ctx.files.data + ctx.files.restart_on + [bun_bin],
|
||||
primary_file = entry_point,
|
||||
)
|
||||
|
||||
restart_watch_paths = "\n".join([path.short_path for path in ctx.files.restart_on])
|
||||
|
||||
launcher = ctx.actions.declare_file(ctx.label.name)
|
||||
ctx.actions.write(
|
||||
output = launcher,
|
||||
is_executable = True,
|
||||
content = """#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
|
||||
workspace_root="${{runfiles_dir}}/_main"
|
||||
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
|
||||
entry_point="${{runfiles_dir}}/_main/{entry_short_path}"
|
||||
|
||||
resolve_entrypoint_workdir() {{
|
||||
local dir
|
||||
dir="$(dirname "${{entry_point}}")"
|
||||
while [[ "${{dir}}" == "${{workspace_root}}"* ]]; do
|
||||
if [[ -f "${{dir}}/.env" || -f "${{dir}}/package.json" ]]; then
|
||||
echo "${{dir}}"
|
||||
return 0
|
||||
fi
|
||||
if [[ "${{dir}}" == "${{workspace_root}}" ]]; then
|
||||
break
|
||||
fi
|
||||
dir="$(dirname "${{dir}}")"
|
||||
done
|
||||
echo "$(dirname "${{entry_point}}")"
|
||||
}}
|
||||
|
||||
working_dir="{working_dir}"
|
||||
if [[ "${{working_dir}}" == "entry_point" ]]; then
|
||||
cd "$(resolve_entrypoint_workdir)"
|
||||
else
|
||||
cd "${{workspace_root}}"
|
||||
fi
|
||||
|
||||
watch_mode="{watch_mode}"
|
||||
if [[ "${{watch_mode}}" == "hot" ]]; then
|
||||
command = """
|
||||
watch_mode="__WATCH_MODE__"
|
||||
if [[ "${watch_mode}" == "hot" ]]; then
|
||||
dev_flag="--hot"
|
||||
else
|
||||
dev_flag="--watch"
|
||||
fi
|
||||
|
||||
run_dev() {{
|
||||
exec "${{bun_bin}}" --bun "${{dev_flag}}" run "${{entry_point}}" "$@"
|
||||
}}
|
||||
|
||||
if [[ {restart_count} -eq 0 ]]; then
|
||||
run_dev "$@"
|
||||
if [[ __RESTART_COUNT__ -eq 0 ]]; then
|
||||
trap cleanup_runtime_workspace EXIT
|
||||
cd "${runtime_exec_dir}"
|
||||
exec "${bun_bin}" --bun "${dev_flag}" run "${primary_source}" "$@"
|
||||
fi
|
||||
|
||||
readarray -t restart_paths <<'EOF_RESTART_PATHS'
|
||||
{restart_watch_paths}
|
||||
__RESTART_PATHS__
|
||||
EOF_RESTART_PATHS
|
||||
|
||||
file_mtime() {{
|
||||
local p="$1"
|
||||
if stat -f '%m' "${{p}}" >/dev/null 2>&1; then
|
||||
stat -f '%m' "${{p}}"
|
||||
file_mtime() {
|
||||
local path="$1"
|
||||
if stat -f '%m' "${path}" >/dev/null 2>&1; then
|
||||
stat -f '%m' "${path}"
|
||||
return 0
|
||||
fi
|
||||
stat -c '%Y' "${{p}}"
|
||||
}}
|
||||
stat -c '%Y' "${path}"
|
||||
}
|
||||
|
||||
declare -A mtimes
|
||||
for rel in "${{restart_paths[@]}}"; do
|
||||
path="${{runfiles_dir}}/_main/${{rel}}"
|
||||
if [[ -e "${{path}}" ]]; then
|
||||
mtimes["${{rel}}"]="$(file_mtime "${{path}}")"
|
||||
for rel in "${restart_paths[@]}"; do
|
||||
path="${runfiles_dir}/_main/${rel}"
|
||||
if [[ -e "${path}" ]]; then
|
||||
mtimes["${rel}"]="$(file_mtime "${path}")"
|
||||
else
|
||||
mtimes["${{rel}}"]="missing"
|
||||
mtimes["${rel}"]="missing"
|
||||
fi
|
||||
done
|
||||
|
||||
child_pid=""
|
||||
restart_child() {{
|
||||
if [[ -n "${{child_pid}}" ]] && kill -0 "${{child_pid}}" 2>/dev/null; then
|
||||
kill "${{child_pid}}"
|
||||
wait "${{child_pid}}" || true
|
||||
restart_child() {
|
||||
if [[ -n "${child_pid}" ]] && kill -0 "${child_pid}" 2>/dev/null; then
|
||||
kill "${child_pid}"
|
||||
wait "${child_pid}" || true
|
||||
fi
|
||||
"${{bun_bin}}" --bun "${{dev_flag}}" run "${{entry_point}}" "$@" &
|
||||
child_pid=$!
|
||||
}}
|
||||
|
||||
cleanup() {{
|
||||
if [[ -n "${{child_pid}}" ]] && kill -0 "${{child_pid}}" 2>/dev/null; then
|
||||
kill "${{child_pid}}"
|
||||
wait "${{child_pid}}" || true
|
||||
(
|
||||
cd "${runtime_exec_dir}"
|
||||
exec "${bun_bin}" --bun "${dev_flag}" run "${primary_source}" "$@"
|
||||
) &
|
||||
child_pid=$!
|
||||
}
|
||||
|
||||
cleanup() {
|
||||
if [[ -n "${child_pid}" ]] && kill -0 "${child_pid}" 2>/dev/null; then
|
||||
kill "${child_pid}"
|
||||
wait "${child_pid}" || true
|
||||
fi
|
||||
}}
|
||||
cleanup_runtime_workspace
|
||||
}
|
||||
|
||||
trap cleanup EXIT INT TERM
|
||||
|
||||
@@ -104,45 +79,46 @@ restart_child "$@"
|
||||
while true; do
|
||||
sleep 1
|
||||
changed=0
|
||||
for rel in "${{restart_paths[@]}}"; do
|
||||
path="${{runfiles_dir}}/_main/${{rel}}"
|
||||
if [[ -e "${{path}}" ]]; then
|
||||
current="$(file_mtime "${{path}}")"
|
||||
for rel in "${restart_paths[@]}"; do
|
||||
path="${runfiles_dir}/_main/${rel}"
|
||||
if [[ -e "${path}" ]]; then
|
||||
current="$(file_mtime "${path}")"
|
||||
else
|
||||
current="missing"
|
||||
fi
|
||||
if [[ "${{current}}" != "${{mtimes[${{rel}}]}}" ]]; then
|
||||
mtimes["${{rel}}"]="${{current}}"
|
||||
if [[ "${current}" != "${mtimes[${rel}]}" ]]; then
|
||||
mtimes["${rel}"]="${current}"
|
||||
changed=1
|
||||
fi
|
||||
done
|
||||
if [[ "${{changed}}" -eq 1 ]]; then
|
||||
if [[ "${changed}" -eq 1 ]]; then
|
||||
restart_child "$@"
|
||||
fi
|
||||
done
|
||||
""".format(
|
||||
bun_short_path = bun_bin.short_path,
|
||||
entry_short_path = entry_point.short_path,
|
||||
watch_mode = ctx.attr.watch_mode,
|
||||
working_dir = ctx.attr.working_dir,
|
||||
restart_count = len(ctx.files.restart_on),
|
||||
restart_watch_paths = restart_watch_paths,
|
||||
),
|
||||
""".replace("__WATCH_MODE__", ctx.attr.watch_mode).replace(
|
||||
"__RESTART_COUNT__",
|
||||
str(len(ctx.files.restart_on)),
|
||||
).replace(
|
||||
"__RESTART_PATHS__",
|
||||
restart_watch_paths,
|
||||
)
|
||||
|
||||
transitive_files = []
|
||||
if ctx.attr.node_modules:
|
||||
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
|
||||
|
||||
runfiles = ctx.runfiles(
|
||||
files = [bun_bin, entry_point] + ctx.files.data + ctx.files.restart_on,
|
||||
transitive_files = depset(transitive = transitive_files),
|
||||
launcher = ctx.actions.declare_file(ctx.label.name)
|
||||
ctx.actions.write(
|
||||
output = launcher,
|
||||
is_executable = True,
|
||||
content = render_workspace_setup(
|
||||
bun_short_path = bun_bin.short_path,
|
||||
primary_source_short_path = entry_point.short_path,
|
||||
working_dir_mode = ctx.attr.working_dir,
|
||||
) + command,
|
||||
)
|
||||
|
||||
return [
|
||||
workspace_info,
|
||||
DefaultInfo(
|
||||
executable = launcher,
|
||||
runfiles = runfiles,
|
||||
runfiles = workspace_runfiles(ctx, workspace_info, direct_files = [launcher]),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -6,6 +6,13 @@ _DEFAULT_INSTALL_INPUTS = [
|
||||
"bunfig.toml",
|
||||
]
|
||||
|
||||
_MANIFEST_DEP_FIELDS = [
|
||||
"dependencies",
|
||||
"devDependencies",
|
||||
"optionalDependencies",
|
||||
"peerDependencies",
|
||||
]
|
||||
|
||||
def _normalize_path(path):
|
||||
normalized = path.replace("\\", "/")
|
||||
if normalized.endswith("/") and normalized != "/":
|
||||
@@ -115,6 +122,30 @@ def _validate_catalog_shape(field, value):
|
||||
def _copy_json_value(value):
|
||||
return json.decode(json.encode(value))
|
||||
|
||||
def _package_target_name(package_name):
|
||||
sanitized = package_name
|
||||
sanitized = sanitized.replace("@", "at_")
|
||||
sanitized = sanitized.replace("/", "_")
|
||||
sanitized = sanitized.replace("-", "_")
|
||||
sanitized = sanitized.replace(".", "_")
|
||||
sanitized = sanitized.replace("__", "_").replace("__", "_").replace("__", "_")
|
||||
sanitized = sanitized.strip("_")
|
||||
if not sanitized:
|
||||
sanitized = "package"
|
||||
return "npm__" + sanitized
|
||||
|
||||
def _manifest_dependency_names(manifest):
|
||||
names = {}
|
||||
for field in _MANIFEST_DEP_FIELDS:
|
||||
dependencies = manifest.get(field)
|
||||
if dependencies == None:
|
||||
continue
|
||||
if type(dependencies) != type({}):
|
||||
fail("bun_install: `{}` must be an object when present".format(field))
|
||||
for name in dependencies.keys():
|
||||
names[name] = True
|
||||
return names
|
||||
|
||||
def _normalized_root_manifest(repository_ctx, package_json):
|
||||
manifest = json.decode(repository_ctx.read(package_json))
|
||||
workspaces = manifest.get("workspaces")
|
||||
@@ -147,6 +178,7 @@ def _materialize_workspace_packages(repository_ctx, package_json):
|
||||
package_root = package_json.dirname
|
||||
package_root_str = str(package_root)
|
||||
written = {}
|
||||
workspace_packages = {}
|
||||
|
||||
for pattern in _workspace_patterns(repository_ctx, package_json):
|
||||
segments = pattern.split("/")
|
||||
@@ -168,6 +200,15 @@ def _materialize_workspace_packages(repository_ctx, package_json):
|
||||
repository_ctx.read(workspace_package_json),
|
||||
)
|
||||
written[relative_dir] = True
|
||||
manifest = json.decode(repository_ctx.read(workspace_package_json))
|
||||
package_name = manifest.get("name")
|
||||
workspace_packages[relative_dir] = package_name if type(package_name) == type("") else ""
|
||||
|
||||
package_dirs = sorted(workspace_packages.keys())
|
||||
return struct(
|
||||
package_dirs = package_dirs,
|
||||
package_names = [workspace_packages[package_dir] for package_dir in package_dirs if workspace_packages[package_dir]],
|
||||
)
|
||||
|
||||
def _materialize_install_inputs(repository_ctx, package_json):
|
||||
package_root = package_json.dirname
|
||||
@@ -218,6 +259,68 @@ def _select_bun_binary(repository_ctx):
|
||||
|
||||
fail("Unsupported host platform: os={}, arch={}".format(repository_ctx.os.name, repository_ctx.os.arch))
|
||||
|
||||
def _render_package_targets_file(package_names):
|
||||
lines = ["NPM_PACKAGE_TARGETS = {"]
|
||||
for package_name in package_names:
|
||||
lines.append(' "{}": "{}",'.format(package_name, _package_target_name(package_name)))
|
||||
lines.extend([
|
||||
"}",
|
||||
"",
|
||||
])
|
||||
return "\n".join(lines)
|
||||
|
||||
def _render_repo_defs_bzl(repo_name):
|
||||
return """load(":packages.bzl", "NPM_PACKAGE_TARGETS")
|
||||
|
||||
def package_target_name(package_name):
|
||||
return NPM_PACKAGE_TARGETS.get(package_name)
|
||||
|
||||
def npm_link_all_packages(name = "node_modules", imported_links = []):
|
||||
if not native.existing_rule(name):
|
||||
native.alias(
|
||||
name = name,
|
||||
actual = "@{repo_name}//:node_modules",
|
||||
)
|
||||
|
||||
requested = {{}}
|
||||
for package_name in imported_links:
|
||||
requested[package_name] = True
|
||||
|
||||
for package_name, target_name in NPM_PACKAGE_TARGETS.items():
|
||||
if imported_links and package_name not in requested:
|
||||
continue
|
||||
if native.existing_rule(target_name):
|
||||
continue
|
||||
native.alias(
|
||||
name = target_name,
|
||||
actual = "@{repo_name}//:%s" % target_name,
|
||||
)
|
||||
""".format(repo_name = repo_name)
|
||||
|
||||
def _render_repo_build(package_names):
|
||||
lines = [
|
||||
'exports_files(["defs.bzl", "packages.bzl"])',
|
||||
"",
|
||||
"filegroup(",
|
||||
' name = "node_modules",',
|
||||
' srcs = glob(["**/node_modules/**"], allow_empty = False),',
|
||||
' visibility = ["//visibility:public"],',
|
||||
")",
|
||||
"",
|
||||
]
|
||||
|
||||
for package_name in package_names:
|
||||
lines.extend([
|
||||
"filegroup(",
|
||||
' name = "{}",'.format(_package_target_name(package_name)),
|
||||
' srcs = glob(["node_modules/{}/**"], allow_empty = True),'.format(package_name),
|
||||
' visibility = ["//visibility:public"],',
|
||||
")",
|
||||
"",
|
||||
])
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _bun_install_repository_impl(repository_ctx):
|
||||
package_json = repository_ctx.path(repository_ctx.attr.package_json)
|
||||
bun_lockfile = repository_ctx.path(repository_ctx.attr.bun_lockfile)
|
||||
@@ -230,6 +333,7 @@ def _bun_install_repository_impl(repository_ctx):
|
||||
|
||||
bun_bin = _select_bun_binary(repository_ctx)
|
||||
lockfile_name = bun_lockfile.basename
|
||||
root_manifest = json.decode(repository_ctx.read(package_json))
|
||||
|
||||
if lockfile_name not in ["bun.lock", "bun.lockb"]:
|
||||
lockfile_name = "bun.lock"
|
||||
@@ -237,7 +341,7 @@ def _bun_install_repository_impl(repository_ctx):
|
||||
repository_ctx.file("package.json", _normalized_root_manifest(repository_ctx, package_json))
|
||||
repository_ctx.symlink(bun_lockfile, lockfile_name)
|
||||
_materialize_install_inputs(repository_ctx, package_json)
|
||||
_materialize_workspace_packages(repository_ctx, package_json)
|
||||
workspace_packages = _materialize_workspace_packages(repository_ctx, package_json)
|
||||
|
||||
install_args = [str(bun_bin), "--bun", "install", "--frozen-lockfile", "--no-progress"]
|
||||
if repository_ctx.attr.isolated_home:
|
||||
@@ -263,15 +367,26 @@ stderr:
|
||||
""".format(result.stdout, result.stderr))
|
||||
|
||||
repository_ctx.file(
|
||||
"BUILD.bazel",
|
||||
"""filegroup(
|
||||
name = "node_modules",
|
||||
srcs = glob(["**/node_modules/**"], allow_empty = False),
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
""",
|
||||
"node_modules/.rules_bun/install.json",
|
||||
json.encode({
|
||||
"bun_lockfile": lockfile_name,
|
||||
"package_json": "package.json",
|
||||
"workspace_package_dirs": workspace_packages.package_dirs,
|
||||
}) + "\n",
|
||||
)
|
||||
|
||||
package_names = {}
|
||||
for package_name in _manifest_dependency_names(root_manifest).keys():
|
||||
package_names[package_name] = True
|
||||
for package_name in workspace_packages.package_names:
|
||||
package_names[package_name] = True
|
||||
|
||||
sorted_package_names = sorted(package_names.keys())
|
||||
visible_repo_name = repository_ctx.attr.visible_repo_name or repository_ctx.name
|
||||
repository_ctx.file("packages.bzl", _render_package_targets_file(sorted_package_names))
|
||||
repository_ctx.file("defs.bzl", _render_repo_defs_bzl(visible_repo_name))
|
||||
repository_ctx.file("BUILD.bazel", _render_repo_build(sorted_package_names))
|
||||
|
||||
bun_install_repository = repository_rule(
|
||||
implementation = _bun_install_repository_impl,
|
||||
attrs = {
|
||||
@@ -279,6 +394,7 @@ bun_install_repository = repository_rule(
|
||||
"bun_lockfile": attr.label(mandatory = True, allow_single_file = True),
|
||||
"install_inputs": attr.label_list(allow_files = True),
|
||||
"isolated_home": attr.bool(default = True),
|
||||
"visible_repo_name": attr.string(),
|
||||
"bun_linux_x64": attr.label(default = "@bun_linux_x64//:bun-linux-x64/bun", allow_single_file = True),
|
||||
"bun_linux_aarch64": attr.label(default = "@bun_linux_aarch64//:bun-linux-aarch64/bun", allow_single_file = True),
|
||||
"bun_darwin_x64": attr.label(default = "@bun_darwin_x64//:bun-darwin-x64/bun", allow_single_file = True),
|
||||
@@ -313,4 +429,5 @@ def bun_install(name, package_json, bun_lockfile, install_inputs = [], isolated_
|
||||
bun_lockfile = bun_lockfile,
|
||||
install_inputs = install_inputs,
|
||||
isolated_home = isolated_home,
|
||||
visible_repo_name = name,
|
||||
)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Rule for running package.json scripts with Bun."""
|
||||
|
||||
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
|
||||
|
||||
def _shell_quote(value):
|
||||
return "'" + value.replace("'", "'\"'\"'") + "'"
|
||||
@@ -9,302 +10,37 @@ def _bun_script_impl(ctx):
|
||||
toolchain = ctx.toolchains["//bun:toolchain_type"]
|
||||
bun_bin = toolchain.bun.bun_bin
|
||||
package_json = ctx.file.package_json
|
||||
workspace_info = create_bun_workspace_info(
|
||||
ctx,
|
||||
extra_files = ctx.files.data + [bun_bin],
|
||||
package_dir_hint = package_json.dirname or ".",
|
||||
package_json = package_json,
|
||||
primary_file = package_json,
|
||||
)
|
||||
command = """
|
||||
trap cleanup_runtime_workspace EXIT
|
||||
cd "${runtime_exec_dir}"
|
||||
exec "${bun_bin}" --bun run __SCRIPT__ "$@"
|
||||
""".replace("__SCRIPT__", _shell_quote(ctx.attr.script))
|
||||
|
||||
launcher = ctx.actions.declare_file(ctx.label.name)
|
||||
ctx.actions.write(
|
||||
output = launcher,
|
||||
is_executable = True,
|
||||
content = """#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
|
||||
workspace_root="${{runfiles_dir}}/_main"
|
||||
workspace_root="$(cd "${{workspace_root}}" && pwd -P)"
|
||||
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
|
||||
package_json="${{runfiles_dir}}/_main/{package_json_short_path}"
|
||||
package_dir="$(cd "$(dirname "${{package_json}}")" && pwd -P)"
|
||||
package_rel_dir="{package_rel_dir}"
|
||||
|
||||
select_primary_node_modules() {{
|
||||
local selected=""
|
||||
local fallback=""
|
||||
while IFS= read -r node_modules_dir; do
|
||||
if [[ -z "${{fallback}}" ]]; then
|
||||
fallback="${{node_modules_dir}}"
|
||||
fi
|
||||
|
||||
if [[ ! -d "${{node_modules_dir}}/.bun" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
if [[ "${{node_modules_dir}}" != *"/runfiles/_main/"* ]]; then
|
||||
selected="${{node_modules_dir}}"
|
||||
break
|
||||
fi
|
||||
|
||||
if [[ -z "${{selected}}" ]]; then
|
||||
selected="${{node_modules_dir}}"
|
||||
fi
|
||||
done < <(find -L "${{runfiles_dir}}" -type d -name node_modules 2>/dev/null | sort)
|
||||
|
||||
if [[ -n "${{selected}}" ]]; then
|
||||
echo "${{selected}}"
|
||||
else
|
||||
echo "${{fallback}}"
|
||||
fi
|
||||
}}
|
||||
|
||||
primary_node_modules="$(select_primary_node_modules)"
|
||||
|
||||
runtime_workspace="$(mktemp -d)"
|
||||
cleanup_runtime_workspace() {{
|
||||
rm -rf "${{runtime_workspace}}"
|
||||
}}
|
||||
trap cleanup_runtime_workspace EXIT
|
||||
|
||||
runtime_package_dir="${{runtime_workspace}}/${{package_rel_dir}}"
|
||||
mkdir -p "${{runtime_package_dir}}"
|
||||
cp -RL "${{package_dir}}/." "${{runtime_package_dir}}/"
|
||||
|
||||
workspace_package_map="${{runtime_workspace}}/workspace-packages.tsv"
|
||||
python3 - "${{runtime_package_dir}}" >"${{workspace_package_map}}" <<'PY'
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
root = sys.argv[1]
|
||||
|
||||
for dirpath, dirnames, filenames in os.walk(root):
|
||||
dirnames[:] = [name for name in dirnames if name != "node_modules"]
|
||||
if "package.json" not in filenames:
|
||||
continue
|
||||
|
||||
manifest_path = os.path.join(dirpath, "package.json")
|
||||
try:
|
||||
with open(manifest_path, "r", encoding="utf-8") as manifest_file:
|
||||
package_name = json.load(manifest_file).get("name")
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
if isinstance(package_name, str):
|
||||
print(f"{{package_name}}\t{{dirpath}}")
|
||||
PY
|
||||
|
||||
install_repo_root=""
|
||||
if [[ -n "${{primary_node_modules}}" ]]; then
|
||||
install_repo_root="$(dirname "${{primary_node_modules}}")"
|
||||
ln -s "${{primary_node_modules}}" "${{runtime_workspace}}/node_modules"
|
||||
fi
|
||||
|
||||
workspace_package_dir_for_source() {{
|
||||
local source="$1"
|
||||
local manifest_path="${{source}}/package.json"
|
||||
local package_name=""
|
||||
local workspace_dir=""
|
||||
|
||||
if [[ ! -f "${{manifest_path}}" ]]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
package_name="$(python3 - "${{manifest_path}}" <<'PY'
|
||||
import json
|
||||
import sys
|
||||
|
||||
try:
|
||||
with open(sys.argv[1], "r", encoding="utf-8") as manifest_file:
|
||||
package_name = json.load(manifest_file).get("name", "")
|
||||
except Exception:
|
||||
package_name = ""
|
||||
|
||||
if isinstance(package_name, str):
|
||||
print(package_name)
|
||||
PY
|
||||
)"
|
||||
|
||||
workspace_dir="$(awk -F '\t' -v name="$package_name" '$1 == name {{ print $2; exit }}' "${{workspace_package_map}}")"
|
||||
if [[ -n "${{package_name}}" && -n "${{workspace_dir}}" ]]; then
|
||||
echo "${{workspace_dir}}"
|
||||
return 0
|
||||
fi
|
||||
|
||||
return 1
|
||||
}}
|
||||
|
||||
link_node_modules_entry() {{
|
||||
local source="$1"
|
||||
local destination="$2"
|
||||
local workspace_target=""
|
||||
|
||||
rm -rf "${{destination}}"
|
||||
workspace_target="$(workspace_package_dir_for_source "${{source}}" || true)"
|
||||
if [[ -n "${{workspace_target}}" ]]; then
|
||||
ln -s "${{workspace_target}}" "${{destination}}"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ -L "${{source}}" ]]; then
|
||||
ln -s "$(readlink "${{source}}")" "${{destination}}"
|
||||
else
|
||||
ln -s "${{source}}" "${{destination}}"
|
||||
fi
|
||||
}}
|
||||
|
||||
mirror_node_modules_dir() {{
|
||||
local source_dir="$1"
|
||||
local destination_dir="$2"
|
||||
local entry=""
|
||||
local scoped_entry=""
|
||||
|
||||
rm -rf "${{destination_dir}}"
|
||||
mkdir -p "${{destination_dir}}"
|
||||
|
||||
shopt -s dotglob nullglob
|
||||
for entry in "${{source_dir}}"/* "${{source_dir}}"/.[!.]* "${{source_dir}}"/..?*; do
|
||||
local entry_name="$(basename "${{entry}}")"
|
||||
if [[ "${{entry_name}}" == "." || "${{entry_name}}" == ".." ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
if [[ -d "${{entry}}" && ! -L "${{entry}}" && "${{entry_name}}" == @* ]]; then
|
||||
mkdir -p "${{destination_dir}}/${{entry_name}}"
|
||||
for scoped_entry in "${{entry}}"/* "${{entry}}"/.[!.]* "${{entry}}"/..?*; do
|
||||
local scoped_name="$(basename "${{scoped_entry}}")"
|
||||
if [[ "${{scoped_name}}" == "." || "${{scoped_name}}" == ".." ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
link_node_modules_entry "${{scoped_entry}}" "${{destination_dir}}/${{entry_name}}/${{scoped_name}}"
|
||||
done
|
||||
continue
|
||||
fi
|
||||
|
||||
link_node_modules_entry "${{entry}}" "${{destination_dir}}/${{entry_name}}"
|
||||
done
|
||||
shopt -u dotglob nullglob
|
||||
}}
|
||||
|
||||
find_node_modules() {{
|
||||
local dir="$1"
|
||||
local root="$2"
|
||||
|
||||
while [[ "$dir" == "$root"* ]]; do
|
||||
if [[ -d "$dir/node_modules" ]]; then
|
||||
echo "$dir/node_modules"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ "$dir" == "$root" ]]; then
|
||||
break
|
||||
fi
|
||||
|
||||
dir="$(dirname "$dir")"
|
||||
done
|
||||
|
||||
return 1
|
||||
}}
|
||||
|
||||
find_install_repo_node_modules() {{
|
||||
local repo_root="$1"
|
||||
local rel_dir="$2"
|
||||
local candidate="${{rel_dir}}"
|
||||
|
||||
while [[ -n "${{candidate}}" ]]; do
|
||||
if [[ -d "${{repo_root}}/${{candidate}}/node_modules" ]]; then
|
||||
echo "${{repo_root}}/${{candidate}}/node_modules"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ "${{candidate}}" != */* ]]; then
|
||||
break
|
||||
fi
|
||||
|
||||
candidate="${{candidate#*/}}"
|
||||
done
|
||||
|
||||
if [[ -d "${{repo_root}}/node_modules" ]]; then
|
||||
echo "${{repo_root}}/node_modules"
|
||||
return 0
|
||||
fi
|
||||
|
||||
return 1
|
||||
}}
|
||||
|
||||
mirror_install_repo_workspace_node_modules() {{
|
||||
local repo_root="$1"
|
||||
local destination_root="$2"
|
||||
|
||||
while IFS= read -r install_node_modules; do
|
||||
local rel_path="${{install_node_modules#${{repo_root}}/}}"
|
||||
local destination="${{destination_root}}/${{rel_path}}"
|
||||
|
||||
mkdir -p "$(dirname "${{destination}}")"
|
||||
mirror_node_modules_dir "${{install_node_modules}}" "${{destination}}"
|
||||
done < <(find "${{repo_root}}" \
|
||||
-path "${{repo_root}}/node_modules" -prune -o \
|
||||
-type d -name node_modules -print 2>/dev/null | sort)
|
||||
}}
|
||||
|
||||
resolved_install_node_modules=""
|
||||
if [[ -n "${{install_repo_root}}" ]]; then
|
||||
resolved_install_node_modules="$(find_install_repo_node_modules "${{install_repo_root}}" "${{package_rel_dir}}" || true)"
|
||||
fi
|
||||
|
||||
if [[ -n "${{resolved_install_node_modules}}" ]]; then
|
||||
mirror_node_modules_dir "${{resolved_install_node_modules}}" "${{runtime_package_dir}}/node_modules"
|
||||
else
|
||||
resolved_node_modules="$(find_node_modules "${{runtime_package_dir}}" "${{runtime_workspace}}" || true)"
|
||||
if [[ -n "${{resolved_node_modules}}" && "${{resolved_node_modules}}" != "${{runtime_package_dir}}/node_modules" ]]; then
|
||||
mirror_node_modules_dir "${{resolved_node_modules}}" "${{runtime_package_dir}}/node_modules"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${{install_repo_root}}" ]]; then
|
||||
mirror_install_repo_workspace_node_modules "${{install_repo_root}}" "${{runtime_package_dir}}"
|
||||
fi
|
||||
|
||||
path_entries=()
|
||||
if [[ -d "${{runtime_package_dir}}/node_modules/.bin" ]]; then
|
||||
path_entries+=("${{runtime_package_dir}}/node_modules/.bin")
|
||||
fi
|
||||
|
||||
if [[ -d "${{runtime_workspace}}/node_modules/.bin" && "${{runtime_workspace}}/node_modules/.bin" != "${{runtime_package_dir}}/node_modules/.bin" ]]; then
|
||||
path_entries+=("${{runtime_workspace}}/node_modules/.bin")
|
||||
fi
|
||||
|
||||
if [[ ${{#path_entries[@]}} -gt 0 ]]; then
|
||||
export PATH="$(IFS=:; echo "${{path_entries[*]}}"):${{PATH}}"
|
||||
fi
|
||||
|
||||
working_dir="{working_dir}"
|
||||
if [[ "${{working_dir}}" == "package" ]]; then
|
||||
cd "${{runtime_package_dir}}"
|
||||
else
|
||||
cd "${{runtime_workspace}}"
|
||||
fi
|
||||
|
||||
exec "${{bun_bin}}" --bun run {script} "$@"
|
||||
""".format(
|
||||
content = render_workspace_setup(
|
||||
bun_short_path = bun_bin.short_path,
|
||||
package_dir_hint = package_json.dirname or ".",
|
||||
package_json_short_path = package_json.short_path,
|
||||
package_rel_dir = package_json.dirname,
|
||||
working_dir = ctx.attr.working_dir,
|
||||
script = _shell_quote(ctx.attr.script),
|
||||
),
|
||||
)
|
||||
|
||||
transitive_files = []
|
||||
if ctx.attr.node_modules:
|
||||
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
|
||||
|
||||
runfiles = ctx.runfiles(
|
||||
files = [bun_bin, package_json] + ctx.files.data,
|
||||
transitive_files = depset(transitive = transitive_files),
|
||||
primary_source_short_path = package_json.short_path,
|
||||
working_dir_mode = ctx.attr.working_dir,
|
||||
) + command,
|
||||
)
|
||||
|
||||
return [
|
||||
workspace_info,
|
||||
DefaultInfo(
|
||||
executable = launcher,
|
||||
runfiles = runfiles,
|
||||
runfiles = workspace_runfiles(ctx, workspace_info, direct_files = [launcher]),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Rule for running test suites with Bun."""
|
||||
|
||||
load("//internal:js_library.bzl", "BunSourcesInfo")
|
||||
load("//internal:js_library.bzl", "collect_js_runfiles")
|
||||
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
|
||||
|
||||
|
||||
def _shell_quote(value):
|
||||
@@ -10,53 +11,58 @@ def _shell_quote(value):
|
||||
def _bun_test_impl(ctx):
|
||||
toolchain = ctx.toolchains["//bun:toolchain_type"]
|
||||
bun_bin = toolchain.bun.bun_bin
|
||||
primary_file = ctx.files.srcs[0]
|
||||
dep_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
|
||||
workspace_info = create_bun_workspace_info(
|
||||
ctx,
|
||||
extra_files = ctx.files.srcs + ctx.files.data + [bun_bin],
|
||||
primary_file = primary_file,
|
||||
)
|
||||
|
||||
src_args = " ".join([_shell_quote(src.short_path) for src in ctx.files.srcs])
|
||||
command = """
|
||||
trap cleanup_runtime_workspace EXIT
|
||||
cd "${runtime_workspace}"
|
||||
test_args=(__SRC_ARGS__)
|
||||
|
||||
if [[ -n "${TESTBRIDGE_TEST_ONLY:-}" && -n "${COVERAGE_DIR:-}" ]]; then
|
||||
exec "${bun_bin}" --bun test "${test_args[@]}" --test-name-pattern "${TESTBRIDGE_TEST_ONLY}" --coverage "$@"
|
||||
fi
|
||||
if [[ -n "${TESTBRIDGE_TEST_ONLY:-}" ]]; then
|
||||
exec "${bun_bin}" --bun test "${test_args[@]}" --test-name-pattern "${TESTBRIDGE_TEST_ONLY}" "$@"
|
||||
fi
|
||||
if [[ -n "${COVERAGE_DIR:-}" ]]; then
|
||||
exec "${bun_bin}" --bun test "${test_args[@]}" --coverage "$@"
|
||||
fi
|
||||
exec "${bun_bin}" --bun test "${test_args[@]}" "$@"
|
||||
""".replace("__SRC_ARGS__", src_args)
|
||||
if ctx.attr.args:
|
||||
default_args = "\n".join(['test_args+=({})'.format(_shell_quote(arg)) for arg in ctx.attr.args])
|
||||
command = command.replace(
|
||||
'test_args=(__SRC_ARGS__)',
|
||||
'test_args=(__SRC_ARGS__)\n' + default_args,
|
||||
)
|
||||
|
||||
launcher = ctx.actions.declare_file(ctx.label.name)
|
||||
ctx.actions.write(
|
||||
output = launcher,
|
||||
is_executable = True,
|
||||
content = """#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
|
||||
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
|
||||
cd "${{runfiles_dir}}/_main"
|
||||
|
||||
if [[ -n "${{TESTBRIDGE_TEST_ONLY:-}}" && -n "${{COVERAGE_DIR:-}}" ]]; then
|
||||
exec "${{bun_bin}}" --bun test {src_args} --test-name-pattern "${{TESTBRIDGE_TEST_ONLY}}" --coverage "$@"
|
||||
fi
|
||||
if [[ -n "${{TESTBRIDGE_TEST_ONLY:-}}" ]]; then
|
||||
exec "${{bun_bin}}" --bun test {src_args} --test-name-pattern "${{TESTBRIDGE_TEST_ONLY}}" "$@"
|
||||
fi
|
||||
if [[ -n "${{COVERAGE_DIR:-}}" ]]; then
|
||||
exec "${{bun_bin}}" --bun test {src_args} --coverage "$@"
|
||||
fi
|
||||
exec "${{bun_bin}}" --bun test {src_args} "$@"
|
||||
""".format(
|
||||
bun_short_path = bun_bin.short_path,
|
||||
src_args = src_args,
|
||||
),
|
||||
content = render_workspace_setup(
|
||||
bun_short_path = bun_bin.short_path,
|
||||
primary_source_short_path = primary_file.short_path,
|
||||
working_dir_mode = "workspace",
|
||||
) + command,
|
||||
)
|
||||
|
||||
transitive_files = []
|
||||
if ctx.attr.node_modules:
|
||||
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
|
||||
for dep in ctx.attr.deps:
|
||||
if BunSourcesInfo in dep:
|
||||
transitive_files.append(dep[BunSourcesInfo].transitive_sources)
|
||||
else:
|
||||
transitive_files.append(dep[DefaultInfo].files)
|
||||
|
||||
runfiles = ctx.runfiles(
|
||||
files = [bun_bin] + ctx.files.srcs + ctx.files.data,
|
||||
transitive_files = depset(transitive = transitive_files),
|
||||
)
|
||||
|
||||
return [
|
||||
workspace_info,
|
||||
DefaultInfo(
|
||||
executable = launcher,
|
||||
runfiles = runfiles,
|
||||
runfiles = workspace_runfiles(
|
||||
ctx,
|
||||
workspace_info,
|
||||
direct_files = [launcher],
|
||||
transitive_files = dep_runfiles,
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
29
internal/js_compat.bzl
Normal file
29
internal/js_compat.bzl
Normal file
@@ -0,0 +1,29 @@
|
||||
"""rules_js-style compatibility exports backed by Bun."""
|
||||
|
||||
load("//internal:bun_binary.bzl", _bun_binary = "bun_binary")
|
||||
load("//internal:bun_test.bzl", _bun_test = "bun_test")
|
||||
load("//internal:js_library.bzl", _JsInfo = "JsInfo", _js_library = "js_library", _ts_library = "ts_library")
|
||||
load("//internal:js_run_devserver.bzl", _js_run_devserver = "js_run_devserver")
|
||||
|
||||
JsInfo = _JsInfo
|
||||
js_library = _js_library
|
||||
ts_library = _ts_library
|
||||
js_run_devserver = _js_run_devserver
|
||||
|
||||
def js_binary(name, **kwargs):
|
||||
_bun_binary(name = name, **kwargs)
|
||||
|
||||
def js_test(name, entry_point = None, srcs = None, **kwargs):
|
||||
if entry_point != None:
|
||||
if srcs != None:
|
||||
fail("js_test accepts either `entry_point` or `srcs`, but not both")
|
||||
srcs = [entry_point]
|
||||
|
||||
if srcs == None:
|
||||
fail("js_test requires `entry_point` or `srcs`")
|
||||
|
||||
_bun_test(
|
||||
name = name,
|
||||
srcs = srcs,
|
||||
**kwargs
|
||||
)
|
||||
@@ -1,23 +1,74 @@
|
||||
"""Lightweight JS/TS source grouping rules."""
|
||||
|
||||
JsInfo = provider(
|
||||
doc = "Provides transitive JavaScript/TypeScript metadata for Bun and JS compatibility rules.",
|
||||
fields = {
|
||||
"sources": "Direct source files owned by this target.",
|
||||
"transitive_sources": "Transitive source files from this target and its deps.",
|
||||
"types": "Direct type files owned by this target.",
|
||||
"transitive_types": "Transitive type files from this target and its deps.",
|
||||
"data_files": "Direct runtime data files owned by this target.",
|
||||
"transitive_runfiles": "Transitive runtime files from this target and its deps.",
|
||||
},
|
||||
)
|
||||
|
||||
BunSourcesInfo = provider(
|
||||
"Provides transitive sources for Bun libraries.",
|
||||
fields = ["transitive_sources"],
|
||||
)
|
||||
|
||||
def collect_js_sources(dep):
|
||||
if JsInfo in dep:
|
||||
return dep[JsInfo].transitive_sources
|
||||
if BunSourcesInfo in dep:
|
||||
return dep[BunSourcesInfo].transitive_sources
|
||||
return dep[DefaultInfo].files
|
||||
|
||||
def collect_js_runfiles(dep):
|
||||
if JsInfo in dep:
|
||||
return dep[JsInfo].transitive_runfiles
|
||||
if BunSourcesInfo in dep:
|
||||
return dep[BunSourcesInfo].transitive_sources
|
||||
return dep[DefaultInfo].files
|
||||
|
||||
def _bun_library_impl(ctx):
|
||||
transitive_sources = [
|
||||
dep[BunSourcesInfo].transitive_sources
|
||||
transitive_sources = [collect_js_sources(dep) for dep in ctx.attr.deps]
|
||||
transitive_types = [
|
||||
dep[JsInfo].transitive_types
|
||||
for dep in ctx.attr.deps
|
||||
if BunSourcesInfo in dep
|
||||
if JsInfo in dep
|
||||
]
|
||||
transitive_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
|
||||
|
||||
all_sources = depset(
|
||||
direct = ctx.files.srcs,
|
||||
transitive = transitive_sources,
|
||||
)
|
||||
all_types = depset(
|
||||
direct = ctx.files.types,
|
||||
transitive = transitive_types,
|
||||
)
|
||||
all_runfiles = depset(
|
||||
direct = ctx.files.srcs + ctx.files.types + ctx.files.data,
|
||||
transitive = transitive_runfiles,
|
||||
)
|
||||
default_files = depset(
|
||||
direct = ctx.files.srcs + ctx.files.types + ctx.files.data,
|
||||
transitive = transitive_sources + transitive_types + transitive_runfiles,
|
||||
)
|
||||
|
||||
js_info = JsInfo(
|
||||
sources = depset(ctx.files.srcs),
|
||||
transitive_sources = all_sources,
|
||||
types = depset(ctx.files.types),
|
||||
transitive_types = all_types,
|
||||
data_files = depset(ctx.files.data),
|
||||
transitive_runfiles = all_runfiles,
|
||||
)
|
||||
return [
|
||||
js_info,
|
||||
BunSourcesInfo(transitive_sources = all_sources),
|
||||
DefaultInfo(files = all_sources),
|
||||
DefaultInfo(files = default_files),
|
||||
]
|
||||
|
||||
js_library = rule(
|
||||
@@ -28,6 +79,14 @@ js_library = rule(
|
||||
allow_files = [".js", ".jsx", ".mjs", ".cjs"],
|
||||
doc = "JavaScript source files in this library.",
|
||||
),
|
||||
"types": attr.label_list(
|
||||
allow_files = [".d.ts"],
|
||||
doc = "Optional declaration files associated with this library.",
|
||||
),
|
||||
"data": attr.label_list(
|
||||
allow_files = True,
|
||||
doc = "Optional runtime files propagated to dependents.",
|
||||
),
|
||||
"deps": attr.label_list(
|
||||
doc = "Other Bun source libraries to include transitively.",
|
||||
),
|
||||
@@ -42,6 +101,14 @@ ts_library = rule(
|
||||
allow_files = [".ts", ".tsx"],
|
||||
doc = "TypeScript source files in this library.",
|
||||
),
|
||||
"types": attr.label_list(
|
||||
allow_files = [".d.ts"],
|
||||
doc = "Optional declaration files associated with this library.",
|
||||
),
|
||||
"data": attr.label_list(
|
||||
allow_files = True,
|
||||
doc = "Optional runtime files propagated to dependents.",
|
||||
),
|
||||
"deps": attr.label_list(
|
||||
doc = "Other Bun source libraries to include transitively.",
|
||||
),
|
||||
|
||||
100
internal/js_run_devserver.bzl
Normal file
100
internal/js_run_devserver.bzl
Normal file
@@ -0,0 +1,100 @@
|
||||
"""Compatibility rule for running an executable target as a dev server."""
|
||||
|
||||
load("//internal:js_library.bzl", "collect_js_runfiles")
|
||||
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
|
||||
|
||||
def _shell_quote(value):
|
||||
return "'" + value.replace("'", "'\"'\"'") + "'"
|
||||
|
||||
def _js_run_devserver_impl(ctx):
|
||||
toolchain = ctx.toolchains["//bun:toolchain_type"]
|
||||
bun_bin = toolchain.bun.bun_bin
|
||||
package_json = ctx.file.package_json
|
||||
dep_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
|
||||
tool_default_info = ctx.attr.tool[DefaultInfo]
|
||||
|
||||
workspace_info = create_bun_workspace_info(
|
||||
ctx,
|
||||
primary_file = package_json or tool_default_info.files_to_run.executable,
|
||||
package_json = package_json,
|
||||
package_dir_hint = ctx.attr.package_dir_hint,
|
||||
extra_files = ctx.files.data + [bun_bin, tool_default_info.files_to_run.executable],
|
||||
)
|
||||
|
||||
tool_workspace = ctx.attr.tool.label.workspace_name or "_main"
|
||||
tool_path = "{}/{}".format(tool_workspace, tool_default_info.files_to_run.executable.short_path)
|
||||
default_args = " ".join([_shell_quote(arg) for arg in ctx.attr.args])
|
||||
|
||||
launcher = ctx.actions.declare_file(ctx.label.name)
|
||||
ctx.actions.write(
|
||||
output = launcher,
|
||||
is_executable = True,
|
||||
content = render_workspace_setup(
|
||||
bun_short_path = bun_bin.short_path,
|
||||
primary_source_short_path = package_json.short_path if package_json else tool_default_info.files_to_run.executable.short_path,
|
||||
package_json_short_path = package_json.short_path if package_json else "",
|
||||
package_dir_hint = ctx.attr.package_dir_hint,
|
||||
working_dir_mode = ctx.attr.working_dir,
|
||||
) + """
|
||||
trap cleanup_runtime_workspace EXIT
|
||||
cd "${runtime_exec_dir}"
|
||||
tool="${runfiles_dir}/__TOOL_SHORT_PATH__"
|
||||
exec "${tool}" __DEFAULT_ARGS__ "$@"
|
||||
""".replace("__TOOL_SHORT_PATH__", tool_path).replace("__DEFAULT_ARGS__", default_args),
|
||||
)
|
||||
|
||||
return [
|
||||
workspace_info,
|
||||
DefaultInfo(
|
||||
executable = launcher,
|
||||
runfiles = workspace_runfiles(
|
||||
ctx,
|
||||
workspace_info,
|
||||
direct_files = [launcher, tool_default_info.files_to_run.executable],
|
||||
transitive_files = dep_runfiles,
|
||||
).merge(tool_default_info.default_runfiles),
|
||||
),
|
||||
]
|
||||
|
||||
js_run_devserver = rule(
|
||||
implementation = _js_run_devserver_impl,
|
||||
doc = """Runs an executable target from a staged JS workspace.
|
||||
|
||||
This is a Bun-backed compatibility adapter for `rules_js`-style devserver
|
||||
targets. It stages the same runtime workspace as the Bun rules, then executes
|
||||
the provided tool with any default arguments.
|
||||
""",
|
||||
attrs = {
|
||||
"tool": attr.label(
|
||||
mandatory = True,
|
||||
executable = True,
|
||||
cfg = "target",
|
||||
doc = "Executable target to launch as the dev server.",
|
||||
),
|
||||
"package_json": attr.label(
|
||||
allow_single_file = True,
|
||||
doc = "Optional package.json used to resolve the package working directory.",
|
||||
),
|
||||
"package_dir_hint": attr.string(
|
||||
default = ".",
|
||||
doc = "Optional package-relative directory hint when package_json is not supplied.",
|
||||
),
|
||||
"node_modules": attr.label(
|
||||
doc = "Optional label providing package files from a node_modules tree, typically produced by bun_install or npm_translate_lock, in runfiles.",
|
||||
),
|
||||
"deps": attr.label_list(
|
||||
doc = "Library dependencies required by the dev server.",
|
||||
),
|
||||
"data": attr.label_list(
|
||||
allow_files = True,
|
||||
doc = "Additional runtime files required by the dev server.",
|
||||
),
|
||||
"working_dir": attr.string(
|
||||
default = "workspace",
|
||||
values = ["workspace", "package"],
|
||||
doc = "Working directory at runtime: Bazel runfiles workspace root or the resolved package directory.",
|
||||
),
|
||||
},
|
||||
executable = True,
|
||||
toolchains = ["//bun:toolchain_type"],
|
||||
)
|
||||
649
internal/workspace.bzl
Normal file
649
internal/workspace.bzl
Normal file
@@ -0,0 +1,649 @@
|
||||
"""Shared Bun workspace metadata and launcher helpers."""
|
||||
|
||||
BunWorkspaceInfo = provider(
|
||||
doc = "Workspace/runtime metadata shared by Bun rules and adapters.",
|
||||
fields = {
|
||||
"install_metadata_file": "Optional install metadata file from bun_install.",
|
||||
"metadata_file": "Rule-local metadata file describing the staged workspace inputs.",
|
||||
"node_modules_files": "Depset of node_modules files from bun_install.",
|
||||
"package_dir_hint": "Package-relative directory when known at analysis time.",
|
||||
"package_json": "Package manifest file when explicitly provided.",
|
||||
"primary_file": "Primary source file used to resolve the runtime package context.",
|
||||
"runtime_files": "Depset of runtime files required to stage the workspace.",
|
||||
},
|
||||
)
|
||||
|
||||
_WORKSPACE_SETUP_TEMPLATE = """#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
runfiles_dir="${RUNFILES_DIR:-$0.runfiles}"
|
||||
workspace_root="${runfiles_dir}/_main"
|
||||
workspace_root="$(cd "${workspace_root}" && pwd -P)"
|
||||
bun_bin="${runfiles_dir}/_main/__BUN_SHORT_PATH__"
|
||||
primary_source=""
|
||||
if [[ -n "__PRIMARY_SOURCE_SHORT_PATH__" ]]; then
|
||||
primary_source="${runfiles_dir}/_main/__PRIMARY_SOURCE_SHORT_PATH__"
|
||||
fi
|
||||
package_json=""
|
||||
if [[ -n "__PACKAGE_JSON_SHORT_PATH__" ]]; then
|
||||
package_json="${runfiles_dir}/_main/__PACKAGE_JSON_SHORT_PATH__"
|
||||
fi
|
||||
package_rel_dir_hint="__PACKAGE_DIR_HINT__"
|
||||
working_dir_mode="__WORKING_DIR_MODE__"
|
||||
|
||||
normalize_rel_dir() {
|
||||
local value="$1"
|
||||
if [[ -z "${value}" || "${value}" == "." ]]; then
|
||||
echo "."
|
||||
else
|
||||
echo "${value#./}"
|
||||
fi
|
||||
}
|
||||
|
||||
dirname_rel_dir() {
|
||||
local value
|
||||
value="$(normalize_rel_dir "$1")"
|
||||
if [[ "${value}" == "." || "${value}" != */* ]]; then
|
||||
echo "."
|
||||
return 0
|
||||
fi
|
||||
echo "${value%/*}"
|
||||
}
|
||||
|
||||
first_path_component() {
|
||||
local value
|
||||
value="$(normalize_rel_dir "$1")"
|
||||
if [[ "${value}" == "." ]]; then
|
||||
echo ""
|
||||
return 0
|
||||
fi
|
||||
echo "${value%%/*}"
|
||||
}
|
||||
|
||||
rel_dir_from_abs_path() {
|
||||
local absolute_path="$1"
|
||||
if [[ "${absolute_path}" == "${workspace_root}" ]]; then
|
||||
echo "."
|
||||
return 0
|
||||
fi
|
||||
echo "${absolute_path#"${workspace_root}/"}"
|
||||
}
|
||||
|
||||
find_package_rel_dir_for_path() {
|
||||
local path="$1"
|
||||
local dir="$1"
|
||||
if [[ -f "${dir}" ]]; then
|
||||
dir="$(dirname "${dir}")"
|
||||
fi
|
||||
|
||||
while [[ "${dir}" == "${workspace_root}"* ]]; do
|
||||
if [[ -f "${dir}/package.json" ]]; then
|
||||
rel_dir_from_abs_path "${dir}"
|
||||
return 0
|
||||
fi
|
||||
if [[ "${dir}" == "${workspace_root}" ]]; then
|
||||
break
|
||||
fi
|
||||
dir="$(dirname "${dir}")"
|
||||
done
|
||||
|
||||
rel_dir_from_abs_path "$(dirname "${path}")"
|
||||
}
|
||||
|
||||
find_working_rel_dir_for_path() {
|
||||
local path="$1"
|
||||
local dir="$1"
|
||||
if [[ -f "${dir}" ]]; then
|
||||
dir="$(dirname "${dir}")"
|
||||
fi
|
||||
|
||||
while [[ "${dir}" == "${workspace_root}"* ]]; do
|
||||
if [[ -f "${dir}/.env" || -f "${dir}/package.json" ]]; then
|
||||
rel_dir_from_abs_path "${dir}"
|
||||
return 0
|
||||
fi
|
||||
if [[ "${dir}" == "${workspace_root}" ]]; then
|
||||
break
|
||||
fi
|
||||
dir="$(dirname "${dir}")"
|
||||
done
|
||||
|
||||
rel_dir_from_abs_path "$(dirname "${path}")"
|
||||
}
|
||||
|
||||
select_primary_node_modules() {
|
||||
local selected=""
|
||||
local fallback=""
|
||||
while IFS= read -r node_modules_dir; do
|
||||
if [[ -z "${fallback}" ]]; then
|
||||
fallback="${node_modules_dir}"
|
||||
fi
|
||||
|
||||
if [[ ! -d "${node_modules_dir}/.bun" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
if [[ "${node_modules_dir}" != *"/runfiles/_main/"* ]]; then
|
||||
selected="${node_modules_dir}"
|
||||
break
|
||||
fi
|
||||
|
||||
if [[ -z "${selected}" ]]; then
|
||||
selected="${node_modules_dir}"
|
||||
fi
|
||||
done < <(find -L "${runfiles_dir}" -type d -name node_modules 2>/dev/null | sort)
|
||||
|
||||
if [[ -n "${selected}" ]]; then
|
||||
echo "${selected}"
|
||||
else
|
||||
echo "${fallback}"
|
||||
fi
|
||||
}
|
||||
|
||||
link_top_level_entries() {
|
||||
local source_root="$1"
|
||||
local destination_root="$2"
|
||||
local skipped_entry="$3"
|
||||
local entry=""
|
||||
local entry_name=""
|
||||
|
||||
shopt -s dotglob nullglob
|
||||
for entry in "${source_root}"/* "${source_root}"/.[!.]* "${source_root}"/..?*; do
|
||||
entry_name="$(basename "${entry}")"
|
||||
if [[ "${entry_name}" == "." || "${entry_name}" == ".." ]]; then
|
||||
continue
|
||||
fi
|
||||
if [[ -n "${skipped_entry}" && "${entry_name}" == "${skipped_entry}" ]]; then
|
||||
continue
|
||||
fi
|
||||
ln -s "${entry}" "${destination_root}/${entry_name}"
|
||||
done
|
||||
shopt -u dotglob nullglob
|
||||
}
|
||||
|
||||
materialize_package_path() {
|
||||
local source_root="$1"
|
||||
local destination_root="$2"
|
||||
local package_rel_dir
|
||||
package_rel_dir="$(normalize_rel_dir "$3")"
|
||||
|
||||
if [[ "${package_rel_dir}" == "." ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
local source_cursor="${source_root}"
|
||||
local destination_cursor="${destination_root}"
|
||||
local parts=()
|
||||
local current="${package_rel_dir}"
|
||||
|
||||
while [[ -n "${current}" ]]; do
|
||||
if [[ "${current}" == */* ]]; then
|
||||
parts+=("${current%%/*}")
|
||||
current="${current#*/}"
|
||||
else
|
||||
parts+=("${current}")
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
local index=0
|
||||
while [[ ${index} -lt $((${#parts[@]} - 1)) ]]; do
|
||||
local part="${parts[${index}]}"
|
||||
local next_part="${parts[$((index + 1))]}"
|
||||
source_cursor="${source_cursor}/${part}"
|
||||
destination_cursor="${destination_cursor}/${part}"
|
||||
mkdir -p "${destination_cursor}"
|
||||
|
||||
local sibling=""
|
||||
local sibling_name=""
|
||||
shopt -s dotglob nullglob
|
||||
for sibling in "${source_cursor}"/* "${source_cursor}"/.[!.]* "${source_cursor}"/..?*; do
|
||||
sibling_name="$(basename "${sibling}")"
|
||||
if [[ "${sibling_name}" == "." || "${sibling_name}" == ".." || "${sibling_name}" == "${next_part}" ]]; then
|
||||
continue
|
||||
fi
|
||||
if [[ ! -e "${destination_cursor}/${sibling_name}" ]]; then
|
||||
ln -s "${sibling}" "${destination_cursor}/${sibling_name}"
|
||||
fi
|
||||
done
|
||||
shopt -u dotglob nullglob
|
||||
index=$((index + 1))
|
||||
done
|
||||
|
||||
mkdir -p "${destination_root}/${package_rel_dir}"
|
||||
}
|
||||
|
||||
materialize_directory_entries() {
|
||||
local source_root="$1"
|
||||
local destination_root="$2"
|
||||
local entry=""
|
||||
local entry_name=""
|
||||
|
||||
mkdir -p "${destination_root}"
|
||||
shopt -s dotglob nullglob
|
||||
for entry in "${source_root}"/* "${source_root}"/.[!.]* "${source_root}"/..?*; do
|
||||
entry_name="$(basename "${entry}")"
|
||||
if [[ "${entry_name}" == "." || "${entry_name}" == ".." ]]; then
|
||||
continue
|
||||
fi
|
||||
rm -rf "${destination_root}/${entry_name}"
|
||||
ln -s "${entry}" "${destination_root}/${entry_name}"
|
||||
done
|
||||
shopt -u dotglob nullglob
|
||||
}
|
||||
|
||||
stage_workspace_view() {
|
||||
local source_root="$1"
|
||||
local destination_root="$2"
|
||||
local package_rel_dir
|
||||
package_rel_dir="$(normalize_rel_dir "$3")"
|
||||
local skipped_entry
|
||||
skipped_entry="$(first_path_component "${package_rel_dir}")"
|
||||
|
||||
link_top_level_entries "${source_root}" "${destination_root}" "${skipped_entry}"
|
||||
|
||||
if [[ "${package_rel_dir}" == "." ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
materialize_package_path "${source_root}" "${destination_root}" "${package_rel_dir}"
|
||||
materialize_directory_entries "${source_root}/${package_rel_dir}" "${destination_root}/${package_rel_dir}"
|
||||
}
|
||||
|
||||
build_workspace_package_map() {
|
||||
local root="$1"
|
||||
local out="$2"
|
||||
|
||||
python3 - "${root}" >"${out}" <<'PY'
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
root = os.path.abspath(sys.argv[1])
|
||||
|
||||
for dirpath, dirnames, filenames in os.walk(root):
|
||||
dirnames[:] = [name for name in dirnames if name != "node_modules"]
|
||||
if "package.json" not in filenames:
|
||||
continue
|
||||
|
||||
manifest_path = os.path.join(dirpath, "package.json")
|
||||
try:
|
||||
with open(manifest_path, "r", encoding="utf-8") as manifest_file:
|
||||
package_name = json.load(manifest_file).get("name")
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
if not isinstance(package_name, str):
|
||||
continue
|
||||
|
||||
rel_dir = os.path.relpath(dirpath, root)
|
||||
if rel_dir == ".":
|
||||
rel_dir = "."
|
||||
print(f"{package_name}\t{rel_dir}")
|
||||
PY
|
||||
}
|
||||
|
||||
workspace_package_rel_dir_for_source() {
|
||||
local source="$1"
|
||||
local manifest_path="${source}/package.json"
|
||||
local package_name=""
|
||||
|
||||
if [[ ! -f "${manifest_path}" ]]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
package_name="$(python3 - "${manifest_path}" <<'PY'
|
||||
import json
|
||||
import sys
|
||||
|
||||
try:
|
||||
with open(sys.argv[1], "r", encoding="utf-8") as manifest_file:
|
||||
package_name = json.load(manifest_file).get("name", "")
|
||||
except Exception:
|
||||
package_name = ""
|
||||
|
||||
if isinstance(package_name, str):
|
||||
print(package_name)
|
||||
PY
|
||||
)"
|
||||
|
||||
if [[ -z "${package_name}" ]]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
awk -F '\t' -v name="${package_name}" '$1 == name { print $2; exit }' "${workspace_package_map}"
|
||||
}
|
||||
|
||||
link_node_modules_entry() {
|
||||
local source="$1"
|
||||
local destination="$2"
|
||||
local workspace_rel_dir=""
|
||||
|
||||
rm -rf "${destination}"
|
||||
workspace_rel_dir="$(workspace_package_rel_dir_for_source "${source}" || true)"
|
||||
if [[ -n "${workspace_rel_dir}" ]]; then
|
||||
ln -s "${runtime_workspace}/${workspace_rel_dir}" "${destination}"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ -L "${source}" ]]; then
|
||||
ln -s "$(readlink "${source}")" "${destination}"
|
||||
else
|
||||
ln -s "${source}" "${destination}"
|
||||
fi
|
||||
}
|
||||
|
||||
mirror_node_modules_dir() {
|
||||
local source_dir="$1"
|
||||
local destination_dir="$2"
|
||||
local entry=""
|
||||
local entry_name=""
|
||||
local scoped_entry=""
|
||||
local scoped_name=""
|
||||
|
||||
rm -rf "${destination_dir}"
|
||||
mkdir -p "${destination_dir}"
|
||||
|
||||
shopt -s dotglob nullglob
|
||||
for entry in "${source_dir}"/* "${source_dir}"/.[!.]* "${source_dir}"/..?*; do
|
||||
entry_name="$(basename "${entry}")"
|
||||
if [[ "${entry_name}" == "." || "${entry_name}" == ".." || "${entry_name}" == ".rules_bun" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
if [[ -d "${entry}" && ! -L "${entry}" && "${entry_name}" == @* ]]; then
|
||||
mkdir -p "${destination_dir}/${entry_name}"
|
||||
for scoped_entry in "${entry}"/* "${entry}"/.[!.]* "${entry}"/..?*; do
|
||||
scoped_name="$(basename "${scoped_entry}")"
|
||||
if [[ "${scoped_name}" == "." || "${scoped_name}" == ".." ]]; then
|
||||
continue
|
||||
fi
|
||||
link_node_modules_entry "${scoped_entry}" "${destination_dir}/${entry_name}/${scoped_name}"
|
||||
done
|
||||
continue
|
||||
fi
|
||||
|
||||
link_node_modules_entry "${entry}" "${destination_dir}/${entry_name}"
|
||||
done
|
||||
shopt -u dotglob nullglob
|
||||
}
|
||||
|
||||
find_install_repo_node_modules() {
|
||||
local repo_root="$1"
|
||||
local package_rel_dir
|
||||
package_rel_dir="$(normalize_rel_dir "$2")"
|
||||
|
||||
if [[ "${package_rel_dir}" != "." ]]; then
|
||||
local candidate="${package_rel_dir}"
|
||||
while true; do
|
||||
if [[ -d "${repo_root}/${candidate}/node_modules" ]]; then
|
||||
echo "${repo_root}/${candidate}/node_modules"
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ "${candidate}" != */* ]]; then
|
||||
break
|
||||
fi
|
||||
candidate="${candidate%/*}"
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ -d "${repo_root}/node_modules" ]]; then
|
||||
echo "${repo_root}/node_modules"
|
||||
return 0
|
||||
fi
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
mirror_install_repo_workspace_node_modules() {
|
||||
local repo_root="$1"
|
||||
local destination_root="$2"
|
||||
|
||||
while IFS= read -r install_node_modules; do
|
||||
local rel_path="${install_node_modules#${repo_root}/}"
|
||||
local destination="${destination_root}/${rel_path}"
|
||||
|
||||
mkdir -p "$(dirname "${destination}")"
|
||||
mirror_node_modules_dir "${install_node_modules}" "${destination}"
|
||||
done < <(find "${repo_root}" \
|
||||
-path "${repo_root}/node_modules" -prune -o \
|
||||
-type d -name node_modules -print 2>/dev/null | sort)
|
||||
}
|
||||
|
||||
build_runtime_path() {
|
||||
local workspace_dir="$1"
|
||||
local package_dir="$2"
|
||||
local entries=()
|
||||
|
||||
if [[ -d "${package_dir}/node_modules/.bin" ]]; then
|
||||
entries+=("${package_dir}/node_modules/.bin")
|
||||
fi
|
||||
if [[ -d "${workspace_dir}/node_modules/.bin" && "${workspace_dir}/node_modules/.bin" != "${package_dir}/node_modules/.bin" ]]; then
|
||||
entries+=("${workspace_dir}/node_modules/.bin")
|
||||
fi
|
||||
if [[ -n "${PATH:-}" ]]; then
|
||||
entries+=("${PATH}")
|
||||
fi
|
||||
|
||||
if [[ ${#entries[@]} -eq 0 ]]; then
|
||||
echo ""
|
||||
return 0
|
||||
fi
|
||||
|
||||
local path_value=""
|
||||
local entry=""
|
||||
for entry in "${entries[@]}"; do
|
||||
if [[ -z "${path_value}" ]]; then
|
||||
path_value="${entry}"
|
||||
else
|
||||
path_value="${path_value}:${entry}"
|
||||
fi
|
||||
done
|
||||
echo "${path_value}"
|
||||
}
|
||||
|
||||
resolve_package_rel_dir() {
|
||||
if [[ -n "${package_rel_dir_hint}" && "${package_rel_dir_hint}" != "." ]]; then
|
||||
normalize_rel_dir "${package_rel_dir_hint}"
|
||||
return 0
|
||||
fi
|
||||
if [[ -n "${package_json}" ]]; then
|
||||
find_package_rel_dir_for_path "${package_json}"
|
||||
return 0
|
||||
fi
|
||||
if [[ -n "${primary_source}" ]]; then
|
||||
find_package_rel_dir_for_path "${primary_source}"
|
||||
return 0
|
||||
fi
|
||||
echo "."
|
||||
}
|
||||
|
||||
resolve_execution_rel_dir() {
|
||||
local package_rel_dir="$1"
|
||||
case "${working_dir_mode}" in
|
||||
workspace)
|
||||
echo "."
|
||||
;;
|
||||
package)
|
||||
echo "${package_rel_dir}"
|
||||
;;
|
||||
entry_point)
|
||||
if [[ -n "${primary_source}" ]]; then
|
||||
find_working_rel_dir_for_path "${primary_source}"
|
||||
else
|
||||
echo "${package_rel_dir}"
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo "${package_rel_dir}"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
package_rel_dir="$(resolve_package_rel_dir)"
|
||||
execution_rel_dir="$(resolve_execution_rel_dir "${package_rel_dir}")"
|
||||
|
||||
runtime_workspace="$(mktemp -d)"
|
||||
cleanup_runtime_workspace() {
|
||||
rm -rf "${runtime_workspace}"
|
||||
}
|
||||
|
||||
stage_workspace_view "${workspace_root}" "${runtime_workspace}" "${package_rel_dir}"
|
||||
runtime_package_dir="${runtime_workspace}"
|
||||
if [[ "${package_rel_dir}" != "." ]]; then
|
||||
runtime_package_dir="${runtime_workspace}/${package_rel_dir}"
|
||||
fi
|
||||
runtime_exec_dir="${runtime_workspace}"
|
||||
if [[ "${execution_rel_dir}" != "." ]]; then
|
||||
runtime_exec_dir="${runtime_workspace}/${execution_rel_dir}"
|
||||
fi
|
||||
|
||||
workspace_package_map="${runtime_workspace}/.rules_bun_workspace_packages.tsv"
|
||||
build_workspace_package_map "${runtime_workspace}" "${workspace_package_map}"
|
||||
|
||||
primary_node_modules="$(select_primary_node_modules)"
|
||||
install_repo_root=""
|
||||
if [[ -n "${primary_node_modules}" ]]; then
|
||||
install_repo_root="$(dirname "${primary_node_modules}")"
|
||||
mirror_node_modules_dir "${primary_node_modules}" "${runtime_workspace}/node_modules"
|
||||
fi
|
||||
|
||||
if [[ -n "${install_repo_root}" ]]; then
|
||||
resolved_install_node_modules="$(find_install_repo_node_modules "${install_repo_root}" "${package_rel_dir}" || true)"
|
||||
if [[ -n "${resolved_install_node_modules}" && "${resolved_install_node_modules}" != "${install_repo_root}/node_modules" ]]; then
|
||||
mirror_node_modules_dir "${resolved_install_node_modules}" "${runtime_package_dir}/node_modules"
|
||||
fi
|
||||
mirror_install_repo_workspace_node_modules "${install_repo_root}" "${runtime_workspace}"
|
||||
fi
|
||||
|
||||
if [[ ! -e "${runtime_package_dir}/node_modules" && -e "${runtime_workspace}/node_modules" && "${runtime_package_dir}" != "${runtime_workspace}" ]]; then
|
||||
ln -s "${runtime_workspace}/node_modules" "${runtime_package_dir}/node_modules"
|
||||
fi
|
||||
|
||||
runtime_path="$(build_runtime_path "${runtime_workspace}" "${runtime_package_dir}")"
|
||||
if [[ -n "${runtime_path}" ]]; then
|
||||
export PATH="${runtime_path}"
|
||||
fi
|
||||
"""
|
||||
|
||||
def _shell_quote(value):
|
||||
return "'" + value.replace("'", "'\"'\"'") + "'"
|
||||
|
||||
def _dirname(path):
|
||||
if not path or path == ".":
|
||||
return "."
|
||||
|
||||
index = path.rfind("/")
|
||||
if index < 0:
|
||||
return "."
|
||||
if index == 0:
|
||||
return "/"
|
||||
return path[:index]
|
||||
|
||||
def find_install_metadata_file(files):
|
||||
for file in files:
|
||||
if file.short_path.endswith("node_modules/.rules_bun/install.json"):
|
||||
return file
|
||||
return None
|
||||
|
||||
def resolve_node_modules_roots(files, workspace_dir = ""):
|
||||
install_metadata_file = find_install_metadata_file(files)
|
||||
shared_node_modules_root = None
|
||||
workspace_node_modules_root = None
|
||||
|
||||
if install_metadata_file:
|
||||
shared_node_modules_root = _dirname(_dirname(install_metadata_file.path))
|
||||
|
||||
workspace_marker = ""
|
||||
if workspace_dir:
|
||||
workspace_marker = "/%s/node_modules/" % workspace_dir.strip("/")
|
||||
|
||||
shortest_path = None
|
||||
for src in files:
|
||||
if workspace_marker and workspace_marker in src.path and workspace_node_modules_root == None:
|
||||
workspace_node_modules_root = src.path[:src.path.find(workspace_marker) + len(workspace_marker) - 1]
|
||||
if shortest_path == None or len(src.path) < len(shortest_path):
|
||||
shortest_path = src.path
|
||||
|
||||
if shared_node_modules_root == None and shortest_path:
|
||||
marker = "/node_modules/"
|
||||
marker_index = shortest_path.find(marker)
|
||||
if marker_index >= 0:
|
||||
shared_node_modules_root = shortest_path[:marker_index + len("/node_modules")]
|
||||
|
||||
return struct(
|
||||
install_metadata_file = install_metadata_file,
|
||||
node_modules_root = workspace_node_modules_root or shared_node_modules_root,
|
||||
shared_node_modules_root = shared_node_modules_root,
|
||||
)
|
||||
|
||||
def create_bun_workspace_info(ctx, primary_file = None, package_json = None, package_dir_hint = ".", extra_files = None):
    """Assemble a BunWorkspaceInfo provider for the current rule.

    Collects the runtime file set (entry point, package.json, extra data
    files, node_modules contents) and writes a small JSON descriptor next
    to the target so runtime tooling can discover the workspace layout.
    """
    hint = package_dir_hint or "."

    direct = []
    if primary_file:
        direct.append(primary_file)
    if package_json and package_json != primary_file:
        direct.append(package_json)
    for extra in (extra_files or []):
        direct.append(extra)

    manifest = None
    nm_files = depset()
    if getattr(ctx.attr, "node_modules", None):
        nm_files = ctx.attr.node_modules[DefaultInfo].files
        manifest = find_install_metadata_file(nm_files.to_list())

    # Descriptor consumed at runtime; paths are runfiles-relative short paths.
    descriptor = ctx.actions.declare_file("%s.bun_workspace.json" % ctx.label.name)
    ctx.actions.write(
        output = descriptor,
        content = json.encode({
            "install_metadata": manifest.short_path if manifest else "",
            "package_dir_hint": hint,
            "package_json": package_json.short_path if package_json else "",
            "primary_file": primary_file.short_path if primary_file else "",
        }) + "\n",
    )
    direct.append(descriptor)

    return BunWorkspaceInfo(
        install_metadata_file = manifest,
        metadata_file = descriptor,
        node_modules_files = nm_files,
        package_dir_hint = hint,
        package_json = package_json,
        primary_file = primary_file,
        runtime_files = depset(direct = direct, transitive = [nm_files]),
    )
|
||||
|
||||
def workspace_runfiles(ctx, workspace_info, direct_files = None, transitive_files = None):
    """Build ctx.runfiles covering the workspace's runtime files plus any
    extra direct files and transitive depsets the caller supplies."""
    extra_depsets = transitive_files or []
    merged = depset(transitive = [workspace_info.runtime_files] + extra_depsets)
    return ctx.runfiles(
        files = direct_files or [],
        transitive_files = merged,
    )
|
||||
|
||||
def render_workspace_setup(
        bun_short_path,
        working_dir_mode,
        primary_source_short_path = "",
        package_json_short_path = "",
        package_dir_hint = "."):
    """Render the workspace-setup shell snippet by substituting each
    __PLACEHOLDER__ in the template with its caller-supplied value."""
    rendered = _WORKSPACE_SETUP_TEMPLATE
    for placeholder, value in [
        ("__BUN_SHORT_PATH__", bun_short_path),
        ("__PRIMARY_SOURCE_SHORT_PATH__", primary_source_short_path),
        ("__PACKAGE_JSON_SHORT_PATH__", package_json_short_path),
        ("__PACKAGE_DIR_HINT__", package_dir_hint or "."),
        ("__WORKING_DIR_MODE__", working_dir_mode),
    ]:
        rendered = rendered.replace(placeholder, value)
    return rendered
|
||||
|
||||
Reference in New Issue
Block a user