feat: improve rules_js parity

This commit is contained in:
eric
2026-03-14 23:50:26 +01:00
parent d7a6d6b0ba
commit c446f23a35
36 changed files with 1683 additions and 639 deletions

2
MODULE.bazel.lock generated
View File

@@ -192,7 +192,7 @@
"moduleExtensions": {
"//bun:extensions.bzl%bun": {
"general": {
"bzlTransitiveDigest": "eSFVebwDN61an1dp3505njvMKN961HH+iY2tK6fEBQQ=",
"bzlTransitiveDigest": "mWoMIEcKvXURFuMv68yk2TPrpNykLSLygedENme3WrQ=",
"usagesDigest": "/0BcCMA6AOzLhQaRK6DquxrCfpPHJUjSUaFz4zmQrsM=",
"recordedInputs": [
"REPO_MAPPING:,bazel_tools bazel_tools"

View File

@@ -33,6 +33,9 @@ The public entrypoint for rule authors and users is `@rules_bun//bun:defs.bzl`.
- `bun_dev`
- `bun_script`
- `bun_test`
- `js_binary`
- `js_test`
- `js_run_devserver`
- `js_library`
- `ts_library`
@@ -105,6 +108,36 @@ bun_install_ext.install(
use_repo(bun_install_ext, "bun_deps")
```
## `rules_js` compatibility layer
`rules_bun` now exposes a Bun-backed compatibility layer for the most common
`rules_js` entrypoints:
- `@rules_bun//js:defs.bzl` exports `js_binary`, `js_test`, `js_run_devserver`,
`js_library`, `ts_library`, and `JsInfo`.
- `@rules_bun//npm:extensions.bzl` exports `npm_translate_lock`, which creates a
Bun-installed external repo and generates `@<repo>//:defs.bzl` with
`npm_link_all_packages()`.
Example:
```starlark
load("@rules_bun//js:defs.bzl", "js_binary")
load("@npm//:defs.bzl", "npm_link_all_packages")
npm_link_all_packages()
js_binary(
name = "app",
entry_point = "src/main.ts",
node_modules = ":node_modules",
)
```
This is a compatibility subset, not a full reimplementation of `rules_js`.
Package aliases created by `npm_link_all_packages()` use sanitized target names
such as `npm__vite` or `npm__at_types_node`.
## Legacy WORKSPACE usage
For non-Bzlmod consumers, the repository exposes a legacy setup macro in

View File

@@ -40,6 +40,7 @@ bzl_library(
"//internal:bun_dev_bzl",
"//internal:bun_script_bzl",
"//internal:bun_test_bzl",
"//internal:js_compat_bzl",
"//internal:js_library_bzl",
],
)

View File

@@ -4,6 +4,7 @@ load("//internal:bun_bundle.bzl", _bun_bundle = "bun_bundle")
load("//internal:bun_dev.bzl", _bun_dev = "bun_dev")
load("//internal:bun_script.bzl", _bun_script = "bun_script")
load("//internal:bun_test.bzl", _bun_test = "bun_test")
load("//internal:js_compat.bzl", _JsInfo = "JsInfo", _js_binary = "js_binary", _js_run_devserver = "js_run_devserver", _js_test = "js_test")
load("//internal:js_library.bzl", _js_library = "js_library", _ts_library = "ts_library")
load(":toolchain.bzl", _BunToolchainInfo = "BunToolchainInfo", _bun_toolchain = "bun_toolchain")
@@ -14,8 +15,12 @@ bun_bundle = _bun_bundle
bun_dev = _bun_dev
bun_script = _bun_script
bun_test = _bun_test
js_binary = _js_binary
js_test = _js_test
js_run_devserver = _js_run_devserver
js_library = _js_library
ts_library = _ts_library
JsInfo = _JsInfo
BunToolchainInfo = _BunToolchainInfo
bun_toolchain = _bun_toolchain

View File

@@ -75,6 +75,7 @@ def _bun_install_impl(ctx):
bun_lockfile = install.bun_lockfile,
install_inputs = install.install_inputs,
isolated_home = install.isolated_home,
visible_repo_name = install.name,
)

View File

@@ -12,6 +12,9 @@ stardoc(
"bun_dev",
"bun_script",
"bun_test",
"js_binary",
"js_run_devserver",
"js_test",
"js_library",
"ts_library",
],

View File

@@ -14,6 +14,7 @@ Unlike the build rules in [rules.md](rules.md), `bun_install` is not loaded from
- runs `bun install --frozen-lockfile`
- uses your checked-in `package.json` and `bun.lock` or `bun.lockb`
- creates an external Bazel repository exposing `:node_modules`
- generates `:defs.bzl` with `npm_link_all_packages()` and `package_target_name()`
- keeps dependency installation under Bun rather than npm
The generated repository can then be passed to rules such as `bun_script`,

View File

@@ -2,6 +2,46 @@
This file documents the public rules exported from `@rules_bun//bun:defs.bzl`.
## js_binary
Runs a JS/TS entry point with Bun behind a `rules_js`-style name.
Attributes:
- `entry_point` (label, required): path to the main JS/TS file to execute.
- `node_modules` (label, optional): package files from a `node_modules` tree, typically produced by `bun_install` or `npm_translate_lock`, made available in runfiles.
- `data` (label_list, optional): additional runtime files.
- `deps` (label_list, optional): library dependencies required by the program.
- `args` (string_list, optional): default arguments appended before command-line arguments passed to the binary.
- `working_dir` (string, default: `"workspace"`, values: `"workspace" | "entry_point"`): runtime working directory.
## js_test
Runs Bun tests behind a `rules_js`-style name.
Attributes:
- `srcs` (label_list, required): test source files passed to `bun test`.
- `node_modules` (label, optional): package files from a `node_modules` tree, typically produced by `bun_install` or `npm_translate_lock`, made available in runfiles.
- `deps` (label_list, optional): library dependencies required by tests.
- `data` (label_list, optional): additional runtime files needed by tests.
- `args` (string_list, optional): default arguments appended after the test source list.
## js_run_devserver
Runs an executable target from a staged JS workspace.
Attributes:
- `tool` (label, required): executable target to launch as the dev server.
- `args` (string_list, optional): default arguments appended before command-line arguments passed to the dev server.
- `package_json` (label, optional): package manifest used to resolve the package working directory.
- `package_dir_hint` (string, default: `"."`): package-relative directory hint when `package_json` is omitted.
- `node_modules` (label, optional): package files from a `node_modules` tree, typically produced by `bun_install` or `npm_translate_lock`, made available in runfiles.
- `deps` (label_list, optional): library dependencies required by the dev server.
- `data` (label_list, optional): additional runtime files.
- `working_dir` (string, default: `"workspace"`, values: `"workspace" | "package"`): runtime working directory.
## bun_binary
Runs a JS/TS entry point with Bun as an executable target (`bazel run`).
@@ -11,6 +51,8 @@ Attributes:
- `entry_point` (label, required): path to the main JS/TS file to execute.
- `node_modules` (label, optional): package files from a `node_modules` tree, typically produced by `bun_install`, made available in runfiles.
- `data` (label_list, optional): additional runtime files.
- `deps` (label_list, optional): library dependencies required by the program.
- `args` (string_list, optional): default arguments appended before command-line arguments passed to the binary.
- `working_dir` (string, default: `"workspace"`, values: `"workspace" | "entry_point"`): runtime working directory.
## bun_dev
@@ -68,6 +110,7 @@ Attributes:
- `node_modules` (label, optional): package files from a `node_modules` tree, typically produced by `bun_install`, made available in runfiles.
- `deps` (label_list, optional): library dependencies required by tests.
- `data` (label_list, optional): additional runtime files needed by tests.
- `args` (string_list, optional): default arguments appended after the test source list.
## js_library
@@ -76,6 +119,8 @@ Aggregates JavaScript sources and transitive Bun source dependencies.
Attributes:
- `srcs` (label_list, optional): `.js`, `.jsx`, `.mjs`, `.cjs` files.
- `types` (label_list, optional): `.d.ts` files propagated to dependents.
- `data` (label_list, optional): runtime files propagated to dependents.
- `deps` (label_list, optional): dependent source libraries.
## ts_library
@@ -85,4 +130,6 @@ Aggregates TypeScript sources and transitive Bun source dependencies.
Attributes:
- `srcs` (label_list, optional): `.ts`, `.tsx` files.
- `types` (label_list, optional): `.d.ts` files propagated to dependents.
- `data` (label_list, optional): runtime files propagated to dependents.
- `deps` (label_list, optional): dependent source libraries.

80
flake.lock generated
View File

@@ -1,28 +1,5 @@
{
"nodes": {
"devshell-lib": {
"inputs": {
"git-hooks": "git-hooks",
"nixpkgs": [
"nixpkgs"
],
"treefmt-nix": "treefmt-nix"
},
"locked": {
"lastModified": 1772815059,
"narHash": "sha256-9Mn8t/a7b43omtmKRsF0HmFpCkNpTsvYEq0y85KLL5s=",
"ref": "v2.0.1",
"rev": "80cc529de7060e079d89a69d8daaf0347b53d8f9",
"revCount": 43,
"type": "git",
"url": "https://git.dgren.dev/eric/nix-flake-lib"
},
"original": {
"ref": "v2.0.1",
"type": "git",
"url": "https://git.dgren.dev/eric/nix-flake-lib"
}
},
"flake-compat": {
"flake": false,
"locked": {
@@ -43,7 +20,7 @@
"inputs": {
"flake-compat": "flake-compat",
"gitignore": "gitignore",
"nixpkgs": "nixpkgs"
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1772024342,
@@ -62,7 +39,7 @@
"gitignore": {
"inputs": {
"nixpkgs": [
"devshell-lib",
"repo-lib",
"git-hooks",
"nixpkgs"
]
@@ -82,6 +59,22 @@
}
},
"nixpkgs": {
"locked": {
"lastModified": 1772542754,
"narHash": "sha256-WGV2hy+VIeQsYXpsLjdr4GvHv5eECMISX1zKLTedhdg=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "8c809a146a140c5c8806f13399592dbcb1bb5dc4",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1770073757,
"narHash": "sha256-Vy+G+F+3E/Tl+GMNgiHl9Pah2DgShmIUBJXmbiQPHbI=",
@@ -97,7 +90,7 @@
"type": "github"
}
},
"nixpkgs_2": {
"nixpkgs_3": {
"locked": {
"lastModified": 1770107345,
"narHash": "sha256-tbS0Ebx2PiA1FRW8mt8oejR0qMXmziJmPaU1d4kYY9g=",
@@ -113,31 +106,38 @@
"type": "github"
}
},
"nixpkgs_3": {
"repo-lib": {
"inputs": {
"git-hooks": "git-hooks",
"nixpkgs": [
"nixpkgs"
],
"treefmt-nix": "treefmt-nix"
},
"locked": {
"lastModified": 1772542754,
"narHash": "sha256-WGV2hy+VIeQsYXpsLjdr4GvHv5eECMISX1zKLTedhdg=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "8c809a146a140c5c8806f13399592dbcb1bb5dc4",
"type": "github"
"lastModified": 1772866275,
"narHash": "sha256-lsJrFIbq6OO5wUC648VnvOmJm3qgJrlEugbdjeZsP34=",
"ref": "refs/tags/v3.0.0",
"rev": "96d2d190466dddcb9e652c38b70152f09b9fcb05",
"revCount": 50,
"type": "git",
"url": "https://git.dgren.dev/eric/nix-flake-lib"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
"ref": "refs/tags/v3.0.0",
"type": "git",
"url": "https://git.dgren.dev/eric/nix-flake-lib"
}
},
"root": {
"inputs": {
"devshell-lib": "devshell-lib",
"nixpkgs": "nixpkgs_3"
"nixpkgs": "nixpkgs",
"repo-lib": "repo-lib"
}
},
"treefmt-nix": {
"inputs": {
"nixpkgs": "nixpkgs_2"
"nixpkgs": "nixpkgs_3"
},
"locked": {
"lastModified": 1770228511,

188
flake.nix
View File

@@ -3,58 +3,34 @@
inputs = {
nixpkgs.url = "github:nixos/nixpkgs?ref=nixos-unstable";
devshell-lib.url = "git+https://git.dgren.dev/eric/nix-flake-lib?ref=v2.0.1";
devshell-lib.inputs.nixpkgs.follows = "nixpkgs";
repo-lib.url = "git+https://git.dgren.dev/eric/nix-flake-lib?ref=refs/tags/v3.0.0";
repo-lib.inputs.nixpkgs.follows = "nixpkgs";
};
outputs =
{
self,
nixpkgs,
devshell-lib,
repo-lib,
...
}:
let
supportedSystems = [
"x86_64-linux"
"aarch64-linux"
"x86_64-darwin"
"aarch64-darwin"
];
forAllSystems = nixpkgs.lib.genAttrs supportedSystems;
bazelVersion = "9.0.0";
in
{
devShells = forAllSystems (
system:
let
pkgs = import nixpkgs { inherit system; };
bazel9 = pkgs.writeShellScriptBin "bazel" ''
export USE_BAZEL_VERSION="''${USE_BAZEL_VERSION:-9.0.0}"
exec ${pkgs.bazelisk}/bin/bazelisk "$@"
repo-lib.lib.mkRepo {
inherit self nixpkgs;
src = ./.;
config = {
shell.extraShellText = ''
export USE_BAZEL_VERSION="''${USE_BAZEL_VERSION:-${bazelVersion}}"
export BUN_INSTALL="''${BUN_INSTALL:-$HOME/.bun}"
export PATH="$BUN_INSTALL/bin:$PATH"
'';
env = devshell-lib.lib.mkDevShell {
inherit system;
extraPackages = with pkgs; [
go
gopls
gotools
bun
bazel9
bazel-buildtools
self.packages.${system}.release
];
features = {
oxfmt = false;
};
formatters = {
shfmt.enable = true;
};
formatterSettings = {
shfmt.options = [
formatting = {
programs.shfmt.enable = true;
settings.shfmt.options = [
"-i"
"2"
"-s"
@@ -62,84 +38,15 @@
];
};
additionalHooks = {
tests = {
enable = true;
entry = ''
bazel test //tests/...
'';
pass_filenames = false;
stages = [ "pre-push" ];
};
};
tools = [
release = {
steps = [
{
name = "Bun";
bin = "${pkgs.bun}/bin/bun";
versionCmd = "--version";
color = "YELLOW";
}
{
name = "Go";
bin = "${pkgs.go}/bin/go";
versionCmd = "version";
color = "CYAN";
}
{
name = "Bazel";
bin = "${bazel9}/bin/bazel";
versionCmd = "--version";
color = "GREEN";
}
];
extraShellHook = ''
export USE_BAZEL_VERSION="''${USE_BAZEL_VERSION:-9.0.0}"
export BUN_INSTALL="''${BUN_INSTALL:-$HOME/.bun}"
export PATH="$BUN_INSTALL/bin:$PATH"
'';
};
in
{
default = env.shell;
}
);
checks = forAllSystems (
system:
let
env = devshell-lib.lib.mkDevShell { inherit system; };
in
{
inherit (env) pre-commit-check;
}
);
formatter = forAllSystems (system: (devshell-lib.lib.mkDevShell { inherit system; }).formatter);
# Optional: release command (`release`)
#
# The release script always updates VERSION first, then:
# 1) runs release steps in order (file writes and scripts)
# 2) runs postVersion hook
# 3) formats, stages, commits, tags, and pushes
#
# Runtime env vars available in release.run/postVersion:
# BASE_VERSION, CHANNEL, PRERELEASE_NUM, FULL_VERSION, FULL_TAG
#
packages = forAllSystems (system: {
release = devshell-lib.lib.mkRelease {
inherit system;
release = [
{
run = ''
run.script = ''
sed -E -i 's#^([[:space:]]*version[[:space:]]*=[[:space:]]*")[^"]*(",)$#\1'"$FULL_VERSION"'\2#' "$ROOT_DIR/MODULE.bazel"
'';
}
{
run = ''
run.script = ''
README="$ROOT_DIR/README.md"
TMP="$README.tmp"
@@ -174,8 +81,59 @@
echo "Released $FULL_TAG"
'';
};
});
};
perSystem =
{
pkgs,
system,
...
}:
let
bazel9 = pkgs.writeShellScriptBin "bazel" ''
export USE_BAZEL_VERSION="''${USE_BAZEL_VERSION:-${bazelVersion}}"
exec ${pkgs.bazelisk}/bin/bazelisk "$@"
'';
in
{
tools = [
(repo-lib.lib.tools.fromPackage {
name = "Bun";
package = pkgs.bun;
version.args = [ "--version" ];
banner.color = "YELLOW";
})
(repo-lib.lib.tools.fromPackage {
name = "Go";
package = pkgs.go;
version.args = [ "version" ];
banner.color = "CYAN";
})
(repo-lib.lib.tools.fromPackage {
name = "Bazel";
package = bazel9;
version.args = [ "--version" ];
banner.color = "GREEN";
})
];
shell.packages = [
pkgs.gopls
pkgs.gotools
pkgs.bazel-buildtools
self.packages.${system}.release
];
checks.tests = {
command = "bazel test //tests/...";
stage = "pre-push";
passFilenames = false;
runtimeInputs = [
bazel9
pkgs.bun
pkgs.go
];
};
};
};
}

View File

@@ -9,7 +9,10 @@ exports_files([
"bun_install.bzl",
"bun_script.bzl",
"bun_test.bzl",
"js_compat.bzl",
"js_library.bzl",
"js_run_devserver.bzl",
"workspace.bzl",
])
bzl_library(
@@ -44,7 +47,32 @@ bzl_library(
deps = [":js_library_bzl"],
)
bzl_library(
name = "js_compat_bzl",
srcs = ["js_compat.bzl"],
deps = [
":bun_binary_bzl",
":bun_test_bzl",
":js_library_bzl",
":js_run_devserver_bzl",
],
)
bzl_library(
name = "js_library_bzl",
srcs = ["js_library.bzl"],
)
bzl_library(
name = "js_run_devserver_bzl",
srcs = ["js_run_devserver.bzl"],
deps = [
":js_library_bzl",
":workspace_bzl",
],
)
bzl_library(
name = "workspace_bzl",
srcs = ["workspace.bzl"],
)

View File

@@ -1,67 +1,55 @@
"""Rule for running JS/TS scripts with Bun."""
load("//internal:js_library.bzl", "collect_js_runfiles")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
def _shell_quote(value):
return "'" + value.replace("'", "'\"'\"'") + "'"
def _bun_binary_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
entry_point = ctx.file.entry_point
dep_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
workspace_info = create_bun_workspace_info(
ctx,
extra_files = ctx.files.data + [bun_bin],
primary_file = entry_point,
)
command = """
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
exec "${bun_bin}" --bun run "${primary_source}" "$@"
"""
if ctx.attr.args:
command = """
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
exec "${bun_bin}" --bun run "${primary_source}" __DEFAULT_ARGS__ "$@"
""".replace("__DEFAULT_ARGS__", " ".join([_shell_quote(arg) for arg in ctx.attr.args]))
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = """#!/usr/bin/env bash
set -euo pipefail
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
workspace_root="${{runfiles_dir}}/_main"
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
entry_point="${{runfiles_dir}}/_main/{entry_short_path}"
resolve_entrypoint_workdir() {{
local dir
dir="$(dirname "${{entry_point}}")"
while [[ "${{dir}}" == "${{workspace_root}}"* ]]; do
if [[ -f "${{dir}}/.env" || -f "${{dir}}/package.json" ]]; then
echo "${{dir}}"
return 0
fi
if [[ "${{dir}}" == "${{workspace_root}}" ]]; then
break
fi
dir="$(dirname "${{dir}}")"
done
echo "$(dirname "${{entry_point}}")"
}}
working_dir="{working_dir}"
if [[ "${{working_dir}}" == "entry_point" ]]; then
cd "$(resolve_entrypoint_workdir)"
else
cd "${{workspace_root}}"
fi
exec "${{bun_bin}}" --bun run "${{entry_point}}" "$@"
""".format(
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
entry_short_path = entry_point.short_path,
working_dir = ctx.attr.working_dir,
),
)
transitive_files = []
if ctx.attr.node_modules:
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
runfiles = ctx.runfiles(
files = [bun_bin, entry_point] + ctx.files.data,
transitive_files = depset(transitive = transitive_files),
primary_source_short_path = entry_point.short_path,
working_dir_mode = ctx.attr.working_dir,
) + command,
)
return [
workspace_info,
DefaultInfo(
executable = launcher,
runfiles = runfiles,
runfiles = workspace_runfiles(
ctx,
workspace_info,
direct_files = [launcher],
transitive_files = dep_runfiles,
),
),
]
@@ -85,6 +73,9 @@ Use this rule for non-test scripts and CLIs that should run via `bazel run`.
allow_files = True,
doc = "Additional runtime files required by the program.",
),
"deps": attr.label_list(
doc = "Library dependencies required by the program.",
),
"working_dir": attr.string(
default = "workspace",
values = ["workspace", "entry_point"],

View File

@@ -1,6 +1,6 @@
"""Rule for bundling JS/TS sources with Bun."""
load("//internal:js_library.bzl", "BunSourcesInfo")
load("//internal:js_library.bzl", "collect_js_sources")
def _output_name(target_name, entry):
@@ -16,10 +16,7 @@ def _bun_bundle_impl(ctx):
if ctx.attr.node_modules:
transitive_inputs.append(ctx.attr.node_modules[DefaultInfo].files)
for dep in ctx.attr.deps:
if BunSourcesInfo in dep:
transitive_inputs.append(dep[BunSourcesInfo].transitive_sources)
else:
transitive_inputs.append(dep[DefaultInfo].files)
transitive_inputs.append(collect_js_sources(dep))
outputs = []
for entry in ctx.files.entry_points:

View File

@@ -1,101 +1,76 @@
"""Rule for running JS/TS scripts with Bun in watch mode for development."""
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
def _bun_dev_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
entry_point = ctx.file.entry_point
workspace_info = create_bun_workspace_info(
ctx,
extra_files = ctx.files.data + ctx.files.restart_on + [bun_bin],
primary_file = entry_point,
)
restart_watch_paths = "\n".join([path.short_path for path in ctx.files.restart_on])
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = """#!/usr/bin/env bash
set -euo pipefail
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
workspace_root="${{runfiles_dir}}/_main"
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
entry_point="${{runfiles_dir}}/_main/{entry_short_path}"
resolve_entrypoint_workdir() {{
local dir
dir="$(dirname "${{entry_point}}")"
while [[ "${{dir}}" == "${{workspace_root}}"* ]]; do
if [[ -f "${{dir}}/.env" || -f "${{dir}}/package.json" ]]; then
echo "${{dir}}"
return 0
fi
if [[ "${{dir}}" == "${{workspace_root}}" ]]; then
break
fi
dir="$(dirname "${{dir}}")"
done
echo "$(dirname "${{entry_point}}")"
}}
working_dir="{working_dir}"
if [[ "${{working_dir}}" == "entry_point" ]]; then
cd "$(resolve_entrypoint_workdir)"
else
cd "${{workspace_root}}"
fi
watch_mode="{watch_mode}"
if [[ "${{watch_mode}}" == "hot" ]]; then
command = """
watch_mode="__WATCH_MODE__"
if [[ "${watch_mode}" == "hot" ]]; then
dev_flag="--hot"
else
dev_flag="--watch"
fi
run_dev() {{
exec "${{bun_bin}}" --bun "${{dev_flag}}" run "${{entry_point}}" "$@"
}}
if [[ {restart_count} -eq 0 ]]; then
run_dev "$@"
if [[ __RESTART_COUNT__ -eq 0 ]]; then
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
exec "${bun_bin}" --bun "${dev_flag}" run "${primary_source}" "$@"
fi
readarray -t restart_paths <<'EOF_RESTART_PATHS'
{restart_watch_paths}
__RESTART_PATHS__
EOF_RESTART_PATHS
file_mtime() {{
local p="$1"
if stat -f '%m' "${{p}}" >/dev/null 2>&1; then
stat -f '%m' "${{p}}"
file_mtime() {
local path="$1"
if stat -f '%m' "${path}" >/dev/null 2>&1; then
stat -f '%m' "${path}"
return 0
fi
stat -c '%Y' "${{p}}"
}}
stat -c '%Y' "${path}"
}
declare -A mtimes
for rel in "${{restart_paths[@]}}"; do
path="${{runfiles_dir}}/_main/${{rel}}"
if [[ -e "${{path}}" ]]; then
mtimes["${{rel}}"]="$(file_mtime "${{path}}")"
for rel in "${restart_paths[@]}"; do
path="${runfiles_dir}/_main/${rel}"
if [[ -e "${path}" ]]; then
mtimes["${rel}"]="$(file_mtime "${path}")"
else
mtimes["${{rel}}"]="missing"
mtimes["${rel}"]="missing"
fi
done
child_pid=""
restart_child() {{
if [[ -n "${{child_pid}}" ]] && kill -0 "${{child_pid}}" 2>/dev/null; then
kill "${{child_pid}}"
wait "${{child_pid}}" || true
restart_child() {
if [[ -n "${child_pid}" ]] && kill -0 "${child_pid}" 2>/dev/null; then
kill "${child_pid}"
wait "${child_pid}" || true
fi
"${{bun_bin}}" --bun "${{dev_flag}}" run "${{entry_point}}" "$@" &
child_pid=$!
}}
cleanup() {{
if [[ -n "${{child_pid}}" ]] && kill -0 "${{child_pid}}" 2>/dev/null; then
kill "${{child_pid}}"
wait "${{child_pid}}" || true
(
cd "${runtime_exec_dir}"
exec "${bun_bin}" --bun "${dev_flag}" run "${primary_source}" "$@"
) &
child_pid=$!
}
cleanup() {
if [[ -n "${child_pid}" ]] && kill -0 "${child_pid}" 2>/dev/null; then
kill "${child_pid}"
wait "${child_pid}" || true
fi
}}
cleanup_runtime_workspace
}
trap cleanup EXIT INT TERM
@@ -104,45 +79,46 @@ restart_child "$@"
while true; do
sleep 1
changed=0
for rel in "${{restart_paths[@]}}"; do
path="${{runfiles_dir}}/_main/${{rel}}"
if [[ -e "${{path}}" ]]; then
current="$(file_mtime "${{path}}")"
for rel in "${restart_paths[@]}"; do
path="${runfiles_dir}/_main/${rel}"
if [[ -e "${path}" ]]; then
current="$(file_mtime "${path}")"
else
current="missing"
fi
if [[ "${{current}}" != "${{mtimes[${{rel}}]}}" ]]; then
mtimes["${{rel}}"]="${{current}}"
if [[ "${current}" != "${mtimes[${rel}]}" ]]; then
mtimes["${rel}"]="${current}"
changed=1
fi
done
if [[ "${{changed}}" -eq 1 ]]; then
if [[ "${changed}" -eq 1 ]]; then
restart_child "$@"
fi
done
""".format(
bun_short_path = bun_bin.short_path,
entry_short_path = entry_point.short_path,
watch_mode = ctx.attr.watch_mode,
working_dir = ctx.attr.working_dir,
restart_count = len(ctx.files.restart_on),
restart_watch_paths = restart_watch_paths,
),
""".replace("__WATCH_MODE__", ctx.attr.watch_mode).replace(
"__RESTART_COUNT__",
str(len(ctx.files.restart_on)),
).replace(
"__RESTART_PATHS__",
restart_watch_paths,
)
transitive_files = []
if ctx.attr.node_modules:
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
runfiles = ctx.runfiles(
files = [bun_bin, entry_point] + ctx.files.data + ctx.files.restart_on,
transitive_files = depset(transitive = transitive_files),
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
primary_source_short_path = entry_point.short_path,
working_dir_mode = ctx.attr.working_dir,
) + command,
)
return [
workspace_info,
DefaultInfo(
executable = launcher,
runfiles = runfiles,
runfiles = workspace_runfiles(ctx, workspace_info, direct_files = [launcher]),
),
]

View File

@@ -6,6 +6,13 @@ _DEFAULT_INSTALL_INPUTS = [
"bunfig.toml",
]
_MANIFEST_DEP_FIELDS = [
"dependencies",
"devDependencies",
"optionalDependencies",
"peerDependencies",
]
def _normalize_path(path):
normalized = path.replace("\\", "/")
if normalized.endswith("/") and normalized != "/":
@@ -115,6 +122,30 @@ def _validate_catalog_shape(field, value):
def _copy_json_value(value):
return json.decode(json.encode(value))
def _package_target_name(package_name):
sanitized = package_name
sanitized = sanitized.replace("@", "at_")
sanitized = sanitized.replace("/", "_")
sanitized = sanitized.replace("-", "_")
sanitized = sanitized.replace(".", "_")
sanitized = sanitized.replace("__", "_").replace("__", "_").replace("__", "_")
sanitized = sanitized.strip("_")
if not sanitized:
sanitized = "package"
return "npm__" + sanitized
def _manifest_dependency_names(manifest):
names = {}
for field in _MANIFEST_DEP_FIELDS:
dependencies = manifest.get(field)
if dependencies == None:
continue
if type(dependencies) != type({}):
fail("bun_install: `{}` must be an object when present".format(field))
for name in dependencies.keys():
names[name] = True
return names
def _normalized_root_manifest(repository_ctx, package_json):
manifest = json.decode(repository_ctx.read(package_json))
workspaces = manifest.get("workspaces")
@@ -147,6 +178,7 @@ def _materialize_workspace_packages(repository_ctx, package_json):
package_root = package_json.dirname
package_root_str = str(package_root)
written = {}
workspace_packages = {}
for pattern in _workspace_patterns(repository_ctx, package_json):
segments = pattern.split("/")
@@ -168,6 +200,15 @@ def _materialize_workspace_packages(repository_ctx, package_json):
repository_ctx.read(workspace_package_json),
)
written[relative_dir] = True
manifest = json.decode(repository_ctx.read(workspace_package_json))
package_name = manifest.get("name")
workspace_packages[relative_dir] = package_name if type(package_name) == type("") else ""
package_dirs = sorted(workspace_packages.keys())
return struct(
package_dirs = package_dirs,
package_names = [workspace_packages[package_dir] for package_dir in package_dirs if workspace_packages[package_dir]],
)
def _materialize_install_inputs(repository_ctx, package_json):
package_root = package_json.dirname
@@ -218,6 +259,68 @@ def _select_bun_binary(repository_ctx):
fail("Unsupported host platform: os={}, arch={}".format(repository_ctx.os.name, repository_ctx.os.arch))
def _render_package_targets_file(package_names):
lines = ["NPM_PACKAGE_TARGETS = {"]
for package_name in package_names:
lines.append(' "{}": "{}",'.format(package_name, _package_target_name(package_name)))
lines.extend([
"}",
"",
])
return "\n".join(lines)
def _render_repo_defs_bzl(repo_name):
return """load(":packages.bzl", "NPM_PACKAGE_TARGETS")
def package_target_name(package_name):
return NPM_PACKAGE_TARGETS.get(package_name)
def npm_link_all_packages(name = "node_modules", imported_links = []):
if not native.existing_rule(name):
native.alias(
name = name,
actual = "@{repo_name}//:node_modules",
)
requested = {{}}
for package_name in imported_links:
requested[package_name] = True
for package_name, target_name in NPM_PACKAGE_TARGETS.items():
if imported_links and package_name not in requested:
continue
if native.existing_rule(target_name):
continue
native.alias(
name = target_name,
actual = "@{repo_name}//:%s" % target_name,
)
""".format(repo_name = repo_name)
def _render_repo_build(package_names):
lines = [
'exports_files(["defs.bzl", "packages.bzl"])',
"",
"filegroup(",
' name = "node_modules",',
' srcs = glob(["**/node_modules/**"], allow_empty = False),',
' visibility = ["//visibility:public"],',
")",
"",
]
for package_name in package_names:
lines.extend([
"filegroup(",
' name = "{}",'.format(_package_target_name(package_name)),
' srcs = glob(["node_modules/{}/**"], allow_empty = True),'.format(package_name),
' visibility = ["//visibility:public"],',
")",
"",
])
return "\n".join(lines)
def _bun_install_repository_impl(repository_ctx):
package_json = repository_ctx.path(repository_ctx.attr.package_json)
bun_lockfile = repository_ctx.path(repository_ctx.attr.bun_lockfile)
@@ -230,6 +333,7 @@ def _bun_install_repository_impl(repository_ctx):
bun_bin = _select_bun_binary(repository_ctx)
lockfile_name = bun_lockfile.basename
root_manifest = json.decode(repository_ctx.read(package_json))
if lockfile_name not in ["bun.lock", "bun.lockb"]:
lockfile_name = "bun.lock"
@@ -237,7 +341,7 @@ def _bun_install_repository_impl(repository_ctx):
repository_ctx.file("package.json", _normalized_root_manifest(repository_ctx, package_json))
repository_ctx.symlink(bun_lockfile, lockfile_name)
_materialize_install_inputs(repository_ctx, package_json)
_materialize_workspace_packages(repository_ctx, package_json)
workspace_packages = _materialize_workspace_packages(repository_ctx, package_json)
install_args = [str(bun_bin), "--bun", "install", "--frozen-lockfile", "--no-progress"]
if repository_ctx.attr.isolated_home:
@@ -263,15 +367,26 @@ stderr:
""".format(result.stdout, result.stderr))
repository_ctx.file(
"BUILD.bazel",
"""filegroup(
name = "node_modules",
srcs = glob(["**/node_modules/**"], allow_empty = False),
visibility = ["//visibility:public"],
)
""",
"node_modules/.rules_bun/install.json",
json.encode({
"bun_lockfile": lockfile_name,
"package_json": "package.json",
"workspace_package_dirs": workspace_packages.package_dirs,
}) + "\n",
)
package_names = {}
for package_name in _manifest_dependency_names(root_manifest).keys():
package_names[package_name] = True
for package_name in workspace_packages.package_names:
package_names[package_name] = True
sorted_package_names = sorted(package_names.keys())
visible_repo_name = repository_ctx.attr.visible_repo_name or repository_ctx.name
repository_ctx.file("packages.bzl", _render_package_targets_file(sorted_package_names))
repository_ctx.file("defs.bzl", _render_repo_defs_bzl(visible_repo_name))
repository_ctx.file("BUILD.bazel", _render_repo_build(sorted_package_names))
bun_install_repository = repository_rule(
implementation = _bun_install_repository_impl,
attrs = {
@@ -279,6 +394,7 @@ bun_install_repository = repository_rule(
"bun_lockfile": attr.label(mandatory = True, allow_single_file = True),
"install_inputs": attr.label_list(allow_files = True),
"isolated_home": attr.bool(default = True),
"visible_repo_name": attr.string(),
"bun_linux_x64": attr.label(default = "@bun_linux_x64//:bun-linux-x64/bun", allow_single_file = True),
"bun_linux_aarch64": attr.label(default = "@bun_linux_aarch64//:bun-linux-aarch64/bun", allow_single_file = True),
"bun_darwin_x64": attr.label(default = "@bun_darwin_x64//:bun-darwin-x64/bun", allow_single_file = True),
@@ -313,4 +429,5 @@ def bun_install(name, package_json, bun_lockfile, install_inputs = [], isolated_
bun_lockfile = bun_lockfile,
install_inputs = install_inputs,
isolated_home = isolated_home,
visible_repo_name = name,
)

View File

@@ -1,5 +1,6 @@
"""Rule for running package.json scripts with Bun."""
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
def _shell_quote(value):
return "'" + value.replace("'", "'\"'\"'") + "'"
@@ -9,302 +10,37 @@ def _bun_script_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
package_json = ctx.file.package_json
workspace_info = create_bun_workspace_info(
ctx,
extra_files = ctx.files.data + [bun_bin],
package_dir_hint = package_json.dirname or ".",
package_json = package_json,
primary_file = package_json,
)
command = """
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
exec "${bun_bin}" --bun run __SCRIPT__ "$@"
""".replace("__SCRIPT__", _shell_quote(ctx.attr.script))
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = """#!/usr/bin/env bash
set -euo pipefail
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
workspace_root="${{runfiles_dir}}/_main"
workspace_root="$(cd "${{workspace_root}}" && pwd -P)"
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
package_json="${{runfiles_dir}}/_main/{package_json_short_path}"
package_dir="$(cd "$(dirname "${{package_json}}")" && pwd -P)"
package_rel_dir="{package_rel_dir}"
select_primary_node_modules() {{
local selected=""
local fallback=""
while IFS= read -r node_modules_dir; do
if [[ -z "${{fallback}}" ]]; then
fallback="${{node_modules_dir}}"
fi
if [[ ! -d "${{node_modules_dir}}/.bun" ]]; then
continue
fi
if [[ "${{node_modules_dir}}" != *"/runfiles/_main/"* ]]; then
selected="${{node_modules_dir}}"
break
fi
if [[ -z "${{selected}}" ]]; then
selected="${{node_modules_dir}}"
fi
done < <(find -L "${{runfiles_dir}}" -type d -name node_modules 2>/dev/null | sort)
if [[ -n "${{selected}}" ]]; then
echo "${{selected}}"
else
echo "${{fallback}}"
fi
}}
primary_node_modules="$(select_primary_node_modules)"
runtime_workspace="$(mktemp -d)"
cleanup_runtime_workspace() {{
rm -rf "${{runtime_workspace}}"
}}
trap cleanup_runtime_workspace EXIT
runtime_package_dir="${{runtime_workspace}}/${{package_rel_dir}}"
mkdir -p "${{runtime_package_dir}}"
cp -RL "${{package_dir}}/." "${{runtime_package_dir}}/"
workspace_package_map="${{runtime_workspace}}/workspace-packages.tsv"
python3 - "${{runtime_package_dir}}" >"${{workspace_package_map}}" <<'PY'
import json
import os
import sys
root = sys.argv[1]
for dirpath, dirnames, filenames in os.walk(root):
dirnames[:] = [name for name in dirnames if name != "node_modules"]
if "package.json" not in filenames:
continue
manifest_path = os.path.join(dirpath, "package.json")
try:
with open(manifest_path, "r", encoding="utf-8") as manifest_file:
package_name = json.load(manifest_file).get("name")
except Exception:
continue
if isinstance(package_name, str):
print(f"{{package_name}}\t{{dirpath}}")
PY
install_repo_root=""
if [[ -n "${{primary_node_modules}}" ]]; then
install_repo_root="$(dirname "${{primary_node_modules}}")"
ln -s "${{primary_node_modules}}" "${{runtime_workspace}}/node_modules"
fi
workspace_package_dir_for_source() {{
local source="$1"
local manifest_path="${{source}}/package.json"
local package_name=""
local workspace_dir=""
if [[ ! -f "${{manifest_path}}" ]]; then
return 1
fi
package_name="$(python3 - "${{manifest_path}}" <<'PY'
import json
import sys
try:
with open(sys.argv[1], "r", encoding="utf-8") as manifest_file:
package_name = json.load(manifest_file).get("name", "")
except Exception:
package_name = ""
if isinstance(package_name, str):
print(package_name)
PY
)"
workspace_dir="$(awk -F '\t' -v name="$package_name" '$1 == name {{ print $2; exit }}' "${{workspace_package_map}}")"
if [[ -n "${{package_name}}" && -n "${{workspace_dir}}" ]]; then
echo "${{workspace_dir}}"
return 0
fi
return 1
}}
link_node_modules_entry() {{
local source="$1"
local destination="$2"
local workspace_target=""
rm -rf "${{destination}}"
workspace_target="$(workspace_package_dir_for_source "${{source}}" || true)"
if [[ -n "${{workspace_target}}" ]]; then
ln -s "${{workspace_target}}" "${{destination}}"
return 0
fi
if [[ -L "${{source}}" ]]; then
ln -s "$(readlink "${{source}}")" "${{destination}}"
else
ln -s "${{source}}" "${{destination}}"
fi
}}
mirror_node_modules_dir() {{
local source_dir="$1"
local destination_dir="$2"
local entry=""
local scoped_entry=""
rm -rf "${{destination_dir}}"
mkdir -p "${{destination_dir}}"
shopt -s dotglob nullglob
for entry in "${{source_dir}}"/* "${{source_dir}}"/.[!.]* "${{source_dir}}"/..?*; do
local entry_name="$(basename "${{entry}}")"
if [[ "${{entry_name}}" == "." || "${{entry_name}}" == ".." ]]; then
continue
fi
if [[ -d "${{entry}}" && ! -L "${{entry}}" && "${{entry_name}}" == @* ]]; then
mkdir -p "${{destination_dir}}/${{entry_name}}"
for scoped_entry in "${{entry}}"/* "${{entry}}"/.[!.]* "${{entry}}"/..?*; do
local scoped_name="$(basename "${{scoped_entry}}")"
if [[ "${{scoped_name}}" == "." || "${{scoped_name}}" == ".." ]]; then
continue
fi
link_node_modules_entry "${{scoped_entry}}" "${{destination_dir}}/${{entry_name}}/${{scoped_name}}"
done
continue
fi
link_node_modules_entry "${{entry}}" "${{destination_dir}}/${{entry_name}}"
done
shopt -u dotglob nullglob
}}
find_node_modules() {{
local dir="$1"
local root="$2"
while [[ "$dir" == "$root"* ]]; do
if [[ -d "$dir/node_modules" ]]; then
echo "$dir/node_modules"
return 0
fi
if [[ "$dir" == "$root" ]]; then
break
fi
dir="$(dirname "$dir")"
done
return 1
}}
find_install_repo_node_modules() {{
local repo_root="$1"
local rel_dir="$2"
local candidate="${{rel_dir}}"
while [[ -n "${{candidate}}" ]]; do
if [[ -d "${{repo_root}}/${{candidate}}/node_modules" ]]; then
echo "${{repo_root}}/${{candidate}}/node_modules"
return 0
fi
if [[ "${{candidate}}" != */* ]]; then
break
fi
candidate="${{candidate#*/}}"
done
if [[ -d "${{repo_root}}/node_modules" ]]; then
echo "${{repo_root}}/node_modules"
return 0
fi
return 1
}}
mirror_install_repo_workspace_node_modules() {{
local repo_root="$1"
local destination_root="$2"
while IFS= read -r install_node_modules; do
local rel_path="${{install_node_modules#${{repo_root}}/}}"
local destination="${{destination_root}}/${{rel_path}}"
mkdir -p "$(dirname "${{destination}}")"
mirror_node_modules_dir "${{install_node_modules}}" "${{destination}}"
done < <(find "${{repo_root}}" \
-path "${{repo_root}}/node_modules" -prune -o \
-type d -name node_modules -print 2>/dev/null | sort)
}}
resolved_install_node_modules=""
if [[ -n "${{install_repo_root}}" ]]; then
resolved_install_node_modules="$(find_install_repo_node_modules "${{install_repo_root}}" "${{package_rel_dir}}" || true)"
fi
if [[ -n "${{resolved_install_node_modules}}" ]]; then
mirror_node_modules_dir "${{resolved_install_node_modules}}" "${{runtime_package_dir}}/node_modules"
else
resolved_node_modules="$(find_node_modules "${{runtime_package_dir}}" "${{runtime_workspace}}" || true)"
if [[ -n "${{resolved_node_modules}}" && "${{resolved_node_modules}}" != "${{runtime_package_dir}}/node_modules" ]]; then
mirror_node_modules_dir "${{resolved_node_modules}}" "${{runtime_package_dir}}/node_modules"
fi
fi
if [[ -n "${{install_repo_root}}" ]]; then
mirror_install_repo_workspace_node_modules "${{install_repo_root}}" "${{runtime_package_dir}}"
fi
path_entries=()
if [[ -d "${{runtime_package_dir}}/node_modules/.bin" ]]; then
path_entries+=("${{runtime_package_dir}}/node_modules/.bin")
fi
if [[ -d "${{runtime_workspace}}/node_modules/.bin" && "${{runtime_workspace}}/node_modules/.bin" != "${{runtime_package_dir}}/node_modules/.bin" ]]; then
path_entries+=("${{runtime_workspace}}/node_modules/.bin")
fi
if [[ ${{#path_entries[@]}} -gt 0 ]]; then
export PATH="$(IFS=:; echo "${{path_entries[*]}}"):${{PATH}}"
fi
working_dir="{working_dir}"
if [[ "${{working_dir}}" == "package" ]]; then
cd "${{runtime_package_dir}}"
else
cd "${{runtime_workspace}}"
fi
exec "${{bun_bin}}" --bun run {script} "$@"
""".format(
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
package_dir_hint = package_json.dirname or ".",
package_json_short_path = package_json.short_path,
package_rel_dir = package_json.dirname,
working_dir = ctx.attr.working_dir,
script = _shell_quote(ctx.attr.script),
),
)
transitive_files = []
if ctx.attr.node_modules:
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
runfiles = ctx.runfiles(
files = [bun_bin, package_json] + ctx.files.data,
transitive_files = depset(transitive = transitive_files),
primary_source_short_path = package_json.short_path,
working_dir_mode = ctx.attr.working_dir,
) + command,
)
return [
workspace_info,
DefaultInfo(
executable = launcher,
runfiles = runfiles,
runfiles = workspace_runfiles(ctx, workspace_info, direct_files = [launcher]),
),
]

View File

@@ -1,6 +1,7 @@
"""Rule for running test suites with Bun."""
load("//internal:js_library.bzl", "BunSourcesInfo")
load("//internal:js_library.bzl", "collect_js_runfiles")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
def _shell_quote(value):
@@ -10,53 +11,58 @@ def _shell_quote(value):
def _bun_test_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
primary_file = ctx.files.srcs[0]
dep_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
workspace_info = create_bun_workspace_info(
ctx,
extra_files = ctx.files.srcs + ctx.files.data + [bun_bin],
primary_file = primary_file,
)
src_args = " ".join([_shell_quote(src.short_path) for src in ctx.files.srcs])
command = """
trap cleanup_runtime_workspace EXIT
cd "${runtime_workspace}"
test_args=(__SRC_ARGS__)
if [[ -n "${TESTBRIDGE_TEST_ONLY:-}" && -n "${COVERAGE_DIR:-}" ]]; then
exec "${bun_bin}" --bun test "${test_args[@]}" --test-name-pattern "${TESTBRIDGE_TEST_ONLY}" --coverage "$@"
fi
if [[ -n "${TESTBRIDGE_TEST_ONLY:-}" ]]; then
exec "${bun_bin}" --bun test "${test_args[@]}" --test-name-pattern "${TESTBRIDGE_TEST_ONLY}" "$@"
fi
if [[ -n "${COVERAGE_DIR:-}" ]]; then
exec "${bun_bin}" --bun test "${test_args[@]}" --coverage "$@"
fi
exec "${bun_bin}" --bun test "${test_args[@]}" "$@"
""".replace("__SRC_ARGS__", src_args)
if ctx.attr.args:
default_args = "\n".join(['test_args+=({})'.format(_shell_quote(arg)) for arg in ctx.attr.args])
command = command.replace(
'test_args=(__SRC_ARGS__)',
'test_args=(__SRC_ARGS__)\n' + default_args,
)
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = """#!/usr/bin/env bash
set -euo pipefail
runfiles_dir="${{RUNFILES_DIR:-$0.runfiles}}"
bun_bin="${{runfiles_dir}}/_main/{bun_short_path}"
cd "${{runfiles_dir}}/_main"
if [[ -n "${{TESTBRIDGE_TEST_ONLY:-}}" && -n "${{COVERAGE_DIR:-}}" ]]; then
exec "${{bun_bin}}" --bun test {src_args} --test-name-pattern "${{TESTBRIDGE_TEST_ONLY}}" --coverage "$@"
fi
if [[ -n "${{TESTBRIDGE_TEST_ONLY:-}}" ]]; then
exec "${{bun_bin}}" --bun test {src_args} --test-name-pattern "${{TESTBRIDGE_TEST_ONLY}}" "$@"
fi
if [[ -n "${{COVERAGE_DIR:-}}" ]]; then
exec "${{bun_bin}}" --bun test {src_args} --coverage "$@"
fi
exec "${{bun_bin}}" --bun test {src_args} "$@"
""".format(
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
src_args = src_args,
),
primary_source_short_path = primary_file.short_path,
working_dir_mode = "workspace",
) + command,
)
transitive_files = []
if ctx.attr.node_modules:
transitive_files.append(ctx.attr.node_modules[DefaultInfo].files)
for dep in ctx.attr.deps:
if BunSourcesInfo in dep:
transitive_files.append(dep[BunSourcesInfo].transitive_sources)
else:
transitive_files.append(dep[DefaultInfo].files)
runfiles = ctx.runfiles(
files = [bun_bin] + ctx.files.srcs + ctx.files.data,
transitive_files = depset(transitive = transitive_files),
)
return [
workspace_info,
DefaultInfo(
executable = launcher,
runfiles = runfiles,
runfiles = workspace_runfiles(
ctx,
workspace_info,
direct_files = [launcher],
transitive_files = dep_runfiles,
),
),
]

29
internal/js_compat.bzl Normal file
View File

@@ -0,0 +1,29 @@
"""rules_js-style compatibility exports backed by Bun."""
load("//internal:bun_binary.bzl", _bun_binary = "bun_binary")
load("//internal:bun_test.bzl", _bun_test = "bun_test")
load("//internal:js_library.bzl", _JsInfo = "JsInfo", _js_library = "js_library", _ts_library = "ts_library")
load("//internal:js_run_devserver.bzl", _js_run_devserver = "js_run_devserver")
JsInfo = _JsInfo
js_library = _js_library
ts_library = _ts_library
js_run_devserver = _js_run_devserver
def js_binary(name, **kwargs):
    """rules_js-style `js_binary` compatibility macro.

    Forwards all keyword arguments unchanged to `bun_binary`, so any attribute
    accepted by `bun_binary` (entry point, node_modules, data, ...) is accepted
    here as well.

    Args:
        name: Target name.
        **kwargs: Passed through to `bun_binary` verbatim.
    """
    _bun_binary(name = name, **kwargs)
def js_test(name, entry_point = None, srcs = None, **kwargs):
    """rules_js-style `js_test` compatibility macro backed by `bun_test`.

    Accepts either a single `entry_point` or an explicit `srcs` list (exactly
    one of the two must be provided) and forwards to `bun_test`.

    Args:
        name: Target name.
        entry_point: Optional single test file; mutually exclusive with `srcs`.
        srcs: Optional list of test files; mutually exclusive with `entry_point`.
        **kwargs: Passed through to `bun_test` verbatim.
    """
    if entry_point != None and srcs != None:
        fail("js_test accepts either `entry_point` or `srcs`, but not both")
    if entry_point != None:
        srcs = [entry_point]
    if srcs == None:
        fail("js_test requires `entry_point` or `srcs`")
    _bun_test(
        name = name,
        srcs = srcs,
        **kwargs
    )

View File

@@ -1,23 +1,74 @@
"""Lightweight JS/TS source grouping rules."""
# Compatibility analogue of the rules_js `JsInfo` provider. Produced by
# js_library/ts_library and consumed by the collect_js_* helpers in this file.
# NOTE(review): the field set is a subset of upstream rules_js JsInfo —
# confirm it covers what downstream consumers expect.
JsInfo = provider(
    doc = "Provides transitive JavaScript/TypeScript metadata for Bun and JS compatibility rules.",
    fields = {
        "sources": "Direct source files owned by this target.",
        "transitive_sources": "Transitive source files from this target and its deps.",
        "types": "Direct type files owned by this target.",
        "transitive_types": "Transitive type files from this target and its deps.",
        "data_files": "Direct runtime data files owned by this target.",
        "transitive_runfiles": "Transitive runtime files from this target and its deps.",
    },
)
# Legacy provider predating JsInfo. The collect_js_sources/collect_js_runfiles
# helpers still accept it so older bun_library-style deps keep working.
BunSourcesInfo = provider(
    "Provides transitive sources for Bun libraries.",
    fields = ["transitive_sources"],
)
def collect_js_sources(dep):
    """Return dep's transitive JS/TS sources as a depset.

    Prefers the JsInfo provider, then the legacy BunSourcesInfo provider,
    and finally falls back to the target's DefaultInfo files.
    """
    if JsInfo in dep:
        return dep[JsInfo].transitive_sources
    elif BunSourcesInfo in dep:
        return dep[BunSourcesInfo].transitive_sources
    else:
        return dep[DefaultInfo].files
def collect_js_runfiles(dep):
    """Return dep's transitive runtime files as a depset.

    Prefers JsInfo's transitive_runfiles. BunSourcesInfo has no separate
    runfiles field, so its transitive_sources stand in for runtime files;
    otherwise fall back to DefaultInfo files.
    """
    if JsInfo in dep:
        return dep[JsInfo].transitive_runfiles
    elif BunSourcesInfo in dep:
        return dep[BunSourcesInfo].transitive_sources
    else:
        return dep[DefaultInfo].files
def _bun_library_impl(ctx):
transitive_sources = [
dep[BunSourcesInfo].transitive_sources
transitive_sources = [collect_js_sources(dep) for dep in ctx.attr.deps]
transitive_types = [
dep[JsInfo].transitive_types
for dep in ctx.attr.deps
if BunSourcesInfo in dep
if JsInfo in dep
]
transitive_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
all_sources = depset(
direct = ctx.files.srcs,
transitive = transitive_sources,
)
all_types = depset(
direct = ctx.files.types,
transitive = transitive_types,
)
all_runfiles = depset(
direct = ctx.files.srcs + ctx.files.types + ctx.files.data,
transitive = transitive_runfiles,
)
default_files = depset(
direct = ctx.files.srcs + ctx.files.types + ctx.files.data,
transitive = transitive_sources + transitive_types + transitive_runfiles,
)
js_info = JsInfo(
sources = depset(ctx.files.srcs),
transitive_sources = all_sources,
types = depset(ctx.files.types),
transitive_types = all_types,
data_files = depset(ctx.files.data),
transitive_runfiles = all_runfiles,
)
return [
js_info,
BunSourcesInfo(transitive_sources = all_sources),
DefaultInfo(files = all_sources),
DefaultInfo(files = default_files),
]
js_library = rule(
@@ -28,6 +79,14 @@ js_library = rule(
allow_files = [".js", ".jsx", ".mjs", ".cjs"],
doc = "JavaScript source files in this library.",
),
"types": attr.label_list(
allow_files = [".d.ts"],
doc = "Optional declaration files associated with this library.",
),
"data": attr.label_list(
allow_files = True,
doc = "Optional runtime files propagated to dependents.",
),
"deps": attr.label_list(
doc = "Other Bun source libraries to include transitively.",
),
@@ -42,6 +101,14 @@ ts_library = rule(
allow_files = [".ts", ".tsx"],
doc = "TypeScript source files in this library.",
),
"types": attr.label_list(
allow_files = [".d.ts"],
doc = "Optional declaration files associated with this library.",
),
"data": attr.label_list(
allow_files = True,
doc = "Optional runtime files propagated to dependents.",
),
"deps": attr.label_list(
doc = "Other Bun source libraries to include transitively.",
),

View File

@@ -0,0 +1,100 @@
"""Compatibility rule for running an executable target as a dev server."""
load("//internal:js_library.bzl", "collect_js_runfiles")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
def _shell_quote(value):
    """Single-quote `value` for POSIX shells.

    Embedded single quotes are escaped with the standard close-quote,
    double-quoted-quote, reopen-quote ('"'"') idiom.
    """
    escaped = value.replace("'", "'\"'\"'")
    return "'{}'".format(escaped)
def _js_run_devserver_impl(ctx):
    """Stages a Bun runtime workspace and writes a launcher that execs `tool`.

    Returns BunWorkspaceInfo plus a DefaultInfo whose runfiles merge the
    staged workspace files with the tool's own default runfiles.
    """
    toolchain = ctx.toolchains["//bun:toolchain_type"]
    bun_bin = toolchain.bun.bun_bin
    # May be None: `package_json` is an optional allow_single_file attribute.
    package_json = ctx.file.package_json
    dep_runfiles = [collect_js_runfiles(dep) for dep in ctx.attr.deps]
    tool_default_info = ctx.attr.tool[DefaultInfo]
    workspace_info = create_bun_workspace_info(
        ctx,
        # Fall back to the tool executable when no package.json is supplied.
        primary_file = package_json or tool_default_info.files_to_run.executable,
        package_json = package_json,
        package_dir_hint = ctx.attr.package_dir_hint,
        extra_files = ctx.files.data + [bun_bin, tool_default_info.files_to_run.executable],
    )
    # External-repo tools live under their repository name in runfiles;
    # main-repo tools resolve under "_main".
    tool_workspace = ctx.attr.tool.label.workspace_name or "_main"
    tool_path = "{}/{}".format(tool_workspace, tool_default_info.files_to_run.executable.short_path)
    # NOTE(review): relies on the common `args` attribute being readable via
    # ctx.attr.args for an executable (non-test) rule — confirm this resolves.
    default_args = " ".join([_shell_quote(arg) for arg in ctx.attr.args])
    launcher = ctx.actions.declare_file(ctx.label.name)
    ctx.actions.write(
        output = launcher,
        is_executable = True,
        # Launcher = workspace-staging preamble (render_workspace_setup)
        # followed by an exec of the tool. Placeholders are spliced with
        # .replace so the shell snippet needs no format-brace escaping.
        content = render_workspace_setup(
            bun_short_path = bun_bin.short_path,
            primary_source_short_path = package_json.short_path if package_json else tool_default_info.files_to_run.executable.short_path,
            package_json_short_path = package_json.short_path if package_json else "",
            package_dir_hint = ctx.attr.package_dir_hint,
            working_dir_mode = ctx.attr.working_dir,
        ) + """
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
tool="${runfiles_dir}/__TOOL_SHORT_PATH__"
exec "${tool}" __DEFAULT_ARGS__ "$@"
""".replace("__TOOL_SHORT_PATH__", tool_path).replace("__DEFAULT_ARGS__", default_args),
    )
    return [
        workspace_info,
        DefaultInfo(
            executable = launcher,
            runfiles = workspace_runfiles(
                ctx,
                workspace_info,
                direct_files = [launcher, tool_default_info.files_to_run.executable],
                transitive_files = dep_runfiles,
            ).merge(tool_default_info.default_runfiles),
        ),
    ]
# rules_js-style devserver adapter; exported through internal/js_compat.bzl.
js_run_devserver = rule(
    implementation = _js_run_devserver_impl,
    doc = """Runs an executable target from a staged JS workspace.

This is a Bun-backed compatibility adapter for `rules_js`-style devserver
targets. It stages the same runtime workspace as the Bun rules, then executes
the provided tool with any default arguments.
""",
    attrs = {
        "tool": attr.label(
            mandatory = True,
            executable = True,
            cfg = "target",
            doc = "Executable target to launch as the dev server.",
        ),
        "package_json": attr.label(
            allow_single_file = True,
            doc = "Optional package.json used to resolve the package working directory.",
        ),
        "package_dir_hint": attr.string(
            default = ".",
            doc = "Optional package-relative directory hint when package_json is not supplied.",
        ),
        "node_modules": attr.label(
            doc = "Optional label providing package files from a node_modules tree, typically produced by bun_install or npm_translate_lock, in runfiles.",
        ),
        "deps": attr.label_list(
            doc = "Library dependencies required by the dev server.",
        ),
        "data": attr.label_list(
            allow_files = True,
            doc = "Additional runtime files required by the dev server.",
        ),
        "working_dir": attr.string(
            default = "workspace",
            values = ["workspace", "package"],
            doc = "Working directory at runtime: Bazel runfiles workspace root or the resolved package directory.",
        ),
    },
    executable = True,
    toolchains = ["//bun:toolchain_type"],
)

649
internal/workspace.bzl Normal file
View File

@@ -0,0 +1,649 @@
"""Shared Bun workspace metadata and launcher helpers."""
# Produced by create_bun_workspace_info and consumed by workspace_runfiles;
# carries everything a generated launcher needs to stage its runtime workspace.
BunWorkspaceInfo = provider(
    doc = "Workspace/runtime metadata shared by Bun rules and adapters.",
    fields = {
        "install_metadata_file": "Optional install metadata file from bun_install.",
        "metadata_file": "Rule-local metadata file describing the staged workspace inputs.",
        "node_modules_files": "Depset of node_modules files from bun_install.",
        "package_dir_hint": "Package-relative directory when known at analysis time.",
        "package_json": "Package manifest file when explicitly provided.",
        "primary_file": "Primary source file used to resolve the runtime package context.",
        "runtime_files": "Depset of runtime files required to stage the workspace.",
    },
)
_WORKSPACE_SETUP_TEMPLATE = """#!/usr/bin/env bash
set -euo pipefail
runfiles_dir="${RUNFILES_DIR:-$0.runfiles}"
workspace_root="${runfiles_dir}/_main"
workspace_root="$(cd "${workspace_root}" && pwd -P)"
bun_bin="${runfiles_dir}/_main/__BUN_SHORT_PATH__"
primary_source=""
if [[ -n "__PRIMARY_SOURCE_SHORT_PATH__" ]]; then
primary_source="${runfiles_dir}/_main/__PRIMARY_SOURCE_SHORT_PATH__"
fi
package_json=""
if [[ -n "__PACKAGE_JSON_SHORT_PATH__" ]]; then
package_json="${runfiles_dir}/_main/__PACKAGE_JSON_SHORT_PATH__"
fi
package_rel_dir_hint="__PACKAGE_DIR_HINT__"
working_dir_mode="__WORKING_DIR_MODE__"
normalize_rel_dir() {
local value="$1"
if [[ -z "${value}" || "${value}" == "." ]]; then
echo "."
else
echo "${value#./}"
fi
}
dirname_rel_dir() {
local value
value="$(normalize_rel_dir "$1")"
if [[ "${value}" == "." || "${value}" != */* ]]; then
echo "."
return 0
fi
echo "${value%/*}"
}
first_path_component() {
local value
value="$(normalize_rel_dir "$1")"
if [[ "${value}" == "." ]]; then
echo ""
return 0
fi
echo "${value%%/*}"
}
rel_dir_from_abs_path() {
local absolute_path="$1"
if [[ "${absolute_path}" == "${workspace_root}" ]]; then
echo "."
return 0
fi
echo "${absolute_path#"${workspace_root}/"}"
}
find_package_rel_dir_for_path() {
local path="$1"
local dir="$1"
if [[ -f "${dir}" ]]; then
dir="$(dirname "${dir}")"
fi
while [[ "${dir}" == "${workspace_root}"* ]]; do
if [[ -f "${dir}/package.json" ]]; then
rel_dir_from_abs_path "${dir}"
return 0
fi
if [[ "${dir}" == "${workspace_root}" ]]; then
break
fi
dir="$(dirname "${dir}")"
done
rel_dir_from_abs_path "$(dirname "${path}")"
}
find_working_rel_dir_for_path() {
local path="$1"
local dir="$1"
if [[ -f "${dir}" ]]; then
dir="$(dirname "${dir}")"
fi
while [[ "${dir}" == "${workspace_root}"* ]]; do
if [[ -f "${dir}/.env" || -f "${dir}/package.json" ]]; then
rel_dir_from_abs_path "${dir}"
return 0
fi
if [[ "${dir}" == "${workspace_root}" ]]; then
break
fi
dir="$(dirname "${dir}")"
done
rel_dir_from_abs_path "$(dirname "${path}")"
}
select_primary_node_modules() {
local selected=""
local fallback=""
while IFS= read -r node_modules_dir; do
if [[ -z "${fallback}" ]]; then
fallback="${node_modules_dir}"
fi
if [[ ! -d "${node_modules_dir}/.bun" ]]; then
continue
fi
if [[ "${node_modules_dir}" != *"/runfiles/_main/"* ]]; then
selected="${node_modules_dir}"
break
fi
if [[ -z "${selected}" ]]; then
selected="${node_modules_dir}"
fi
done < <(find -L "${runfiles_dir}" -type d -name node_modules 2>/dev/null | sort)
if [[ -n "${selected}" ]]; then
echo "${selected}"
else
echo "${fallback}"
fi
}
link_top_level_entries() {
local source_root="$1"
local destination_root="$2"
local skipped_entry="$3"
local entry=""
local entry_name=""
shopt -s dotglob nullglob
for entry in "${source_root}"/* "${source_root}"/.[!.]* "${source_root}"/..?*; do
entry_name="$(basename "${entry}")"
if [[ "${entry_name}" == "." || "${entry_name}" == ".." ]]; then
continue
fi
if [[ -n "${skipped_entry}" && "${entry_name}" == "${skipped_entry}" ]]; then
continue
fi
ln -s "${entry}" "${destination_root}/${entry_name}"
done
shopt -u dotglob nullglob
}
materialize_package_path() {
local source_root="$1"
local destination_root="$2"
local package_rel_dir
package_rel_dir="$(normalize_rel_dir "$3")"
if [[ "${package_rel_dir}" == "." ]]; then
return 0
fi
local source_cursor="${source_root}"
local destination_cursor="${destination_root}"
local parts=()
local current="${package_rel_dir}"
while [[ -n "${current}" ]]; do
if [[ "${current}" == */* ]]; then
parts+=("${current%%/*}")
current="${current#*/}"
else
parts+=("${current}")
break
fi
done
local index=0
while [[ ${index} -lt $((${#parts[@]} - 1)) ]]; do
local part="${parts[${index}]}"
local next_part="${parts[$((index + 1))]}"
source_cursor="${source_cursor}/${part}"
destination_cursor="${destination_cursor}/${part}"
mkdir -p "${destination_cursor}"
local sibling=""
local sibling_name=""
shopt -s dotglob nullglob
for sibling in "${source_cursor}"/* "${source_cursor}"/.[!.]* "${source_cursor}"/..?*; do
sibling_name="$(basename "${sibling}")"
if [[ "${sibling_name}" == "." || "${sibling_name}" == ".." || "${sibling_name}" == "${next_part}" ]]; then
continue
fi
if [[ ! -e "${destination_cursor}/${sibling_name}" ]]; then
ln -s "${sibling}" "${destination_cursor}/${sibling_name}"
fi
done
shopt -u dotglob nullglob
index=$((index + 1))
done
mkdir -p "${destination_root}/${package_rel_dir}"
}
materialize_directory_entries() {
local source_root="$1"
local destination_root="$2"
local entry=""
local entry_name=""
mkdir -p "${destination_root}"
shopt -s dotglob nullglob
for entry in "${source_root}"/* "${source_root}"/.[!.]* "${source_root}"/..?*; do
entry_name="$(basename "${entry}")"
if [[ "${entry_name}" == "." || "${entry_name}" == ".." ]]; then
continue
fi
rm -rf "${destination_root}/${entry_name}"
ln -s "${entry}" "${destination_root}/${entry_name}"
done
shopt -u dotglob nullglob
}
stage_workspace_view() {
local source_root="$1"
local destination_root="$2"
local package_rel_dir
package_rel_dir="$(normalize_rel_dir "$3")"
local skipped_entry
skipped_entry="$(first_path_component "${package_rel_dir}")"
link_top_level_entries "${source_root}" "${destination_root}" "${skipped_entry}"
if [[ "${package_rel_dir}" == "." ]]; then
return 0
fi
materialize_package_path "${source_root}" "${destination_root}" "${package_rel_dir}"
materialize_directory_entries "${source_root}/${package_rel_dir}" "${destination_root}/${package_rel_dir}"
}
build_workspace_package_map() {
local root="$1"
local out="$2"
python3 - "${root}" >"${out}" <<'PY'
import json
import os
import sys
root = os.path.abspath(sys.argv[1])
for dirpath, dirnames, filenames in os.walk(root):
dirnames[:] = [name for name in dirnames if name != "node_modules"]
if "package.json" not in filenames:
continue
manifest_path = os.path.join(dirpath, "package.json")
try:
with open(manifest_path, "r", encoding="utf-8") as manifest_file:
package_name = json.load(manifest_file).get("name")
except Exception:
continue
if not isinstance(package_name, str):
continue
rel_dir = os.path.relpath(dirpath, root)
if rel_dir == ".":
rel_dir = "."
print(f"{package_name}\t{rel_dir}")
PY
}
workspace_package_rel_dir_for_source() {
local source="$1"
local manifest_path="${source}/package.json"
local package_name=""
if [[ ! -f "${manifest_path}" ]]; then
return 1
fi
package_name="$(python3 - "${manifest_path}" <<'PY'
import json
import sys
try:
with open(sys.argv[1], "r", encoding="utf-8") as manifest_file:
package_name = json.load(manifest_file).get("name", "")
except Exception:
package_name = ""
if isinstance(package_name, str):
print(package_name)
PY
)"
if [[ -z "${package_name}" ]]; then
return 1
fi
awk -F '\t' -v name="${package_name}" '$1 == name { print $2; exit }' "${workspace_package_map}"
}
link_node_modules_entry() {
local source="$1"
local destination="$2"
local workspace_rel_dir=""
rm -rf "${destination}"
workspace_rel_dir="$(workspace_package_rel_dir_for_source "${source}" || true)"
if [[ -n "${workspace_rel_dir}" ]]; then
ln -s "${runtime_workspace}/${workspace_rel_dir}" "${destination}"
return 0
fi
if [[ -L "${source}" ]]; then
ln -s "$(readlink "${source}")" "${destination}"
else
ln -s "${source}" "${destination}"
fi
}
mirror_node_modules_dir() {
local source_dir="$1"
local destination_dir="$2"
local entry=""
local entry_name=""
local scoped_entry=""
local scoped_name=""
rm -rf "${destination_dir}"
mkdir -p "${destination_dir}"
shopt -s dotglob nullglob
for entry in "${source_dir}"/* "${source_dir}"/.[!.]* "${source_dir}"/..?*; do
entry_name="$(basename "${entry}")"
if [[ "${entry_name}" == "." || "${entry_name}" == ".." || "${entry_name}" == ".rules_bun" ]]; then
continue
fi
if [[ -d "${entry}" && ! -L "${entry}" && "${entry_name}" == @* ]]; then
mkdir -p "${destination_dir}/${entry_name}"
for scoped_entry in "${entry}"/* "${entry}"/.[!.]* "${entry}"/..?*; do
scoped_name="$(basename "${scoped_entry}")"
if [[ "${scoped_name}" == "." || "${scoped_name}" == ".." ]]; then
continue
fi
link_node_modules_entry "${scoped_entry}" "${destination_dir}/${entry_name}/${scoped_name}"
done
continue
fi
link_node_modules_entry "${entry}" "${destination_dir}/${entry_name}"
done
shopt -u dotglob nullglob
}
find_install_repo_node_modules() {
local repo_root="$1"
local package_rel_dir
package_rel_dir="$(normalize_rel_dir "$2")"
if [[ "${package_rel_dir}" != "." ]]; then
local candidate="${package_rel_dir}"
while true; do
if [[ -d "${repo_root}/${candidate}/node_modules" ]]; then
echo "${repo_root}/${candidate}/node_modules"
return 0
fi
if [[ "${candidate}" != */* ]]; then
break
fi
candidate="${candidate%/*}"
done
fi
if [[ -d "${repo_root}/node_modules" ]]; then
echo "${repo_root}/node_modules"
return 0
fi
return 1
}
mirror_install_repo_workspace_node_modules() {
local repo_root="$1"
local destination_root="$2"
while IFS= read -r install_node_modules; do
local rel_path="${install_node_modules#${repo_root}/}"
local destination="${destination_root}/${rel_path}"
mkdir -p "$(dirname "${destination}")"
mirror_node_modules_dir "${install_node_modules}" "${destination}"
done < <(find "${repo_root}" \
-path "${repo_root}/node_modules" -prune -o \
-type d -name node_modules -print 2>/dev/null | sort)
}
build_runtime_path() {
local workspace_dir="$1"
local package_dir="$2"
local entries=()
if [[ -d "${package_dir}/node_modules/.bin" ]]; then
entries+=("${package_dir}/node_modules/.bin")
fi
if [[ -d "${workspace_dir}/node_modules/.bin" && "${workspace_dir}/node_modules/.bin" != "${package_dir}/node_modules/.bin" ]]; then
entries+=("${workspace_dir}/node_modules/.bin")
fi
if [[ -n "${PATH:-}" ]]; then
entries+=("${PATH}")
fi
if [[ ${#entries[@]} -eq 0 ]]; then
echo ""
return 0
fi
local path_value=""
local entry=""
for entry in "${entries[@]}"; do
if [[ -z "${path_value}" ]]; then
path_value="${entry}"
else
path_value="${path_value}:${entry}"
fi
done
echo "${path_value}"
}
resolve_package_rel_dir() {
if [[ -n "${package_rel_dir_hint}" && "${package_rel_dir_hint}" != "." ]]; then
normalize_rel_dir "${package_rel_dir_hint}"
return 0
fi
if [[ -n "${package_json}" ]]; then
find_package_rel_dir_for_path "${package_json}"
return 0
fi
if [[ -n "${primary_source}" ]]; then
find_package_rel_dir_for_path "${primary_source}"
return 0
fi
echo "."
}
resolve_execution_rel_dir() {
local package_rel_dir="$1"
case "${working_dir_mode}" in
workspace)
echo "."
;;
package)
echo "${package_rel_dir}"
;;
entry_point)
if [[ -n "${primary_source}" ]]; then
find_working_rel_dir_for_path "${primary_source}"
else
echo "${package_rel_dir}"
fi
;;
*)
echo "${package_rel_dir}"
;;
esac
}
package_rel_dir="$(resolve_package_rel_dir)"
execution_rel_dir="$(resolve_execution_rel_dir "${package_rel_dir}")"
runtime_workspace="$(mktemp -d)"
cleanup_runtime_workspace() {
rm -rf "${runtime_workspace}"
}
stage_workspace_view "${workspace_root}" "${runtime_workspace}" "${package_rel_dir}"
runtime_package_dir="${runtime_workspace}"
if [[ "${package_rel_dir}" != "." ]]; then
runtime_package_dir="${runtime_workspace}/${package_rel_dir}"
fi
runtime_exec_dir="${runtime_workspace}"
if [[ "${execution_rel_dir}" != "." ]]; then
runtime_exec_dir="${runtime_workspace}/${execution_rel_dir}"
fi
workspace_package_map="${runtime_workspace}/.rules_bun_workspace_packages.tsv"
build_workspace_package_map "${runtime_workspace}" "${workspace_package_map}"
primary_node_modules="$(select_primary_node_modules)"
install_repo_root=""
if [[ -n "${primary_node_modules}" ]]; then
install_repo_root="$(dirname "${primary_node_modules}")"
mirror_node_modules_dir "${primary_node_modules}" "${runtime_workspace}/node_modules"
fi
if [[ -n "${install_repo_root}" ]]; then
resolved_install_node_modules="$(find_install_repo_node_modules "${install_repo_root}" "${package_rel_dir}" || true)"
if [[ -n "${resolved_install_node_modules}" && "${resolved_install_node_modules}" != "${install_repo_root}/node_modules" ]]; then
mirror_node_modules_dir "${resolved_install_node_modules}" "${runtime_package_dir}/node_modules"
fi
mirror_install_repo_workspace_node_modules "${install_repo_root}" "${runtime_workspace}"
fi
if [[ ! -e "${runtime_package_dir}/node_modules" && -e "${runtime_workspace}/node_modules" && "${runtime_package_dir}" != "${runtime_workspace}" ]]; then
ln -s "${runtime_workspace}/node_modules" "${runtime_package_dir}/node_modules"
fi
runtime_path="$(build_runtime_path "${runtime_workspace}" "${runtime_package_dir}")"
if [[ -n "${runtime_path}" ]]; then
export PATH="${runtime_path}"
fi
"""
def _shell_quote(value):
    """Wraps value in single quotes for POSIX shells, escaping embedded quotes.

    Each embedded single quote becomes '"'"' (close quote, quoted quote,
    reopen quote), which is the standard POSIX-safe escaping sequence.
    """
    escaped = value.replace("'", "'\"'\"'")
    return "'{}'".format(escaped)
def _dirname(path):
    """Returns the parent directory of path, mirroring POSIX dirname behavior.

    Empty or "." inputs yield ".", a path whose only slash is leading yields
    "/", and a path with no slash at all yields ".".
    """
    if path == "" or path == ".":
        return "."
    slash = path.rfind("/")
    if slash > 0:
        return path[:slash]
    if slash == 0:
        return "/"
    return "."
def find_install_metadata_file(files):
    """Returns the rules_bun install metadata File from files, or None.

    Scans for the first file whose short_path ends with the
    node_modules/.rules_bun/install.json marker written by bun_install.
    """
    marker_suffix = "node_modules/.rules_bun/install.json"
    for candidate in files:
        if candidate.short_path.endswith(marker_suffix):
            return candidate
    return None
def resolve_node_modules_roots(files, workspace_dir = ""):
    """Derives node_modules root directories from a flat list of Files.

    When the install metadata file is present, the shared root is its
    grandparent directory. Otherwise the shared root falls back to the
    first "/node_modules/" segment of the shortest file path. When
    workspace_dir is given, a workspace-scoped root is also detected from
    the first file containing "/<workspace_dir>/node_modules/".

    Returns:
        A struct with install_metadata_file, node_modules_root (workspace
        root when found, else the shared root), and shared_node_modules_root.
    """
    metadata = find_install_metadata_file(files)
    shared_root = _dirname(_dirname(metadata.path)) if metadata else None
    marker = ""
    if workspace_dir:
        marker = "/%s/node_modules/" % workspace_dir.strip("/")
    workspace_root = None
    shortest = None
    for candidate in files:
        path = candidate.path
        # Only the first file matching the workspace marker wins.
        if marker and workspace_root == None and marker in path:
            end = path.find(marker) + len(marker) - 1
            workspace_root = path[:end]
        if shortest == None or len(path) < len(shortest):
            shortest = path
    if shared_root == None and shortest:
        nm_index = shortest.find("/node_modules/")
        if nm_index >= 0:
            shared_root = shortest[:nm_index + len("/node_modules")]
    return struct(
        install_metadata_file = metadata,
        node_modules_root = workspace_root or shared_root,
        shared_node_modules_root = shared_root,
    )
def create_bun_workspace_info(ctx, primary_file = None, package_json = None, package_dir_hint = ".", extra_files = None):
    """Builds the BunWorkspaceInfo provider and writes per-target metadata.

    Gathers the target's runtime files (primary source, optional
    package.json, extra files, and any node_modules files from the
    ctx.attr.node_modules attribute) and writes a
    <name>.bun_workspace.json file describing them.

    Args:
        ctx: Rule context; ctx.attr.node_modules is read when present.
        primary_file: Optional main source File for the target.
        package_json: Optional package.json File; skipped when identical
            to primary_file.
        package_dir_hint: Workspace-relative package directory; falls back
            to "." when falsy.
        extra_files: Optional list of additional Files to include.

    Returns:
        A BunWorkspaceInfo provider carrying the collected files plus the
        generated metadata file.
    """
    direct_runtime_files = []
    if primary_file:
        direct_runtime_files.append(primary_file)
    # Avoid listing package.json twice when it is also the primary file.
    if package_json and package_json != primary_file:
        direct_runtime_files.append(package_json)
    direct_runtime_files.extend(extra_files or [])
    node_modules_files = depset()
    install_metadata_file = None
    # getattr guards rules that do not declare a node_modules attribute.
    if getattr(ctx.attr, "node_modules", None):
        node_modules_files = ctx.attr.node_modules[DefaultInfo].files
        install_metadata_file = find_install_metadata_file(node_modules_files.to_list())
    metadata_file = ctx.actions.declare_file(ctx.label.name + ".bun_workspace.json")
    # Empty strings mark absent inputs so the runtime wrapper can branch on them.
    ctx.actions.write(
        output = metadata_file,
        content = json.encode({
            "install_metadata": install_metadata_file.short_path if install_metadata_file else "",
            "package_dir_hint": package_dir_hint or ".",
            "package_json": package_json.short_path if package_json else "",
            "primary_file": primary_file.short_path if primary_file else "",
        }) + "\n",
    )
    direct_runtime_files.append(metadata_file)
    runtime_files = depset(
        direct = direct_runtime_files,
        transitive = [node_modules_files],
    )
    return BunWorkspaceInfo(
        install_metadata_file = install_metadata_file,
        metadata_file = metadata_file,
        node_modules_files = node_modules_files,
        package_dir_hint = package_dir_hint or ".",
        package_json = package_json,
        primary_file = primary_file,
        runtime_files = runtime_files,
    )
def workspace_runfiles(ctx, workspace_info, direct_files = None, transitive_files = None):
    """Builds runfiles that merge workspace runtime files with extra inputs.

    Args:
        ctx: Rule context used to construct the runfiles object.
        workspace_info: BunWorkspaceInfo whose runtime_files are always included.
        direct_files: Optional list of Files added directly.
        transitive_files: Optional list of depsets merged in transitively.
    """
    transitive_sets = [workspace_info.runtime_files]
    if transitive_files:
        transitive_sets.extend(transitive_files)
    merged = depset(transitive = transitive_sets)
    return ctx.runfiles(files = direct_files or [], transitive_files = merged)
def render_workspace_setup(
        bun_short_path,
        working_dir_mode,
        primary_source_short_path = "",
        package_json_short_path = "",
        package_dir_hint = "."):
    """Renders the workspace setup shell snippet from its template.

    Substitutes each __PLACEHOLDER__ token in _WORKSPACE_SETUP_TEMPLATE with
    the corresponding caller-provided value; package_dir_hint falls back to
    "." when falsy.
    """
    substitutions = {
        "__BUN_SHORT_PATH__": bun_short_path,
        "__PRIMARY_SOURCE_SHORT_PATH__": primary_source_short_path,
        "__PACKAGE_JSON_SHORT_PATH__": package_json_short_path,
        "__PACKAGE_DIR_HINT__": package_dir_hint or ".",
        "__WORKING_DIR_MODE__": working_dir_mode,
    }
    rendered = _WORKSPACE_SETUP_TEMPLATE
    for placeholder, value in substitutions.items():
        rendered = rendered.replace(placeholder, value)
    return rendered

12
js/BUILD.bazel Normal file
View File

@@ -0,0 +1,12 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
package(default_visibility = ["//visibility:public"])
exports_files(["defs.bzl"])
bzl_library(
name = "defs_bzl",
srcs = ["defs.bzl"],
visibility = ["//visibility:public"],
deps = ["//internal:js_compat_bzl"],
)

12
js/defs.bzl Normal file
View File

@@ -0,0 +1,12 @@
"""rules_js-style public API backed by Bun."""
load("//internal:js_compat.bzl", _JsInfo = "JsInfo", _js_binary = "js_binary", _js_library = "js_library", _js_run_devserver = "js_run_devserver", _js_test = "js_test", _ts_library = "ts_library")
visibility("public")
JsInfo = _JsInfo
js_binary = _js_binary
js_test = _js_test
js_run_devserver = _js_run_devserver
js_library = _js_library
ts_library = _ts_library

22
npm/BUILD.bazel Normal file
View File

@@ -0,0 +1,22 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
package(default_visibility = ["//visibility:public"])
exports_files([
"extensions.bzl",
"repositories.bzl",
])
bzl_library(
name = "extensions_bzl",
srcs = ["extensions.bzl"],
visibility = ["//visibility:public"],
deps = ["//internal:bun_install_bzl"],
)
bzl_library(
name = "repositories_bzl",
srcs = ["repositories.bzl"],
visibility = ["//visibility:public"],
deps = ["//internal:bun_install_bzl"],
)

28
npm/extensions.bzl Normal file
View File

@@ -0,0 +1,28 @@
load("//internal:bun_install.bzl", "bun_install_repository")
# Tag class for npm_translate_lock.translate(): one tag per external repo to
# generate from a Bun-managed package.json/bun.lock pair.
_translate = tag_class(
    attrs = {
        # Name of the generated external repository (e.g. "npm").
        "name": attr.string(mandatory = True),
        # Label of the package.json driving the install.
        "package_json": attr.label(mandatory = True),
        # Label of the Bun lockfile (bun.lock) pinning dependency versions.
        "lockfile": attr.label(mandatory = True),
        # Extra files the install step may read (patches, .npmrc, etc.).
        "install_inputs": attr.label_list(allow_files = True),
        # Run the install with an isolated HOME to avoid user-level config.
        "isolated_home": attr.bool(default = True),
    },
)
def _npm_translate_lock_impl(module_ctx):
    """Creates one bun_install_repository per translate tag across all modules.

    NOTE(review): two modules declaring the same tag name would collide on
    the repository name — TODO confirm desired behavior for that case.
    """
    for module in module_ctx.modules:
        for tag in module.tags.translate:
            bun_install_repository(
                name = tag.name,
                package_json = tag.package_json,
                bun_lockfile = tag.lockfile,
                install_inputs = tag.install_inputs,
                isolated_home = tag.isolated_home,
                visible_repo_name = tag.name,
            )
npm_translate_lock = module_extension(
implementation = _npm_translate_lock_impl,
tag_classes = {"translate": _translate},
)

11
npm/repositories.bzl Normal file
View File

@@ -0,0 +1,11 @@
load("//internal:bun_install.bzl", "bun_install_repository")
def npm_translate_lock(name, package_json, lockfile, install_inputs = [], isolated_home = True):
    """Legacy WORKSPACE entry point mirroring the npm_translate_lock extension.

    Creates a Bun-installed external repository named `name` from the given
    package.json and Bun lockfile.

    Args:
        name: Name of the generated external repository (e.g. "npm").
        package_json: Label of the package.json driving the install.
        lockfile: Label of the Bun lockfile (bun.lock).
        install_inputs: Extra files the install step may read.
        isolated_home: Run the install with an isolated HOME when True.
    """
    bun_install_repository(
        name = name,
        package_json = package_json,
        bun_lockfile = lockfile,
        install_inputs = install_inputs,
        isolated_home = isolated_home,
        visible_repo_name = name,
    )

View File

@@ -6,3 +6,10 @@ sh_test(
args = ["$(location //bun:extensions.bzl)"],
data = ["//bun:extensions.bzl"],
)
sh_test(
name = "npm_translate_lock_extension_shape_test",
srcs = ["npm_extension_shape_test.sh"],
args = ["$(location //npm:extensions.bzl)"],
data = ["//npm:extensions.bzl"],
)

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Shape test: asserts npm/extensions.bzl still declares the expected
# module_extension, tag class wiring, and mandatory attributes.
set -euo pipefail
extension_file="$1"
required_patterns=(
  'npm_translate_lock[[:space:]]*=[[:space:]]*module_extension\('
  'tag_classes[[:space:]]*=[[:space:]]*\{"translate":[[:space:]]*_translate\}'
  '"name":[[:space:]]*attr\.string\(mandatory[[:space:]]*=[[:space:]]*True\)'
  '"package_json":[[:space:]]*attr\.label\(mandatory[[:space:]]*=[[:space:]]*True\)'
  '"lockfile":[[:space:]]*attr\.label\(mandatory[[:space:]]*=[[:space:]]*True\)'
)
# set -e aborts on the first pattern grep fails to find.
for pattern in "${required_patterns[@]}"; do
  grep -Eq "${pattern}" "${extension_file}"
done

View File

@@ -0,0 +1,41 @@
load("//js:defs.bzl", "js_binary", "js_run_devserver", "js_test", "ts_library")
load("@rules_shell//shell:sh_test.bzl", "sh_test")
ts_library(
name = "helper_lib",
srcs = ["helper.ts"],
data = ["payload.txt"],
)
js_binary(
name = "compat_bin",
entry_point = "main.ts",
deps = [":helper_lib"],
args = ["compat-mode"],
)
sh_test(
name = "js_binary_compat_test",
srcs = ["run_binary.sh"],
args = ["$(location :compat_bin)"],
data = [":compat_bin"],
)
js_test(
name = "compat_suite",
entry_point = "app.test.ts",
deps = [":helper_lib"],
)
js_run_devserver(
name = "compat_devserver",
tool = ":compat_bin",
args = ["devserver-mode"],
)
sh_test(
name = "js_run_devserver_compat_test",
srcs = ["run_devserver.sh"],
args = ["$(location :compat_devserver)"],
data = [":compat_devserver"],
)

View File

@@ -0,0 +1,7 @@
// Verifies that the js_test compat layer makes both the dep (helper_lib)
// and its data file (payload.txt) resolvable at test runtime.
import { expect, test } from "bun:test";
import { helperMessage } from "./helper.ts";
test("js_test compatibility layer propagates deps and data", () => {
  expect(helperMessage()).toBe("helper:payload-from-lib");
});

View File

@@ -0,0 +1,6 @@
import { readFileSync } from "node:fs";

/** Reads the sibling payload.txt and returns it prefixed with "helper:". */
export function helperMessage(): string {
  const payloadUrl = new URL("./payload.txt", import.meta.url);
  const payload = readFileSync(payloadUrl, "utf8").trim();
  return `helper:${payload}`;
}

View File

@@ -0,0 +1,3 @@
import { helperMessage } from "./helper.ts";

// Prints the helper message followed by any CLI args (Bun.argv[0..1] are
// the runtime and script paths); trim handles the no-args case.
const extraArgs = Bun.argv.slice(2).join(" ");
console.log(`${helperMessage()} ${extraArgs}`.trim());

View File

@@ -0,0 +1 @@
payload-from-lib

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Runs the js_binary compat target and asserts its exact stdout.
set -euo pipefail
binary="$1"
expected="helper:payload-from-lib compat-mode"
actual="$("${binary}")"
if [[ "${actual}" != "${expected}" ]]; then
  echo "unexpected output: ${actual}" >&2
  exit 1
fi

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Runs the js_run_devserver compat target and asserts its exact stdout.
set -euo pipefail
binary="$1"
expected="helper:payload-from-lib compat-mode devserver-mode"
actual="$("${binary}")"
if [[ "${actual}" != "${expected}" ]]; then
  echo "unexpected output: ${actual}" >&2
  exit 1
fi

View File

@@ -0,0 +1,119 @@
#!/usr/bin/env bash
set -euo pipefail
nix_cmd="${NIX:-/nix/var/nix/profiles/default/bin/nix}"
if [[ ! -x ${nix_cmd} ]]; then
nix_cmd="$(command -v nix || true)"
fi
if [[ -z ${nix_cmd} || ! -x ${nix_cmd} ]]; then
echo "nix is required to launch bazel from the repo dev shell" >&2
exit 1
fi
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
rules_bun_root="$(cd "${script_dir}/../.." && pwd -P)"
workdir="$(mktemp -d)"
trap 'rm -rf "${workdir}"' EXIT
fixture_dir="${workdir}/fixture"
mkdir -p "${fixture_dir}"
cat >"${fixture_dir}/package.json" <<'JSON'
{
"name": "npm-compat-test",
"type": "module",
"dependencies": {
"is-number": "7.0.0"
}
}
JSON
cat >"${fixture_dir}/main.js" <<'JS'
import isNumber from "is-number";
console.log(`compat:${isNumber(42)}`);
JS
(
cd "${rules_bun_root}" &&
"${nix_cmd}" develop -c bash -lc 'bun install --cwd "$1" >/dev/null' bash "${fixture_dir}"
)
rm -rf "${fixture_dir}/node_modules"
cat >"${fixture_dir}/MODULE.bazel" <<EOF
module(
name = "npm_compat_test",
)
bazel_dep(name = "rules_bun", version = "0.2.2")
local_path_override(
module_name = "rules_bun",
path = "${rules_bun_root}",
)
bun_ext = use_extension("@rules_bun//bun:extensions.bzl", "bun")
use_repo(
bun_ext,
"bun_darwin_aarch64",
"bun_darwin_x64",
"bun_linux_aarch64",
"bun_linux_x64",
"bun_windows_x64",
)
npm_ext = use_extension("@rules_bun//npm:extensions.bzl", "npm_translate_lock")
npm_ext.translate(
name = "npm",
package_json = "//:package.json",
lockfile = "//:bun.lock",
)
use_repo(npm_ext, "npm")
register_toolchains(
"@rules_bun//bun:darwin_aarch64_toolchain",
"@rules_bun//bun:darwin_x64_toolchain",
"@rules_bun//bun:linux_aarch64_toolchain",
"@rules_bun//bun:linux_x64_toolchain",
"@rules_bun//bun:windows_x64_toolchain",
)
EOF
cat >"${fixture_dir}/BUILD.bazel" <<'EOF'
load("@npm//:defs.bzl", "npm_link_all_packages")
load("@rules_bun//js:defs.bzl", "js_binary")
exports_files([
"bun.lock",
"main.js",
"package.json",
])
npm_link_all_packages()
js_binary(
name = "app",
entry_point = "main.js",
node_modules = ":node_modules",
)
EOF
output="$(
cd "${rules_bun_root}" &&
"${nix_cmd}" develop -c bash -lc 'cd "$1" && bazel run //:app' bash "${fixture_dir}"
)"
if [[ ${output} != *"compat:true"* ]]; then
echo "unexpected output: ${output}" >&2
exit 1
fi
query_output="$(
cd "${rules_bun_root}" &&
"${nix_cmd}" develop -c bash -lc 'cd "$1" && bazel query //:npm__is_number' bash "${fixture_dir}"
)"
if ! grep -Fxq "//:npm__is_number" <<<"${query_output}"; then
echo "expected npm_link_all_packages to create //:npm__is_number" >&2
exit 1
fi