22 Commits

Author SHA1 Message Date
eric
ef010b3c12 chore(release): v1.0.3
Some checks failed
CI / test (ubuntu-latest, linux-x64) (push) Failing after 1m14s
Docs Pages / deploy (push) Failing after 42s
Copilot Setup Steps / copilot-setup-steps (push) Failing after 39s
CI / test (macos-14, darwin-arm64) (push) Has been cancelled
CI / test (windows-latest, windows) (push) Has been cancelled
2026-03-15 13:58:34 +01:00
eric
88336c869b fix: tests 2026-03-15 13:58:23 +01:00
eric
f975f12553 ci: fix dependencies 2026-03-15 13:54:18 +01:00
eric
54109136ab chore(release): v1.0.2
Some checks failed
CI / test (ubuntu-latest, linux-x64) (push) Failing after 32s
Docs Pages / deploy (push) Failing after 36s
Copilot Setup Steps / copilot-setup-steps (push) Failing after 36s
CI / test (macos-14, darwin-arm64) (push) Has been cancelled
CI / test (windows-latest, windows) (push) Has been cancelled
2026-03-15 13:08:19 +01:00
eric
3e5ed611fe ci: something 2026-03-15 13:08:07 +01:00
eric
3085d3ce98 ci: something 2026-03-15 13:01:47 +01:00
eric
143db9c20e ci: remove redundant go dep 2026-03-15 12:45:59 +01:00
eric
4f9dff66c1 ci: remove redundant go dep 2026-03-15 12:17:25 +01:00
eric
fbe1eb3fc8 ci: pin bazel version 2026-03-15 12:15:40 +01:00
eric
08f2abc60e ci: print logs instead of paths 2026-03-15 12:06:28 +01:00
eric
65404a1883 chore(release): v1.0.1
Some checks failed
CI / test (ubuntu-latest, linux-x64) (push) Failing after 59s
Docs Pages / deploy (push) Failing after 34s
Copilot Setup Steps / copilot-setup-steps (push) Failing after 37s
CI / test (macos-14, darwin-arm64) (push) Has been cancelled
CI / test (windows-latest, windows) (push) Has been cancelled
2026-03-15 11:57:45 +01:00
eric
f317a618b8 fix: context leak 2026-03-15 11:50:25 +01:00
eric
683de60603 fix: windows targets 2026-03-15 11:24:36 +01:00
eric
a58028d063 chore(release): v1.0.0
Some checks failed
CI / test (ubuntu-latest, linux-x64) (push) Failing after 37s
Docs Pages / deploy (push) Failing after 39s
Copilot Setup Steps / copilot-setup-steps (push) Failing after 40s
CI / test (macos-14, darwin-arm64) (push) Has been cancelled
CI / test (windows-latest, windows) (push) Has been cancelled
2026-03-15 11:04:58 +01:00
eric
626a6640f8 feat: proper windows support 2026-03-15 11:04:44 +01:00
eric
4f8e27cd74 chore(release): v0.5.0
Some checks failed
CI / test (ubuntu-latest, linux-x64) (push) Failing after 34s
Docs Pages / deploy (push) Failing after 44s
Copilot Setup Steps / copilot-setup-steps (push) Failing after 25s
CI / test (macos-14, darwin-arm64) (push) Has been cancelled
CI / test (windows-latest, windows) (push) Has been cancelled
2026-03-15 09:34:22 +01:00
eric
31c42a8638 fix: tests 2026-03-15 09:34:07 +01:00
eric
769b95d05b chore(release): v0.4.0
Some checks failed
CI / test (ubuntu-latest, linux-x64) (push) Failing after 31s
Copilot Setup Steps / copilot-setup-steps (push) Failing after 36s
CI / test (macos-14, darwin-arm64) (push) Has been cancelled
CI / test (windows-latest, windows) (push) Has been cancelled
2026-03-15 01:20:57 +01:00
eric
2a25cfb91a fix: include .env files 2026-03-15 01:20:45 +01:00
eric
2a9bd09aa4 fix: include .env files 2026-03-15 01:13:52 +01:00
eric
4b7ebb1536 fix: include .env files 2026-03-15 01:07:36 +01:00
73d4625420 Merge pull request 'feature/add-build-compile-rules' (#4) from feature/add-build-compile-rules into main
Some checks failed
CI / test (ubuntu-latest, linux-x64) (push) Failing after 37s
Docs Pages / deploy (push) Failing after 43s
CI / test (macos-14, darwin-arm64) (push) Has been cancelled
CI / test (windows-latest, windows) (push) Has been cancelled
Reviewed-on: #4
2026-03-15 00:01:19 +00:00
94 changed files with 4241 additions and 1791 deletions

1
.bazelversion Normal file
View File

@@ -0,0 +1 @@
9.0.1

View File

@@ -25,17 +25,58 @@ jobs:
phase8_target: windows
runs-on: ${{ matrix.os }}
env:
USE_BAZEL_VERSION: 9.0.0
USE_BAZEL_VERSION: 9.0.1
steps:
- uses: actions/checkout@v4
- uses: bazel-contrib/setup-bazel@0.15.0
- uses: bazel-contrib/setup-bazel@0.18.0
with:
bazelisk-cache: true
repository-cache: true
external-cache: true
disk-cache: ci-${{ matrix.phase8_target }}
cache-save: ${{ github.event_name != 'pull_request' }}
- name: Install Nix
if: runner.os != 'Windows'
uses: cachix/install-nix-action@v31
with:
extra_nix_config: |
experimental-features = nix-command flakes
- name: Restore and save Nix store cache
if: runner.os != 'Windows'
uses: nix-community/cache-nix-action@v7
with:
primary-key: nix-${{ runner.os }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
restore-prefixes-first-match: nix-${{ runner.os }}-
- name: Install flake dependencies
if: runner.os != 'Windows'
run: nix develop --accept-flake-config -c true
- name: Set up Python
if: runner.os == 'Windows'
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Provide python3 shim
if: runner.os == 'Windows'
shell: bash
run: |
mkdir -p "$RUNNER_TEMP/bin"
cat >"$RUNNER_TEMP/bin/python3" <<'EOF'
#!/usr/bin/env bash
exec python "$@"
EOF
chmod +x "$RUNNER_TEMP/bin/python3"
echo "$RUNNER_TEMP/bin" >> "$GITHUB_PATH"
- name: Run tests (${{ matrix.phase8_target }})
if: runner.os != 'Windows'
shell: bash
run: |
echo "Phase 8 target: ${{ matrix.phase8_target }}"
bazel test //tests/...
mapfile -t targets < <(./tests/ci_test/phase8_ci_targets.sh "${{ matrix.phase8_target }}")
nix develop --accept-flake-config -c bazel test --test_output=errors "${targets[@]}"
- name: Run tests (${{ matrix.phase8_target }})
if: runner.os == 'Windows'
shell: bash
run: |
echo "Phase 8 target: ${{ matrix.phase8_target }}"
mapfile -t targets < <(./tests/ci_test/phase8_ci_targets.sh "${{ matrix.phase8_target }}")
bazel test --test_output=errors "${targets[@]}"

View File

@@ -23,14 +23,14 @@ jobs:
deploy:
runs-on: ubuntu-latest
env:
USE_BAZEL_VERSION: 9.0.0
USE_BAZEL_VERSION: 9.0.1
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
steps:
- uses: actions/checkout@v4
- uses: bazel-contrib/setup-bazel@0.15.0
- uses: bazel-contrib/setup-bazel@0.18.0
with:
bazelisk-cache: true
repository-cache: true

2
.gitignore vendored
View File

@@ -22,4 +22,6 @@ node_modules/
.env
!tests/.env
!tests/**/.env
!examples/.env
!examples/**/.env

3
.gitleaksignore Normal file
View File

@@ -0,0 +1,3 @@
tests/binary_test/.env:environment-file:1
tests/script_test/.env:environment-file:1
tests/binary_test/env_parent/.env:environment-file:1

View File

@@ -1,6 +1,6 @@
module(
name = "rules_bun",
version = "0.2.2",
version = "1.0.3",
)
# Core ruleset dependencies.

27
MODULE.bazel.lock generated
View File

@@ -34,8 +34,9 @@
"https://bcr.bazel.build/modules/bazel_features/1.3.0/MODULE.bazel": "cdcafe83ec318cda34e02948e81d790aab8df7a929cec6f6969f13a489ccecd9",
"https://bcr.bazel.build/modules/bazel_features/1.30.0/MODULE.bazel": "a14b62d05969a293b80257e72e597c2da7f717e1e69fa8b339703ed6731bec87",
"https://bcr.bazel.build/modules/bazel_features/1.33.0/MODULE.bazel": "8b8dc9d2a4c88609409c3191165bccec0e4cb044cd7a72ccbe826583303459f6",
"https://bcr.bazel.build/modules/bazel_features/1.33.0/source.json": "13617db3930328c2cd2807a0f13d52ca870ac05f96db9668655113265147b2a6",
"https://bcr.bazel.build/modules/bazel_features/1.4.1/MODULE.bazel": "e45b6bb2350aff3e442ae1111c555e27eac1d915e77775f6fdc4b351b758b5d7",
"https://bcr.bazel.build/modules/bazel_features/1.42.1/MODULE.bazel": "275a59b5406ff18c01739860aa70ad7ccb3cfb474579411decca11c93b951080",
"https://bcr.bazel.build/modules/bazel_features/1.42.1/source.json": "fcd4396b2df85f64f2b3bb436ad870793ecf39180f1d796f913cc9276d355309",
"https://bcr.bazel.build/modules/bazel_features/1.9.1/MODULE.bazel": "8f679097876a9b609ad1f60249c49d68bfab783dd9be012faf9d82547b14815a",
"https://bcr.bazel.build/modules/bazel_skylib/1.0.3/MODULE.bazel": "bcb0fd896384802d1ad283b4e4eb4d718eebd8cb820b0a2c3a347fb971afd9d8",
"https://bcr.bazel.build/modules/bazel_skylib/1.1.1/MODULE.bazel": "1add3e7d93ff2e6998f9e118022c84d163917d912f5afafb3058e3d2f1545b5e",
@@ -51,8 +52,8 @@
"https://bcr.bazel.build/modules/bazel_skylib/1.8.1/MODULE.bazel": "88ade7293becda963e0e3ea33e7d54d3425127e0a326e0d17da085a5f1f03ff6",
"https://bcr.bazel.build/modules/bazel_skylib/1.8.2/MODULE.bazel": "69ad6927098316848b34a9142bcc975e018ba27f08c4ff403f50c1b6e646ca67",
"https://bcr.bazel.build/modules/bazel_skylib/1.8.2/source.json": "34a3c8bcf233b835eb74be9d628899bb32999d3e0eadef1947a0a562a2b16ffb",
"https://bcr.bazel.build/modules/buildozer/8.2.1/MODULE.bazel": "61e9433c574c2bd9519cad7fa66b9c1d2b8e8d5f3ae5d6528a2c2d26e68d874d",
"https://bcr.bazel.build/modules/buildozer/8.2.1/source.json": "7c33f6a26ee0216f85544b4bca5e9044579e0219b6898dd653f5fb449cf2e484",
"https://bcr.bazel.build/modules/buildozer/8.5.1/MODULE.bazel": "a35d9561b3fc5b18797c330793e99e3b834a473d5fbd3d7d7634aafc9bdb6f8f",
"https://bcr.bazel.build/modules/buildozer/8.5.1/source.json": "e3386e6ff4529f2442800dee47ad28d3e6487f36a1f75ae39ae56c70f0cd2fbd",
"https://bcr.bazel.build/modules/google_benchmark/1.8.2/MODULE.bazel": "a70cf1bba851000ba93b58ae2f6d76490a9feb74192e57ab8e8ff13c34ec50cb",
"https://bcr.bazel.build/modules/googletest/1.11.0/MODULE.bazel": "3a83f095183f66345ca86aa13c58b59f9f94a2f81999c093d4eeaa2d262d12f4",
"https://bcr.bazel.build/modules/googletest/1.14.0.bcr.1/MODULE.bazel": "22c31a561553727960057361aa33bf20fb2e98584bc4fec007906e27053f80c6",
@@ -112,8 +113,8 @@
"https://bcr.bazel.build/modules/rules_cc/0.1.5/MODULE.bazel": "88dfc9361e8b5ae1008ac38f7cdfd45ad738e4fa676a3ad67d19204f045a1fd8",
"https://bcr.bazel.build/modules/rules_cc/0.2.0/MODULE.bazel": "b5c17f90458caae90d2ccd114c81970062946f49f355610ed89bebf954f5783c",
"https://bcr.bazel.build/modules/rules_cc/0.2.13/MODULE.bazel": "eecdd666eda6be16a8d9dc15e44b5c75133405e820f620a234acc4b1fdc5aa37",
"https://bcr.bazel.build/modules/rules_cc/0.2.14/MODULE.bazel": "353c99ed148887ee89c54a17d4100ae7e7e436593d104b668476019023b58df8",
"https://bcr.bazel.build/modules/rules_cc/0.2.14/source.json": "55d0a4587c5592fad350f6e698530f4faf0e7dd15e69d43f8d87e220c78bea54",
"https://bcr.bazel.build/modules/rules_cc/0.2.17/MODULE.bazel": "1849602c86cb60da8613d2de887f9566a6d354a6df6d7009f9d04a14402f9a84",
"https://bcr.bazel.build/modules/rules_cc/0.2.17/source.json": "3832f45d145354049137c0090df04629d9c2b5493dc5c2bf46f1834040133a07",
"https://bcr.bazel.build/modules/rules_cc/0.2.8/MODULE.bazel": "f1df20f0bf22c28192a794f29b501ee2018fa37a3862a1a2132ae2940a23a642",
"https://bcr.bazel.build/modules/rules_foreign_cc/0.9.0/MODULE.bazel": "c9e8c682bf75b0e7c704166d79b599f93b72cfca5ad7477df596947891feeef6",
"https://bcr.bazel.build/modules/rules_fuzzing/0.5.2/MODULE.bazel": "40c97d1144356f52905566c55811f13b299453a14ac7769dfba2ac38192337a8",
@@ -192,8 +193,8 @@
"moduleExtensions": {
"//bun:extensions.bzl%bun": {
"general": {
"bzlTransitiveDigest": "lzOUyaXDbkH922ruNkkwEF2cnI4m0XpzrOti0qypwtA=",
"usagesDigest": "/0BcCMA6AOzLhQaRK6DquxrCfpPHJUjSUaFz4zmQrsM=",
"bzlTransitiveDigest": "314UOH4dQIGBHGpxCwA7yzI++E2J3bjIc20m5MZhM7U=",
"usagesDigest": "prFfUGEBfgU9euYPzwB9Wux0cLYR+2MKCz+nUIyHYkI=",
"recordedInputs": [
"REPO_MAPPING:,bazel_tools bazel_tools"
],
@@ -283,8 +284,8 @@
},
"//bun:extensions.bzl%bun_install": {
"general": {
"bzlTransitiveDigest": "lzOUyaXDbkH922ruNkkwEF2cnI4m0XpzrOti0qypwtA=",
"usagesDigest": "f9pNm3AOxJDZmpHhL2vrrCo23IW33im/l/VYCTW2BWM=",
"bzlTransitiveDigest": "314UOH4dQIGBHGpxCwA7yzI++E2J3bjIc20m5MZhM7U=",
"usagesDigest": "rhYKu+vVZOHFMZV+ai/X42v6prfhsLITQYyLZaCnX+g=",
"recordedInputs": [
"REPO_MAPPING:,bazel_tools bazel_tools"
],
@@ -300,7 +301,7 @@
"omit": [],
"linker": "",
"backend": "",
"ignore_scripts": false,
"ignore_scripts": true,
"install_flags": [],
"visible_repo_name": "script_test_vite_node_modules"
}
@@ -316,7 +317,7 @@
"omit": [],
"linker": "",
"backend": "",
"ignore_scripts": false,
"ignore_scripts": true,
"install_flags": [],
"visible_repo_name": "script_test_vite_monorepo_node_modules"
}
@@ -332,7 +333,7 @@
"omit": [],
"linker": "",
"backend": "",
"ignore_scripts": false,
"ignore_scripts": true,
"install_flags": [],
"visible_repo_name": "script_test_paraglide_monorepo_node_modules"
}
@@ -348,7 +349,7 @@
"omit": [],
"linker": "",
"backend": "",
"ignore_scripts": false,
"ignore_scripts": true,
"install_flags": [],
"visible_repo_name": "examples_vite_monorepo_node_modules"
}

View File

@@ -24,6 +24,10 @@ This repository follows the standard Bazel ruleset layout:
The public entrypoint for rule authors and users is `@rules_bun//bun:defs.bzl`.
Runtime launcher targets from `bun_binary`, `bun_script`, `bun_test`,
`bun_dev`, and `js_run_devserver` use native platform wrappers. Windows runtime
support is native and does not require Git Bash or MSYS.
## Public API
`rules_bun` exports these primary rules:
@@ -48,6 +52,22 @@ Reference documentation:
- `bun_install` extension docs: [docs/bun_install.md](docs/bun_install.md)
- Docs index: [docs/index.md](docs/index.md)
## Hermeticity
`rules_bun` now draws a sharp line between hermetic rule surfaces and local
workflow helpers.
- Hermetic build/test surfaces: `bun_build`, `bun_bundle`, `bun_compile`, `bun_test`
- Runfiles-only executable surface: `bun_binary`
- Reproducible but non-hermetic repository fetch surface: `bun_install`
- Local workflow helpers: `bun_script`, `bun_dev`, `js_run_devserver`
Strict defaults are enabled by default:
- `bun_install` skips lifecycle scripts unless `ignore_scripts = False`
- `bun_build`, `bun_bundle`, `bun_compile`, and `bun_test` require `install_mode = "disable"`
- Runtime launchers stage hermetic `bun`, `bunx`, and `node` commands on `PATH` and do not inherit the host `PATH` unless `inherit_host_path = True`
To refresh generated rule docs:
```bash
@@ -60,7 +80,7 @@ Release announcements should provide a copy-pasteable module snippet in the
standard ruleset form:
```starlark
bazel_dep(name = "rules_bun", version = "0.2.2")
bazel_dep(name = "rules_bun", version = "1.0.3")
```
Then add the Bun repositories and register the toolchains in `MODULE.bazel`:
@@ -104,9 +124,10 @@ bun_install_ext.install(
name = "bun_deps",
package_json = "//:package.json",
bun_lockfile = "//:bun.lock",
# Optional: include extra install-time files or allow Bun to reuse the
# host HOME/cache.
# Optional: include extra install-time files.
# install_inputs = ["//:.npmrc"],
# Optional non-hermetic opt-in:
# ignore_scripts = False,
# isolated_home = False,
)
@@ -197,9 +218,11 @@ bun_script(
)
```
When `node_modules` is provided, executables from `node_modules/.bin` are added
to `PATH`. This label typically comes from `bun_install`, which still produces a
standard `node_modules/` directory.
Launcher-based runtime rules stage hermetic `bun`, `bunx`, and `node`
commands on `PATH`. When `node_modules` is provided, executables from
`node_modules/.bin` are also added to the runtime `PATH`. The host `PATH` is
not inherited unless `inherit_host_path = True`. This label typically comes
from `bun_install`, which still produces a standard `node_modules/` directory.
### `bun_build` and `bun_compile`
@@ -225,7 +248,9 @@ bun_compile(
`bun_build` exposes a directory output so Bun can emit HTML, CSS, assets, and
split chunks. `bun_compile` produces a single executable artifact and supports
explicit cross-compilation via `compile_executable`.
explicit cross-compilation via `compile_executable`. When `root` is omitted,
`bun_build` derives a stable default from the entry point parent directory so
HTML and asset output stays inside Bazel's declared output tree.
### `bun_dev` for local development

View File

@@ -1,3 +1,3 @@
0.2.2
1.0.3
stable
0

View File

@@ -81,7 +81,7 @@ _install = tag_class(
"omit": attr.string_list(),
"linker": attr.string(),
"backend": attr.string(),
"ignore_scripts": attr.bool(default = False),
"ignore_scripts": attr.bool(default = True),
"install_flags": attr.string_list(),
},
)

View File

@@ -20,6 +20,21 @@ Unlike the build rules in [rules.md](rules.md), `bun_install` is not loaded from
The generated repository can then be passed to rules such as `bun_script`,
`bun_binary`, `bun_bundle`, and `bun_test`.
## Hermeticity
`bun_install` is a repository convenience rule, not a hermetic build action.
It is intended to be reproducible from a checked-in lockfile and pinned Bun
toolchain, but it still performs dependency fetching as part of repository
materialization.
Strict defaults now favor reproducibility:
- `isolated_home = True`
- `ignore_scripts = True`
Set `ignore_scripts = False` only when you explicitly want lifecycle scripts to
run during repository creation.
## Usage
```starlark
@@ -150,6 +165,9 @@ Examples include `hardlink`, `symlink`, and `copyfile`.
Optional boolean controlling whether Bun skips lifecycle scripts in the project
manifest.
- `True` (default): skips lifecycle scripts for stricter, more reproducible installs
- `False`: allows lifecycle scripts to run during repository creation
### `install_flags`
Optional list of additional raw flags forwarded to `bun install`.
@@ -164,3 +182,5 @@ Optional list of additional raw flags forwarded to `bun install`.
`package.json`.
- Additional `install_inputs` must be files under the same package root as the
selected `package_json`.
- `bun_install` does not make the install step remotely hermetic; it makes the
generated repository stricter and more reproducible by default.

View File

@@ -17,6 +17,13 @@ Supporting material lives in:
- [docs/rules.md](rules.md) for generated build rule reference
- [docs/bun_install.md](bun_install.md) for `bun_install` extension docs
## Hermeticity
- Hermetic rule surfaces: `bun_build`, `bun_bundle`, `bun_compile`, `bun_test`
- Runfiles-only executable surface: `bun_binary`
- Reproducible but non-hermetic repository surface: `bun_install`
- Local workflow helpers: `bun_script`, `bun_dev`, `js_run_devserver`
## Rule reference
- [rules.md](rules.md)

View File

@@ -2,6 +2,18 @@
Public API surface for Bun Bazel rules.
## Hermeticity And Determinism
- Hermetic rule surfaces: `bun_build`, `bun_bundle`, `bun_compile`, `bun_test`
- Runfiles-only executable surface: `bun_binary`
- Reproducible but non-hermetic repository surface: `bun_install`
- Local workflow helpers: `bun_script`, `bun_dev`, `js_run_devserver`
Strict defaults:
- `bun_build`, `bun_bundle`, `bun_compile`, and `bun_test` require `install_mode = "disable"`
- Runtime launchers stage hermetic `bun`, `bunx`, and `node` commands on `PATH` and do not inherit the host `PATH` unless `inherit_host_path = True`
<a id="bun_binary"></a>
## bun_binary
@@ -28,7 +40,8 @@ Use this rule for non-test scripts and CLIs that should run via `bazel run`.
| <a id="bun_binary-conditions"></a>conditions | Custom package resolve conditions passed to Bun. | List of strings | optional | `[]` |
| <a id="bun_binary-entry_point"></a>entry_point | Path to the main JS/TS file to execute. | <a href="https://bazel.build/concepts/labels">Label</a> | required | |
| <a id="bun_binary-env_files"></a>env_files | Additional environment files loaded with `--env-file`. | <a href="https://bazel.build/concepts/labels">List of labels</a> | optional | `[]` |
| <a id="bun_binary-install_mode"></a>install_mode | Whether Bun may auto-install missing packages at runtime. | String | optional | `"disable"` |
| <a id="bun_binary-install_mode"></a>install_mode | Whether Bun may auto-install missing packages at runtime. Non-`disable` values are runtime opt-ins and are not hermetic. | String | optional | `"disable"` |
| <a id="bun_binary-inherit_host_path"></a>inherit_host_path | If true, appends the host PATH after the staged Bun runtime tool bin and `node_modules/.bin` entries at runtime. | Boolean | optional | `False` |
| <a id="bun_binary-no_env_file"></a>no_env_file | If true, disables Bun's automatic `.env` loading. | Boolean | optional | `False` |
| <a id="bun_binary-node_modules"></a>node_modules | Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, in runfiles. | <a href="https://bazel.build/concepts/labels">Label</a> | optional | `None` |
| <a id="bun_binary-preload"></a>preload | Modules to preload with `--preload` before running the entry point. | <a href="https://bazel.build/concepts/labels">List of labels</a> | optional | `[]` |
@@ -82,7 +95,7 @@ may be requested with `metafile` and `metafile_md`.
| <a id="bun_build-feature"></a>feature | Repeated `--feature` values for dead-code elimination. | List of strings | optional | `[]` |
| <a id="bun_build-footer"></a>footer | Optional bundle footer text. | String | optional | `""` |
| <a id="bun_build-format"></a>format | Output module format. | String | optional | `"esm"` |
| <a id="bun_build-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while executing the build. | String | optional | `"disable"` |
| <a id="bun_build-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while executing the build. Hermetic builds require `\"disable\"`, and other values are rejected. | String | optional | `"disable"` |
| <a id="bun_build-jsx_factory"></a>jsx_factory | Optional JSX factory override. | String | optional | `""` |
| <a id="bun_build-jsx_fragment"></a>jsx_fragment | Optional JSX fragment override. | String | optional | `""` |
| <a id="bun_build-jsx_import_source"></a>jsx_import_source | Optional JSX import source override. | String | optional | `""` |
@@ -102,7 +115,7 @@ may be requested with `metafile` and `metafile_md`.
| <a id="bun_build-production"></a>production | If true, sets `NODE_ENV=production` and enables Bun production mode. | Boolean | optional | `False` |
| <a id="bun_build-public_path"></a>public_path | Optional public path prefix for emitted imports. | String | optional | `""` |
| <a id="bun_build-react_fast_refresh"></a>react_fast_refresh | If true, enables Bun's React fast refresh transform. | Boolean | optional | `False` |
| <a id="bun_build-root"></a>root | Optional root directory for multiple entry points. | String | optional | `""` |
| <a id="bun_build-root"></a>root | Optional root directory for multiple entry points. When omitted, `bun_build` derives one from the entry point parent directories to keep emitted files inside the declared output tree. | String | optional | `""` |
| <a id="bun_build-sourcemap"></a>sourcemap | Sourcemap emission mode. | String | optional | `"none"` |
| <a id="bun_build-splitting"></a>splitting | If true, enables code splitting. | Boolean | optional | `False` |
| <a id="bun_build-target"></a>target | Bun build target environment. | String | optional | `"browser"` |
@@ -135,7 +148,7 @@ Each entry point produces one output JavaScript artifact.
| <a id="bun_bundle-entry_points"></a>entry_points | Entry files to bundle. | <a href="https://bazel.build/concepts/labels">List of labels</a> | required | |
| <a id="bun_bundle-external"></a>external | Package names to treat as externals (not bundled). | List of strings | optional | `[]` |
| <a id="bun_bundle-format"></a>format | Output module format. | String | optional | `"esm"` |
| <a id="bun_bundle-install_mode"></a>install_mode | Whether Bun may auto-install missing packages during bundling. | String | optional | `"disable"` |
| <a id="bun_bundle-install_mode"></a>install_mode | Whether Bun may auto-install missing packages during bundling. Hermetic bundles require `\"disable\"`, and other values are rejected. | String | optional | `"disable"` |
| <a id="bun_bundle-minify"></a>minify | If true, minifies bundle output. | Boolean | optional | `False` |
| <a id="bun_bundle-node_modules"></a>node_modules | Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, for package resolution. | <a href="https://bazel.build/concepts/labels">Label</a> | optional | `None` |
| <a id="bun_bundle-sourcemap"></a>sourcemap | If true, emits source maps. | Boolean | optional | `False` |
@@ -194,7 +207,7 @@ Compiles a Bun program into a standalone executable with `bun build --compile`.
| <a id="bun_compile-feature"></a>feature | Repeated `--feature` values for dead-code elimination. | List of strings | optional | `[]` |
| <a id="bun_compile-footer"></a>footer | Optional bundle footer text. | String | optional | `""` |
| <a id="bun_compile-format"></a>format | Output module format. | String | optional | `"esm"` |
| <a id="bun_compile-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while executing the build. | String | optional | `"disable"` |
| <a id="bun_compile-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while executing the build. Hermetic compile actions require `\"disable\"`, and other values are rejected. | String | optional | `"disable"` |
| <a id="bun_compile-jsx_factory"></a>jsx_factory | Optional JSX factory override. | String | optional | `""` |
| <a id="bun_compile-jsx_fragment"></a>jsx_fragment | Optional JSX fragment override. | String | optional | `""` |
| <a id="bun_compile-jsx_import_source"></a>jsx_import_source | Optional JSX import source override. | String | optional | `""` |
@@ -251,7 +264,8 @@ watch/HMR plus optional full restarts on selected file changes.
| <a id="bun_dev-conditions"></a>conditions | Custom package resolve conditions passed to Bun. | List of strings | optional | `[]` |
| <a id="bun_dev-entry_point"></a>entry_point | Path to the main JS/TS file to execute in dev mode. | <a href="https://bazel.build/concepts/labels">Label</a> | required | |
| <a id="bun_dev-env_files"></a>env_files | Additional environment files loaded with `--env-file`. | <a href="https://bazel.build/concepts/labels">List of labels</a> | optional | `[]` |
| <a id="bun_dev-install_mode"></a>install_mode | Whether Bun may auto-install missing packages in dev mode. | String | optional | `"disable"` |
| <a id="bun_dev-install_mode"></a>install_mode | Whether Bun may auto-install missing packages in dev mode. This is a local workflow helper, not a hermetic execution surface. | String | optional | `"disable"` |
| <a id="bun_dev-inherit_host_path"></a>inherit_host_path | If true, appends the host PATH after the staged Bun runtime tool bin and `node_modules/.bin` entries at runtime. | Boolean | optional | `False` |
| <a id="bun_dev-no_clear_screen"></a>no_clear_screen | If true, disables terminal clearing on Bun reloads. | Boolean | optional | `False` |
| <a id="bun_dev-no_env_file"></a>no_env_file | If true, disables Bun's automatic `.env` loading. | Boolean | optional | `False` |
| <a id="bun_dev-node_modules"></a>node_modules | Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, in runfiles. | <a href="https://bazel.build/concepts/labels">Label</a> | optional | `None` |
@@ -281,7 +295,7 @@ Use this rule to expose existing package scripts such as `dev`, `build`, or
`check` via `bazel run` without adding wrapper shell scripts. This is a good fit
for Vite-style workflows, where scripts like `vite dev` or `vite build` are
declared in `package.json` and expect to run from the package directory with
`node_modules/.bin` available on `PATH`.
the staged Bun runtime tool bin and `node_modules/.bin` available on `PATH`.
**ATTRIBUTES**
@@ -294,10 +308,11 @@ declared in `package.json` and expect to run from the package directory with
| <a id="bun_script-env_files"></a>env_files | Additional environment files loaded with `--env-file`. | <a href="https://bazel.build/concepts/labels">List of labels</a> | optional | `[]` |
| <a id="bun_script-execution_mode"></a>execution_mode | How Bun should execute matching workspace scripts. | String | optional | `"single"` |
| <a id="bun_script-filters"></a>filters | Workspace package filters passed via repeated `--filter` flags. | List of strings | optional | `[]` |
| <a id="bun_script-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while running the script. | String | optional | `"disable"` |
| <a id="bun_script-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while running the script. This is a local workflow helper, not a hermetic execution surface. | String | optional | `"disable"` |
| <a id="bun_script-inherit_host_path"></a>inherit_host_path | If true, appends the host PATH after the staged Bun runtime tool bin and `node_modules/.bin` entries at runtime. | Boolean | optional | `False` |
| <a id="bun_script-no_env_file"></a>no_env_file | If true, disables Bun's automatic `.env` loading. | Boolean | optional | `False` |
| <a id="bun_script-no_exit_on_error"></a>no_exit_on_error | If true, Bun keeps running other workspace scripts when one fails. | Boolean | optional | `False` |
| <a id="bun_script-node_modules"></a>node_modules | Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, in runfiles. Executables from `node_modules/.bin` are added to `PATH`, which is useful for scripts such as `vite`. | <a href="https://bazel.build/concepts/labels">Label</a> | optional | `None` |
| <a id="bun_script-node_modules"></a>node_modules | Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, in runfiles. The staged Bun runtime tool bin and executables from `node_modules/.bin` are added to `PATH`, which is useful for scripts such as `vite`. | <a href="https://bazel.build/concepts/labels">Label</a> | optional | `None` |
| <a id="bun_script-package_json"></a>package_json | Label of the `package.json` file containing the named script. | <a href="https://bazel.build/concepts/labels">Label</a> | required | |
| <a id="bun_script-preload"></a>preload | Modules to preload with `--preload` before running the script. | <a href="https://bazel.build/concepts/labels">List of labels</a> | optional | `[]` |
| <a id="bun_script-run_flags"></a>run_flags | Additional raw flags forwarded to `bun run` before the script name. | List of strings | optional | `[]` |
@@ -340,7 +355,8 @@ Supports Bazel test filtering (`--test_filter`) and coverage integration.
| <a id="bun_test-coverage"></a>coverage | If true, always enables Bun coverage output. | Boolean | optional | `False` |
| <a id="bun_test-coverage_reporters"></a>coverage_reporters | Repeated Bun coverage reporters such as `text` or `lcov`. | List of strings | optional | `[]` |
| <a id="bun_test-env_files"></a>env_files | Additional environment files loaded with `--env-file`. | <a href="https://bazel.build/concepts/labels">List of labels</a> | optional | `[]` |
| <a id="bun_test-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while testing. | String | optional | `"disable"` |
| <a id="bun_test-install_mode"></a>install_mode | Whether Bun may auto-install missing packages while testing. Hermetic tests require `\"disable\"`, and other values are rejected. | String | optional | `"disable"` |
| <a id="bun_test-inherit_host_path"></a>inherit_host_path | If true, appends the host PATH after the staged Bun runtime tool bin and `node_modules/.bin` entries at runtime. | Boolean | optional | `False` |
| <a id="bun_test-max_concurrency"></a>max_concurrency | Optional maximum number of concurrent tests. | Integer | optional | `0` |
| <a id="bun_test-no_env_file"></a>no_env_file | If true, disables Bun's automatic `.env` loading. | Boolean | optional | `False` |
| <a id="bun_test-node_modules"></a>node_modules | Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, in runfiles. | <a href="https://bazel.build/concepts/labels">Label</a> | optional | `None` |
@@ -411,6 +427,7 @@ the provided tool with any default arguments.
| <a id="js_run_devserver-package_dir_hint"></a>package_dir_hint | Optional package-relative directory hint when package_json is not supplied. | String | optional | `"."` |
| <a id="js_run_devserver-package_json"></a>package_json | Optional package.json used to resolve the package working directory. | <a href="https://bazel.build/concepts/labels">Label</a> | optional | `None` |
| <a id="js_run_devserver-tool"></a>tool | Executable target to launch as the dev server. | <a href="https://bazel.build/concepts/labels">Label</a> | required | |
| <a id="js_run_devserver-inherit_host_path"></a>inherit_host_path | If true, appends the host PATH after the staged Bun runtime tool bin and `node_modules/.bin` entries at runtime. | Boolean | optional | `False` |
| <a id="js_run_devserver-working_dir"></a>working_dir | Working directory at runtime: Bazel runfiles workspace root or the resolved package directory. | String | optional | `"workspace"` |
@@ -480,5 +497,3 @@ js_test(<a href="#js_test-name">name</a>, <a href="#js_test-entry_point">entry_p
| <a id="js_test-entry_point"></a>entry_point | <p align="center"> - </p> | `None` |
| <a id="js_test-srcs"></a>srcs | <p align="center"> - </p> | `None` |
| <a id="js_test-kwargs"></a>kwargs | <p align="center"> - </p> | none |

View File

@@ -15,7 +15,7 @@
...
}:
let
bazelVersion = "9.0.0";
bazelVersion = "9.0.1";
in
repo-lib.lib.mkRepo {
inherit self nixpkgs;
@@ -75,6 +75,11 @@
' "$README" > "$TMP" && mv "$TMP" "$README"
'';
}
# Analysis smoke check: cquery over //tests/... fails fast if target analysis
# is broken, without building anything (output is discarded).
{
run.script = ''
bazel cquery //tests/... >/dev/null
'';
}
];
postVersion = ''
@@ -103,12 +108,6 @@
version.args = [ "--version" ];
banner.color = "YELLOW";
})
(repo-lib.lib.tools.fromPackage {
name = "Go";
package = pkgs.go;
version.args = [ "version" ];
banner.color = "CYAN";
})
(repo-lib.lib.tools.fromPackage {
name = "Bazel";
package = bazel9;
@@ -118,20 +117,21 @@
];
shell.packages = [
pkgs.gopls
pkgs.gotools
pkgs.bazel-buildtools
pkgs.curl
pkgs.python3
self.packages.${system}.release
];
checks.tests = {
command = "bazel test //tests/...";
command = "bazelisk test //tests/...";
stage = "pre-push";
passFilenames = false;
runtimeInputs = [
bazel9
pkgs.bun
pkgs.go
pkgs.curl
pkgs.python3
];
};
};

View File

@@ -15,6 +15,8 @@ exports_files([
"js_compat.bzl",
"js_library.bzl",
"js_run_devserver.bzl",
"runtime_launcher.bzl",
"runtime_launcher.js",
"workspace.bzl",
])
@@ -34,6 +36,8 @@ filegroup(
"js_compat.bzl",
"js_library.bzl",
"js_run_devserver.bzl",
"runtime_launcher.bzl",
"runtime_launcher.js",
"workspace.bzl",
],
visibility = ["//visibility:public"],
@@ -59,6 +63,7 @@ bzl_library(
deps = [
":bun_command_bzl",
":js_library_bzl",
":runtime_launcher_bzl",
":workspace_bzl",
],
)
@@ -84,6 +89,7 @@ bzl_library(
srcs = ["bun_dev.bzl"],
deps = [
":bun_command_bzl",
":runtime_launcher_bzl",
":workspace_bzl",
],
)
@@ -98,6 +104,7 @@ bzl_library(
srcs = ["bun_script.bzl"],
deps = [
":bun_command_bzl",
":runtime_launcher_bzl",
":workspace_bzl",
],
)
@@ -108,6 +115,7 @@ bzl_library(
deps = [
":bun_command_bzl",
":js_library_bzl",
":runtime_launcher_bzl",
":workspace_bzl",
],
)
@@ -133,10 +141,16 @@ bzl_library(
srcs = ["js_run_devserver.bzl"],
deps = [
":js_library_bzl",
":runtime_launcher_bzl",
":workspace_bzl",
],
)
# Starlark library target exposing runtime_launcher.bzl so dependent
# bzl_library targets (and stardoc) can track it as a .bzl dependency.
bzl_library(
    name = "runtime_launcher_bzl",
    srcs = ["runtime_launcher.bzl"],
)
bzl_library(
name = "workspace_bzl",
srcs = ["workspace.bzl"],

View File

@@ -1,8 +1,9 @@
"""Rule for running JS/TS scripts with Bun."""
load("//internal:bun_command.bzl", "append_shell_flag", "append_shell_flag_files", "append_shell_flag_values", "append_shell_install_mode", "append_shell_raw_flags", "render_shell_array", "shell_quote")
load("//internal:bun_command.bzl", "append_flag", "append_flag_values", "append_install_mode", "append_raw_flags")
load("//internal:js_library.bzl", "collect_js_runfiles")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
load("//internal:runtime_launcher.bzl", "declare_runtime_wrapper", "runfiles_path", "runtime_launcher_attrs", "write_launcher_spec")
load("//internal:workspace.bzl", "create_bun_workspace_info", "workspace_runfiles")
def _bun_binary_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
@@ -15,58 +16,55 @@ def _bun_binary_impl(ctx):
primary_file = entry_point,
)
launcher_lines = [render_shell_array("bun_args", ["--bun", "run"])]
append_shell_install_mode(launcher_lines, "bun_args", ctx.attr.install_mode)
append_shell_flag_files(launcher_lines, "bun_args", "--preload", ctx.files.preload)
append_shell_flag_files(launcher_lines, "bun_args", "--env-file", ctx.files.env_files)
append_shell_flag(launcher_lines, "bun_args", "--no-env-file", ctx.attr.no_env_file)
append_shell_flag(launcher_lines, "bun_args", "--smol", ctx.attr.smol)
append_shell_flag_values(launcher_lines, "bun_args", "--conditions", ctx.attr.conditions)
append_shell_raw_flags(launcher_lines, "bun_args", ctx.attr.run_flags)
launcher_lines.append('bun_args+=("${primary_source}")')
for arg in ctx.attr.args:
launcher_lines.append("bun_args+=(%s)" % shell_quote(arg))
argv = ["--bun", "run"]
append_install_mode(argv, ctx.attr.install_mode)
append_flag(argv, "--no-env-file", ctx.attr.no_env_file)
append_flag(argv, "--smol", ctx.attr.smol)
append_flag_values(argv, "--conditions", ctx.attr.conditions)
append_raw_flags(argv, ctx.attr.run_flags)
command = """
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
__BUN_ARGS__
exec "${bun_bin}" "${bun_args[@]}" "$@"
""".replace("__BUN_ARGS__", "\n".join(launcher_lines))
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
install_metadata_short_path = workspace_info.install_metadata_file.short_path if workspace_info.install_metadata_file else "",
primary_source_short_path = entry_point.short_path,
working_dir_mode = ctx.attr.working_dir,
) + command,
)
spec_file = write_launcher_spec(ctx, {
"version": 1,
"kind": "bun_run",
"bun_short_path": runfiles_path(bun_bin),
"primary_source_short_path": runfiles_path(entry_point),
"package_json_short_path": "",
"install_metadata_short_path": runfiles_path(workspace_info.install_metadata_file) if workspace_info.install_metadata_file else "",
"install_repo_runfiles_path": workspace_info.install_repo_runfiles_path,
"node_modules_roots": workspace_info.node_modules_roots,
"package_dir_hint": workspace_info.package_dir_hint,
"working_dir_mode": ctx.attr.working_dir,
"inherit_host_path": ctx.attr.inherit_host_path,
"argv": argv,
"args": ctx.attr.args,
"passthrough_args": True,
"tool_short_path": "",
"restart_on": [],
"watch_mode": "",
"reporter": "",
"coverage": False,
"coverage_reporters": [],
"preload_short_paths": [runfiles_path(file) for file in ctx.files.preload],
"env_file_short_paths": [runfiles_path(file) for file in ctx.files.env_files],
"test_short_paths": [],
})
launcher = declare_runtime_wrapper(ctx, bun_bin, spec_file)
return [
workspace_info,
DefaultInfo(
executable = launcher,
executable = launcher.executable,
runfiles = workspace_runfiles(
ctx,
workspace_info,
direct_files = [launcher],
direct_files = [launcher.executable, launcher.runner, spec_file],
transitive_files = dep_runfiles,
),
),
]
bun_binary = rule(
implementation = _bun_binary_impl,
doc = """Runs a JS/TS entry point with Bun as an executable target.
Use this rule for non-test scripts and CLIs that should run via `bazel run`.
""",
attrs = {
_BUN_BINARY_ATTRS = runtime_launcher_attrs()
_BUN_BINARY_ATTRS.update({
"entry_point": attr.label(
mandatory = True,
allow_single_file = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs"],
@@ -114,7 +112,19 @@ Use this rule for non-test scripts and CLIs that should run via `bazel run`.
values = ["workspace", "entry_point"],
doc = "Working directory at runtime: `workspace` root or nearest `entry_point` ancestor containing `.env`/`package.json`.",
),
},
"inherit_host_path": attr.bool(
default = False,
doc = "If true, appends the host PATH after the staged Bun runtime tool bin and node_modules/.bin entries at runtime.",
),
})
bun_binary = rule(
implementation = _bun_binary_impl,
doc = """Runs a JS/TS entry point with Bun as an executable target.
Use this rule for non-test scripts and CLIs that should run via `bazel run`.
""",
attrs = _BUN_BINARY_ATTRS,
executable = True,
toolchains = ["//bun:toolchain_type"],
)

View File

@@ -3,6 +3,94 @@
load("//internal:bun_command.bzl", "add_flag", "add_flag_value", "add_flag_values", "add_install_mode", "add_raw_flags")
load("//internal:js_library.bzl", "collect_js_sources")
# JavaScript helper written next to each staged build action and invoked as
# `bun --bun <runner>.js <manifest> <build args...>`. It copies every
# manifest-listed input from the Bazel execroot into a fresh mkdtemp stage
# directory, rewrites output-path flags (--outdir/--outfile values and
# --metafile=/--metafile-md= forms) to absolute execroot paths so outputs land
# back in the execroot, re-invokes Bun (process.execPath) with cwd set to the
# stage, and always removes the stage directory before exiting with the
# child's status (or 1 when no numeric status is available).
# NOTE(review): the doubled backslashes in the split regex are Starlark
# escapes; the written JS sees /\r?\n/.
_STAGED_BUILD_RUNNER = """import { spawnSync } from "node:child_process";
import { cpSync, mkdirSync, mkdtempSync, readFileSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { dirname, resolve } from "node:path";
const [, , manifestPath, ...buildArgs] = process.argv;
const execroot = process.cwd();
const stageDir = mkdtempSync(resolve(tmpdir(), "rules_bun_build-"));
function rewriteArgPath(flag, value) {
return `${flag}=${resolve(execroot, value)}`;
}
try {
for (const relpath of readFileSync(manifestPath, "utf8").split(/\\r?\\n/)) {
if (!relpath) {
continue;
}
const src = resolve(execroot, relpath);
const dest = resolve(stageDir, relpath);
mkdirSync(dirname(dest), { recursive: true });
cpSync(src, dest, { dereference: true, force: true, recursive: true });
}
const forwardedArgs = [];
for (let index = 0; index < buildArgs.length; index += 1) {
const arg = buildArgs[index];
if ((arg === "--outdir" || arg === "--outfile") && index + 1 < buildArgs.length) {
forwardedArgs.push(arg, resolve(execroot, buildArgs[index + 1]));
index += 1;
continue;
}
if (arg.startsWith("--metafile=")) {
forwardedArgs.push(rewriteArgPath("--metafile", arg.slice("--metafile=".length)));
continue;
}
if (arg.startsWith("--metafile-md=")) {
forwardedArgs.push(rewriteArgPath("--metafile-md", arg.slice("--metafile-md=".length)));
continue;
}
forwardedArgs.push(arg);
}
const result = spawnSync(process.execPath, forwardedArgs, {
cwd: stageDir,
stdio: "inherit",
});
if (result.error) {
throw result.error;
}
process.exit(typeof result.status === "number" ? result.status : 1);
} finally {
rmSync(stageDir, { recursive: true, force: true });
}
"""
def sort_files_by_short_path(files):
    """Returns `files` sorted by `short_path`, without losing duplicates.

    The previous dict-keyed implementation collapsed files sharing a
    short_path (possible when a source file and a generated file have the same
    workspace-relative path): one was silently dropped and the survivor was
    emitted once per occurrence. This version is a stable sort that keeps
    every element exactly once, preserving input order among equal keys.

    Args:
      files: list of File objects (anything exposing `.short_path`).

    Returns:
      A new list containing the same elements ordered by short_path.
    """

    # Decorate with the original index as a tiebreaker so sorted() never has
    # to compare File objects directly (File is not orderable in Starlark).
    order = sorted([(files[i].short_path, i) for i in range(len(files))])
    return [files[i] for _, i in order]
def validate_hermetic_install_mode(attr, rule_name):
    """Fails analysis unless the target's install_mode is "disable".

    Hermetic build actions must not let Bun auto-install packages at execution
    time, so every other install_mode value is rejected with a rule-specific
    error. Targets without an install_mode attribute are treated as disabled.
    """
    mode = getattr(attr, "install_mode", "disable")
    if mode == "disable":
        return
    fail("{} requires install_mode = \"disable\" for hermetic execution".format(rule_name))
def infer_entry_point_root(entries):
    """Infers the longest common ancestor directory of the entry points.

    Args:
      entries: list of File objects (anything exposing `.path`).

    Returns:
      None for an empty list, "." when the entries share no common directory
      prefix, otherwise the "/"-joined common directory path.
    """
    if not entries:
        return None

    # Start from the first entry's directory and shrink it against each other
    # entry until only the shared prefix remains.
    prefix = entries[0].path.split("/")[:-1]
    for entry in entries[1:]:
        other = entry.path.split("/")[:-1]
        keep = min(len(prefix), len(other))
        for i in range(keep):
            if prefix[i] != other[i]:
                keep = i
                break
        prefix = prefix[:keep]
    return "/".join(prefix) if prefix else "."
def bun_build_transitive_inputs(ctx):
transitive_inputs = []
if getattr(ctx.attr, "node_modules", None):
@@ -11,13 +99,17 @@ def bun_build_transitive_inputs(ctx):
transitive_inputs.append(collect_js_sources(dep))
return transitive_inputs
def add_bun_build_common_flags(args, attr, metafile = None, metafile_md = None):
def add_bun_build_common_flags(args, attr, metafile = None, metafile_md = None, root = None):
build_root = root
if build_root == None:
build_root = getattr(attr, "root", None)
add_install_mode(args, getattr(attr, "install_mode", "disable"))
add_flag_value(args, "--target", getattr(attr, "target", None))
add_flag_value(args, "--format", getattr(attr, "format", None))
add_flag(args, "--production", getattr(attr, "production", False))
add_flag(args, "--splitting", getattr(attr, "splitting", False))
add_flag_value(args, "--root", getattr(attr, "root", None))
add_flag_value(args, "--root", build_root)
sourcemap = getattr(attr, "sourcemap", None)
if sourcemap == True:
@@ -88,3 +180,29 @@ def add_bun_compile_flags(args, attr, compile_executable = None):
add_flag_value(args, "--windows-version", getattr(attr, "windows_version", None))
add_flag_value(args, "--windows-description", getattr(attr, "windows_description", None))
add_flag_value(args, "--windows-copyright", getattr(attr, "windows_copyright", None))
def declare_staged_bun_build_action(ctx, bun_bin, build_args, build_inputs, outputs, mnemonic, progress_message, name_suffix):
    """Registers a Bun build action that runs from a staged copy of its inputs.

    Declares and writes two helper files — a sorted input manifest and the
    _STAGED_BUILD_RUNNER JS script — then registers a single action running
    `bun --bun <runner> <manifest> <build_args...>` so the build executes in a
    temp directory isolated from the execroot.

    Args:
      ctx: Rule context used to declare files and register the action.
      bun_bin: Bun executable File used as the action executable.
      build_args: Remaining Bun CLI arguments; callers pass a
        ctx.actions.args() object (Bazel allows mixing it with plain strings
        in `arguments`) — NOTE(review): a plain string would also be accepted.
      build_inputs: depset of Files to stage; listed in the manifest.
      outputs: Output files/directories the action must produce.
      mnemonic: Action mnemonic for logs/profiling.
      progress_message: Message shown while the action runs.
      name_suffix: Suffix appended to the label name for the declared helper
        files, keeping manifest/runner names unique per action.
    """
    # Sort for a deterministic manifest (stable action inputs across builds).
    sorted_inputs = sort_files_by_short_path(build_inputs.to_list())
    input_manifest = ctx.actions.declare_file(ctx.label.name + name_suffix + ".inputs")
    runner = ctx.actions.declare_file(ctx.label.name + name_suffix + "_runner.js")
    ctx.actions.write(
        output = input_manifest,
        content = "".join([file.path + "\n" for file in sorted_inputs]),
    )
    ctx.actions.write(
        output = runner,
        content = _STAGED_BUILD_RUNNER,
    )
    ctx.actions.run(
        executable = bun_bin,
        # "--bun" is consumed by Bun itself; the runner script then receives
        # the manifest path followed by the forwarded build arguments.
        arguments = ["--bun", runner.path, input_manifest.path, build_args],
        inputs = depset(
            direct = [input_manifest, runner],
            transitive = [build_inputs],
        ),
        outputs = outputs,
        mnemonic = mnemonic,
        progress_message = progress_message,
    )

View File

@@ -1,21 +1,30 @@
"""Rule for bundling JS/TS sources with Bun."""
load("//internal:bun_build_support.bzl", "add_bun_build_common_flags", "bun_build_transitive_inputs")
load("//internal:bun_build_support.bzl", "add_bun_build_common_flags", "bun_build_transitive_inputs", "declare_staged_bun_build_action", "sort_files_by_short_path", "validate_hermetic_install_mode")
def _output_name(target_name, entry):
stem = entry.basename.rsplit(".", 1)[0]
return "{}__{}.js".format(target_name, stem)
stem = entry.short_path.rsplit(".", 1)[0]
sanitized = stem.replace("\\", "_").replace("/", "_").replace("-", "_").replace(".", "_").replace("@", "at_")
sanitized = sanitized.replace("__", "_").replace("__", "_").replace("__", "_")
sanitized = sanitized.strip("_")
if not sanitized:
sanitized = entry.basename.rsplit(".", 1)[0]
return "{}__{}.js".format(target_name, sanitized)
def _bun_bundle_impl(ctx):
validate_hermetic_install_mode(ctx.attr, "bun_bundle")
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
entry_points = sort_files_by_short_path(ctx.files.entry_points)
data_files = sort_files_by_short_path(ctx.files.data)
transitive_inputs = bun_build_transitive_inputs(ctx)
outputs = []
for entry in ctx.files.entry_points:
for entry in entry_points:
output = ctx.actions.declare_file(_output_name(ctx.label.name, entry))
outputs.append(output)
@@ -27,16 +36,18 @@ def _bun_bundle_impl(ctx):
args.add(output.path)
args.add(entry.path)
ctx.actions.run(
executable = bun_bin,
arguments = [args],
inputs = depset(
direct = [entry] + ctx.files.data,
declare_staged_bun_build_action(
ctx,
bun_bin,
args,
depset(
direct = [entry] + data_files,
transitive = transitive_inputs,
),
outputs = [output],
mnemonic = "BunBundle",
progress_message = "Bundling {} with Bun".format(entry.short_path),
name_suffix = "_bundle_{}".format(output.basename.rsplit(".", 1)[0]),
)
return [DefaultInfo(files = depset(outputs))]
@@ -67,7 +78,7 @@ Each entry point produces one output JavaScript artifact.
"install_mode": attr.string(
default = "disable",
values = ["disable", "auto", "fallback", "force"],
doc = "Whether Bun may auto-install missing packages during bundling.",
doc = "Whether Bun may auto-install missing packages during bundling. Hermetic bundle actions require `disable`; other values are rejected.",
),
"target": attr.string(
default = "browser",

View File

@@ -82,3 +82,32 @@ def add_install_mode(args, install_mode):
args.add("--no-install")
elif install_mode in ["fallback", "force"]:
add_flag_value(args, "--install", install_mode)
def append_arg(values, value):
    """Appends `value` to the argv list, coerced to a string."""
    values.append(str(value))

def append_flag(values, flag, enabled):
    """Appends a bare flag only when `enabled` is truthy."""
    if not enabled:
        return
    append_arg(values, flag)

def append_flag_value(values, flag, value):
    """Appends `flag` followed by `value`, skipping None and empty strings."""
    if value == None:
        return
    if type(value) == type("") and value == "":
        return
    append_arg(values, flag)
    append_arg(values, value)

def append_flag_values(values, flag, items):
    """Appends one `flag value` pair per entry in `items` (empties skipped)."""
    for entry in items:
        append_flag_value(values, flag, entry)

def append_raw_flags(values, items):
    """Appends each item verbatim (stringified) to the argv list."""
    for entry in items:
        append_arg(values, entry)

def append_install_mode(values, install_mode):
    """Translates an install_mode attribute into Bun CLI install flags.

    "disable" maps to --no-install; "fallback"/"force" map to
    `--install <mode>`; any other value (e.g. "auto") adds nothing.
    """
    if install_mode == "disable":
        append_arg(values, "--no-install")
    elif install_mode == "fallback" or install_mode == "force":
        append_flag_value(values, "--install", install_mode)

View File

@@ -1,21 +1,37 @@
"""Rules for Bun build outputs and standalone executables."""
load("//internal:bun_build_support.bzl", "add_bun_build_common_flags", "add_bun_compile_flags", "bun_build_transitive_inputs")
load("//internal:bun_build_support.bzl", "add_bun_build_common_flags", "add_bun_compile_flags", "bun_build_transitive_inputs", "declare_staged_bun_build_action", "infer_entry_point_root", "sort_files_by_short_path", "validate_hermetic_install_mode")
def _bun_build_impl(ctx):
validate_hermetic_install_mode(ctx.attr, "bun_build")
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
entry_points = sort_files_by_short_path(ctx.files.entry_points)
data_files = sort_files_by_short_path(ctx.files.data)
output_dir = ctx.actions.declare_directory(ctx.label.name)
metafile = ctx.actions.declare_file(ctx.label.name + ".meta.json") if ctx.attr.metafile else None
metafile_md = ctx.actions.declare_file(ctx.label.name + ".meta.md") if ctx.attr.metafile_md else None
metafile = None
if ctx.attr.metafile:
metafile = ctx.actions.declare_file(ctx.label.name + ".meta.json")
metafile_md = None
if ctx.attr.metafile_md:
metafile_md = ctx.actions.declare_file(ctx.label.name + ".meta.md")
build_root = ctx.attr.root
if not build_root:
build_root = infer_entry_point_root(entry_points)
transitive_inputs = bun_build_transitive_inputs(ctx)
build_inputs = depset(
direct = entry_points + data_files,
transitive = transitive_inputs,
)
args = ctx.actions.args()
args.add("--bun")
args.add("build")
add_bun_build_common_flags(args, ctx.attr, metafile = metafile, metafile_md = metafile_md)
args.add("--outdir")
args.add(output_dir.path)
args.add_all(ctx.files.entry_points)
build_args = ctx.actions.args()
build_args.add("--bun")
build_args.add("build")
add_bun_build_common_flags(build_args, ctx.attr, metafile = metafile, metafile_md = metafile_md, root = build_root)
build_args.add("--outdir")
build_args.add(output_dir.path)
build_args.add_all(entry_points)
outputs = [output_dir]
if metafile:
@@ -23,25 +39,27 @@ def _bun_build_impl(ctx):
if metafile_md:
outputs.append(metafile_md)
ctx.actions.run(
executable = bun_bin,
arguments = [args],
inputs = depset(
direct = ctx.files.entry_points + ctx.files.data,
transitive = bun_build_transitive_inputs(ctx),
),
declare_staged_bun_build_action(
ctx,
bun_bin,
build_args,
build_inputs,
outputs = outputs,
mnemonic = "BunBuild",
progress_message = "Building {} with Bun".format(ctx.label.name),
name_suffix = "_build",
)
return [DefaultInfo(files = depset(outputs))]
def _bun_compile_impl(ctx):
validate_hermetic_install_mode(ctx.attr, "bun_compile")
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
output = ctx.actions.declare_file(ctx.label.name)
compile_executable = ctx.file.compile_executable
data_files = sort_files_by_short_path(ctx.files.data)
args = ctx.actions.args()
args.add("--bun")
@@ -52,20 +70,22 @@ def _bun_compile_impl(ctx):
args.add(output.path)
args.add(ctx.file.entry_point.path)
direct_inputs = [ctx.file.entry_point] + ctx.files.data
direct_inputs = [ctx.file.entry_point] + data_files
if compile_executable:
direct_inputs.append(compile_executable)
ctx.actions.run(
executable = bun_bin,
arguments = [args],
inputs = depset(
declare_staged_bun_build_action(
ctx,
bun_bin,
args,
depset(
direct = direct_inputs,
transitive = bun_build_transitive_inputs(ctx),
),
outputs = [output],
mnemonic = "BunCompile",
progress_message = "Compiling {} with Bun".format(ctx.file.entry_point.short_path),
name_suffix = "_compile",
)
return [
@@ -89,7 +109,7 @@ _COMMON_BUILD_ATTRS = {
"install_mode": attr.string(
default = "disable",
values = ["disable", "auto", "fallback", "force"],
doc = "Whether Bun may auto-install missing packages while executing the build.",
doc = "Whether Bun may auto-install missing packages while executing the build. Hermetic build actions require `disable`; other values are rejected.",
),
"target": attr.string(
default = "browser",

View File

@@ -1,7 +1,8 @@
"""Rule for running JS/TS scripts with Bun in watch mode for development."""
load("//internal:bun_command.bzl", "append_shell_flag", "append_shell_flag_files", "append_shell_flag_values", "append_shell_install_mode", "append_shell_raw_flags", "render_shell_array", "shell_quote")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
load("//internal:bun_command.bzl", "append_flag", "append_flag_values", "append_install_mode", "append_raw_flags")
load("//internal:runtime_launcher.bzl", "declare_runtime_wrapper", "runfiles_path", "runtime_launcher_attrs", "write_launcher_spec")
load("//internal:workspace.bzl", "create_bun_workspace_info", "workspace_runfiles")
def _bun_dev_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
@@ -13,142 +14,55 @@ def _bun_dev_impl(ctx):
primary_file = entry_point,
)
restart_watch_paths = "\n".join([path.short_path for path in ctx.files.restart_on])
launcher_lines = [render_shell_array("bun_args", ["--bun", "run"])]
append_shell_install_mode(launcher_lines, "bun_args", ctx.attr.install_mode)
append_shell_flag_files(launcher_lines, "bun_args", "--preload", ctx.files.preload)
append_shell_flag_files(launcher_lines, "bun_args", "--env-file", ctx.files.env_files)
append_shell_flag(launcher_lines, "bun_args", "--no-env-file", ctx.attr.no_env_file)
append_shell_flag(launcher_lines, "bun_args", "--smol", ctx.attr.smol)
append_shell_flag_values(launcher_lines, "bun_args", "--conditions", ctx.attr.conditions)
append_shell_flag(launcher_lines, "bun_args", "--no-clear-screen", ctx.attr.no_clear_screen)
append_shell_raw_flags(launcher_lines, "bun_args", ctx.attr.run_flags)
launcher_lines.append('bun_args+=("${primary_source}")')
for arg in ctx.attr.args:
launcher_lines.append("bun_args+=(%s)" % shell_quote(arg))
argv = ["--bun", "run"]
append_install_mode(argv, ctx.attr.install_mode)
append_flag(argv, "--no-env-file", ctx.attr.no_env_file)
append_flag(argv, "--smol", ctx.attr.smol)
append_flag_values(argv, "--conditions", ctx.attr.conditions)
append_flag(argv, "--no-clear-screen", ctx.attr.no_clear_screen)
append_raw_flags(argv, ctx.attr.run_flags)
command = """
__BUN_ARGS__
watch_mode="__WATCH_MODE__"
if [[ "${watch_mode}" == "hot" ]]; then
bun_args+=("--hot")
else
bun_args+=("--watch")
fi
if [[ __RESTART_COUNT__ -eq 0 ]]; then
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
exec "${bun_bin}" "${bun_args[@]}" "$@"
fi
readarray -t restart_paths <<'EOF_RESTART_PATHS'
__RESTART_PATHS__
EOF_RESTART_PATHS
file_mtime() {
local path="$1"
if stat -f '%m' "${path}" >/dev/null 2>&1; then
stat -f '%m' "${path}"
return 0
fi
stat -c '%Y' "${path}"
}
declare -A mtimes
for rel in "${restart_paths[@]}"; do
path="${runfiles_dir}/_main/${rel}"
if [[ -e "${path}" ]]; then
mtimes["${rel}"]="$(file_mtime "${path}")"
else
mtimes["${rel}"]="missing"
fi
done
child_pid=""
restart_child() {
if [[ -n "${child_pid}" ]] && kill -0 "${child_pid}" 2>/dev/null; then
kill "${child_pid}"
wait "${child_pid}" || true
fi
(
cd "${runtime_exec_dir}"
exec "${bun_bin}" "${bun_args[@]}" "$@"
) &
child_pid=$!
}
cleanup() {
if [[ -n "${child_pid}" ]] && kill -0 "${child_pid}" 2>/dev/null; then
kill "${child_pid}"
wait "${child_pid}" || true
fi
cleanup_runtime_workspace
}
trap cleanup EXIT INT TERM
restart_child "$@"
while true; do
sleep 1
changed=0
for rel in "${restart_paths[@]}"; do
path="${runfiles_dir}/_main/${rel}"
if [[ -e "${path}" ]]; then
current="$(file_mtime "${path}")"
else
current="missing"
fi
if [[ "${current}" != "${mtimes[${rel}]}" ]]; then
mtimes["${rel}"]="${current}"
changed=1
fi
done
if [[ "${changed}" -eq 1 ]]; then
restart_child "$@"
fi
done
""".replace("__WATCH_MODE__", ctx.attr.watch_mode).replace(
"__RESTART_COUNT__",
str(len(ctx.files.restart_on)),
).replace(
"__RESTART_PATHS__",
restart_watch_paths,
).replace(
"__BUN_ARGS__",
"\n".join(launcher_lines),
)
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
install_metadata_short_path = workspace_info.install_metadata_file.short_path if workspace_info.install_metadata_file else "",
primary_source_short_path = entry_point.short_path,
working_dir_mode = ctx.attr.working_dir,
) + command,
)
spec_file = write_launcher_spec(ctx, {
"version": 1,
"kind": "bun_run",
"bun_short_path": runfiles_path(bun_bin),
"primary_source_short_path": runfiles_path(entry_point),
"package_json_short_path": "",
"install_metadata_short_path": runfiles_path(workspace_info.install_metadata_file) if workspace_info.install_metadata_file else "",
"install_repo_runfiles_path": workspace_info.install_repo_runfiles_path,
"node_modules_roots": workspace_info.node_modules_roots,
"package_dir_hint": workspace_info.package_dir_hint,
"working_dir_mode": ctx.attr.working_dir,
"inherit_host_path": ctx.attr.inherit_host_path,
"argv": argv,
"args": ctx.attr.args,
"passthrough_args": True,
"tool_short_path": "",
"restart_on": [runfiles_path(file) for file in ctx.files.restart_on],
"watch_mode": ctx.attr.watch_mode,
"reporter": "",
"coverage": False,
"coverage_reporters": [],
"preload_short_paths": [runfiles_path(file) for file in ctx.files.preload],
"env_file_short_paths": [runfiles_path(file) for file in ctx.files.env_files],
"test_short_paths": [],
})
launcher = declare_runtime_wrapper(ctx, bun_bin, spec_file)
return [
workspace_info,
DefaultInfo(
executable = launcher,
runfiles = workspace_runfiles(ctx, workspace_info, direct_files = [launcher]),
executable = launcher.executable,
runfiles = workspace_runfiles(
ctx,
workspace_info,
direct_files = [launcher.executable, launcher.runner, spec_file],
),
),
]
bun_dev = rule(
implementation = _bun_dev_impl,
doc = """Runs a JS/TS entry point in Bun development watch mode.
This rule is intended for local dev loops (`bazel run`) and supports Bun
watch/HMR plus optional full restarts on selected file changes.
""",
attrs = {
_BUN_DEV_ATTRS = runtime_launcher_attrs()
_BUN_DEV_ATTRS.update({
"entry_point": attr.label(
mandatory = True,
allow_single_file = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs"],
@@ -206,7 +120,21 @@ watch/HMR plus optional full restarts on selected file changes.
values = ["workspace", "entry_point"],
doc = "Working directory at runtime: `workspace` root or nearest `entry_point` ancestor containing `.env`/`package.json`.",
),
},
"inherit_host_path": attr.bool(
default = False,
doc = "If true, appends the host PATH after the staged Bun runtime tool bin and node_modules/.bin entries at runtime.",
),
})
bun_dev = rule(
implementation = _bun_dev_impl,
doc = """Runs a JS/TS entry point in Bun development watch mode.
This rule is intended for local dev loops (`bazel run`) and supports Bun
watch/HMR plus optional full restarts on selected file changes. It is a local
workflow helper rather than a hermetic build rule.
""",
attrs = _BUN_DEV_ATTRS,
executable = True,
toolchains = ["//bun:toolchain_type"],
)

View File

@@ -205,8 +205,14 @@ def _materialize_workspace_packages(repository_ctx, package_json):
workspace_packages[relative_dir] = package_name if type(package_name) == type("") else ""
package_dirs = sorted(workspace_packages.keys())
package_names_by_dir = {}
for package_dir in package_dirs:
package_name = workspace_packages[package_dir]
if package_name:
package_names_by_dir[package_dir] = package_name
return struct(
package_dirs = package_dirs,
package_names_by_dir = package_names_by_dir,
package_names = [workspace_packages[package_dir] for package_dir in package_dirs if workspace_packages[package_dir]],
)
@@ -381,8 +387,10 @@ stderr:
"node_modules/.rules_bun/install.json",
json.encode({
"bun_lockfile": lockfile_name,
"install_root_rel_dir": ".",
"package_json": "package.json",
"workspace_package_dirs": workspace_packages.package_dirs,
"workspace_package_names_by_dir": workspace_packages.package_names_by_dir,
}) + "\n",
)
@@ -409,7 +417,7 @@ bun_install_repository = repository_rule(
"omit": attr.string_list(),
"linker": attr.string(),
"backend": attr.string(),
"ignore_scripts": attr.bool(default = False),
"ignore_scripts": attr.bool(default = True),
"install_flags": attr.string_list(),
"visible_repo_name": attr.string(),
"bun_linux_x64": attr.label(default = "@bun_linux_x64//:bun-linux-x64/bun", allow_single_file = True),
@@ -430,7 +438,7 @@ def bun_install(
omit = [],
linker = "",
backend = "",
ignore_scripts = False,
ignore_scripts = True,
install_flags = []):
"""Create an external repository containing installed node_modules.

View File

@@ -1,8 +1,8 @@
"""Rule for running package.json scripts with Bun."""
load("//internal:bun_command.bzl", "append_shell_flag", "append_shell_flag_files", "append_shell_flag_value", "append_shell_flag_values", "append_shell_install_mode", "append_shell_raw_flags", "render_shell_array", "shell_quote")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
load("//internal:bun_command.bzl", "append_flag", "append_flag_value", "append_flag_values", "append_install_mode", "append_raw_flags")
load("//internal:runtime_launcher.bzl", "declare_runtime_wrapper", "runfiles_path", "runtime_launcher_attrs", "write_launcher_spec")
load("//internal:workspace.bzl", "create_bun_workspace_info", "workspace_runfiles")
def _bun_script_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
@@ -16,68 +16,63 @@ def _bun_script_impl(ctx):
primary_file = package_json,
)
launcher_lines = [render_shell_array("bun_args", ["--bun", "run"])]
append_shell_install_mode(launcher_lines, "bun_args", ctx.attr.install_mode)
append_shell_flag_files(launcher_lines, "bun_args", "--preload", ctx.files.preload)
append_shell_flag_files(launcher_lines, "bun_args", "--env-file", ctx.files.env_files)
append_shell_flag(launcher_lines, "bun_args", "--no-env-file", ctx.attr.no_env_file)
append_shell_flag(launcher_lines, "bun_args", "--smol", ctx.attr.smol)
append_shell_flag_values(launcher_lines, "bun_args", "--conditions", ctx.attr.conditions)
append_shell_flag(launcher_lines, "bun_args", "--workspaces", ctx.attr.workspaces)
append_shell_flag_values(launcher_lines, "bun_args", "--filter", ctx.attr.filters)
argv = ["--bun", "run"]
append_install_mode(argv, ctx.attr.install_mode)
append_flag(argv, "--no-env-file", ctx.attr.no_env_file)
append_flag(argv, "--smol", ctx.attr.smol)
append_flag_values(argv, "--conditions", ctx.attr.conditions)
append_flag(argv, "--workspaces", ctx.attr.workspaces)
append_flag_values(argv, "--filter", ctx.attr.filters)
if ctx.attr.execution_mode == "parallel":
append_shell_flag(launcher_lines, "bun_args", "--parallel", True)
append_flag(argv, "--parallel", True)
elif ctx.attr.execution_mode == "sequential":
append_shell_flag(launcher_lines, "bun_args", "--sequential", True)
append_shell_flag(launcher_lines, "bun_args", "--no-exit-on-error", ctx.attr.no_exit_on_error)
append_shell_flag_value(launcher_lines, "bun_args", "--shell", ctx.attr.shell)
append_shell_flag(launcher_lines, "bun_args", "--silent", ctx.attr.silent)
append_shell_raw_flags(launcher_lines, "bun_args", ctx.attr.run_flags)
launcher_lines.append('bun_args+=(%s)' % shell_quote(ctx.attr.script))
for arg in ctx.attr.args:
launcher_lines.append("bun_args+=(%s)" % shell_quote(arg))
append_flag(argv, "--sequential", True)
append_flag(argv, "--no-exit-on-error", ctx.attr.no_exit_on_error)
append_flag_value(argv, "--shell", ctx.attr.shell)
append_flag(argv, "--silent", ctx.attr.silent)
append_raw_flags(argv, ctx.attr.run_flags)
command = """
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
__BUN_ARGS__
exec "${bun_bin}" "${bun_args[@]}" "$@"
""".replace("__BUN_ARGS__", "\n".join(launcher_lines))
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
package_dir_hint = package_json.dirname or ".",
package_json_short_path = package_json.short_path,
primary_source_short_path = package_json.short_path,
install_metadata_short_path = workspace_info.install_metadata_file.short_path if workspace_info.install_metadata_file else "",
working_dir_mode = ctx.attr.working_dir,
) + command,
)
spec_file = write_launcher_spec(ctx, {
"version": 1,
"kind": "bun_run",
"bun_short_path": runfiles_path(bun_bin),
"primary_source_short_path": "",
"package_json_short_path": runfiles_path(package_json),
"install_metadata_short_path": runfiles_path(workspace_info.install_metadata_file) if workspace_info.install_metadata_file else "",
"install_repo_runfiles_path": workspace_info.install_repo_runfiles_path,
"node_modules_roots": workspace_info.node_modules_roots,
"package_dir_hint": package_json.dirname or ".",
"working_dir_mode": ctx.attr.working_dir,
"inherit_host_path": ctx.attr.inherit_host_path,
"argv": argv,
"args": [ctx.attr.script] + ctx.attr.args,
"passthrough_args": True,
"tool_short_path": "",
"restart_on": [],
"watch_mode": "",
"reporter": "",
"coverage": False,
"coverage_reporters": [],
"preload_short_paths": [runfiles_path(file) for file in ctx.files.preload],
"env_file_short_paths": [runfiles_path(file) for file in ctx.files.env_files],
"test_short_paths": [],
})
launcher = declare_runtime_wrapper(ctx, bun_bin, spec_file)
return [
workspace_info,
DefaultInfo(
executable = launcher,
runfiles = workspace_runfiles(ctx, workspace_info, direct_files = [launcher]),
executable = launcher.executable,
runfiles = workspace_runfiles(
ctx,
workspace_info,
direct_files = [launcher.executable, launcher.runner, spec_file],
),
),
]
bun_script = rule(
implementation = _bun_script_impl,
doc = """Runs a named `package.json` script with Bun as an executable target.
Use this rule to expose existing package scripts such as `dev`, `build`, or
`check` via `bazel run` without adding wrapper shell scripts. This is a good fit
for Vite-style workflows, where scripts like `vite dev` or `vite build` are
declared in `package.json` and expect to run from the package directory with
`node_modules/.bin` available on `PATH`.
""",
attrs = {
_BUN_SCRIPT_ATTRS = runtime_launcher_attrs()
_BUN_SCRIPT_ATTRS.update({
"script": attr.string(
mandatory = True,
doc = "Name of the `package.json` script to execute via `bun run <script>`.",
@@ -88,7 +83,7 @@ declared in `package.json` and expect to run from the package directory with
doc = "Label of the `package.json` file containing the named script.",
),
"node_modules": attr.label(
doc = "Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, in runfiles. Executables from `node_modules/.bin` are added to `PATH`, which is useful for scripts such as `vite`.",
doc = "Optional label providing package files from a `node_modules` tree, typically produced by `bun_install`, in runfiles. The staged Bun runtime tool bin and executables from `node_modules/.bin` are added to `PATH`, which is useful for scripts such as `vite`.",
),
"data": attr.label_list(
allow_files = True,
@@ -151,7 +146,24 @@ declared in `package.json` and expect to run from the package directory with
values = ["workspace", "package"],
doc = "Working directory at runtime: Bazel runfiles `workspace` root or the directory containing `package.json`. The default `package` mode matches tools such as Vite that resolve config and assets relative to the package directory.",
),
},
"inherit_host_path": attr.bool(
default = False,
doc = "If true, appends the host PATH after the staged Bun runtime tool bin and node_modules/.bin entries at runtime.",
),
})
bun_script = rule(
implementation = _bun_script_impl,
doc = """Runs a named `package.json` script with Bun as an executable target.
Use this rule to expose existing package scripts such as `dev`, `build`, or
`check` via `bazel run` without adding wrapper shell scripts. This is a good fit
for Vite-style workflows, where scripts like `vite dev` or `vite build` are
declared in `package.json` and expect to run from the package directory with
the staged Bun runtime tool bin and `node_modules/.bin` on `PATH`. This is a
local workflow helper rather than a hermetic build rule.
""",
attrs = _BUN_SCRIPT_ATTRS,
executable = True,
toolchains = ["//bun:toolchain_type"],
)

View File

@@ -1,11 +1,14 @@
"""Rule for running test suites with Bun."""
load("//internal:bun_command.bzl", "append_shell_flag", "append_shell_flag_files", "append_shell_flag_value", "append_shell_flag_values", "append_shell_install_mode", "append_shell_raw_flags", "render_shell_array", "shell_quote")
load("//internal:bun_command.bzl", "append_flag", "append_flag_value", "append_install_mode", "append_raw_flags")
load("//internal:js_library.bzl", "collect_js_runfiles")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
load("//internal:runtime_launcher.bzl", "declare_runtime_wrapper", "runfiles_path", "runtime_launcher_attrs", "write_launcher_spec")
load("//internal:workspace.bzl", "create_bun_workspace_info", "workspace_runfiles")
def _bun_test_impl(ctx):
if ctx.attr.install_mode != "disable":
fail("bun_test requires install_mode = \"disable\" for hermetic test execution")
toolchain = ctx.toolchains["//bun:toolchain_type"]
bun_bin = toolchain.bun.bun_bin
primary_file = ctx.files.srcs[0]
@@ -16,97 +19,66 @@ def _bun_test_impl(ctx):
primary_file = primary_file,
)
launcher_lines = [render_shell_array("bun_args", ["--bun", "test"])]
append_shell_install_mode(launcher_lines, "bun_args", ctx.attr.install_mode)
append_shell_flag_files(launcher_lines, "bun_args", "--preload", ctx.files.preload)
append_shell_flag_files(launcher_lines, "bun_args", "--env-file", ctx.files.env_files)
append_shell_flag(launcher_lines, "bun_args", "--no-env-file", ctx.attr.no_env_file)
append_shell_flag(launcher_lines, "bun_args", "--smol", ctx.attr.smol)
append_shell_flag_value(launcher_lines, "bun_args", "--timeout", str(ctx.attr.timeout_ms) if ctx.attr.timeout_ms > 0 else None)
append_shell_flag(launcher_lines, "bun_args", "--update-snapshots", ctx.attr.update_snapshots)
append_shell_flag_value(launcher_lines, "bun_args", "--rerun-each", str(ctx.attr.rerun_each) if ctx.attr.rerun_each > 0 else None)
append_shell_flag_value(launcher_lines, "bun_args", "--retry", str(ctx.attr.retry) if ctx.attr.retry > 0 else None)
append_shell_flag(launcher_lines, "bun_args", "--todo", ctx.attr.todo)
append_shell_flag(launcher_lines, "bun_args", "--only", ctx.attr.only)
append_shell_flag(launcher_lines, "bun_args", "--pass-with-no-tests", ctx.attr.pass_with_no_tests)
append_shell_flag(launcher_lines, "bun_args", "--concurrent", ctx.attr.concurrent)
append_shell_flag(launcher_lines, "bun_args", "--randomize", ctx.attr.randomize)
append_shell_flag_value(launcher_lines, "bun_args", "--seed", str(ctx.attr.seed) if ctx.attr.seed > 0 else None)
append_shell_flag_value(launcher_lines, "bun_args", "--bail", str(ctx.attr.bail) if ctx.attr.bail > 0 else None)
append_shell_flag_value(launcher_lines, "bun_args", "--max-concurrency", str(ctx.attr.max_concurrency) if ctx.attr.max_concurrency > 0 else None)
append_shell_raw_flags(launcher_lines, "bun_args", ctx.attr.test_flags)
launcher_lines.append('coverage_requested="0"')
launcher_lines.append('coverage_dir=""')
launcher_lines.append('if [[ "${COVERAGE_DIR:-}" != "" ]]; then')
launcher_lines.append(' coverage_requested="1"')
launcher_lines.append(' coverage_dir="${COVERAGE_DIR}"')
launcher_lines.append('elif [[ "%s" == "1" ]]; then' % ("1" if ctx.attr.coverage else "0"))
launcher_lines.append(' coverage_requested="1"')
launcher_lines.append(' coverage_dir="${TEST_UNDECLARED_OUTPUTS_DIR:-${runtime_workspace}/coverage}"')
launcher_lines.append('fi')
launcher_lines.append('if [[ "${coverage_requested}" == "1" ]]; then')
launcher_lines.append(' bun_args+=("--coverage")')
launcher_lines.append(' bun_args+=("--coverage-dir" "${coverage_dir}")')
if ctx.attr.coverage_reporters:
for reporter in ctx.attr.coverage_reporters:
launcher_lines.append(' bun_args+=("--coverage-reporter" %s)' % shell_quote(reporter))
else:
launcher_lines.append(' if [[ "${COVERAGE_DIR:-}" != "" ]]; then')
launcher_lines.append(' bun_args+=("--coverage-reporter" "lcov")')
launcher_lines.append(' fi')
launcher_lines.append('fi')
launcher_lines.append('if [[ -n "${TESTBRIDGE_TEST_ONLY:-}" ]]; then')
launcher_lines.append(' bun_args+=("--test-name-pattern" "${TESTBRIDGE_TEST_ONLY}")')
launcher_lines.append('fi')
if ctx.attr.reporter == "junit":
launcher_lines.append('reporter_out="${XML_OUTPUT_FILE:-${runtime_workspace}/junit.xml}"')
launcher_lines.append('bun_args+=("--reporter" "junit" "--reporter-outfile" "${reporter_out}")')
elif ctx.attr.reporter == "dots":
launcher_lines.append('bun_args+=("--reporter" "dots")')
for src in ctx.files.srcs:
launcher_lines.append("bun_args+=(%s)" % shell_quote(src.short_path))
for arg in ctx.attr.args:
launcher_lines.append("bun_args+=(%s)" % shell_quote(arg))
argv = ["--bun", "test"]
append_install_mode(argv, ctx.attr.install_mode)
append_flag(argv, "--no-env-file", ctx.attr.no_env_file)
append_flag(argv, "--smol", ctx.attr.smol)
append_flag_value(argv, "--timeout", str(ctx.attr.timeout_ms) if ctx.attr.timeout_ms > 0 else None)
append_flag(argv, "--update-snapshots", ctx.attr.update_snapshots)
append_flag_value(argv, "--rerun-each", str(ctx.attr.rerun_each) if ctx.attr.rerun_each > 0 else None)
append_flag_value(argv, "--retry", str(ctx.attr.retry) if ctx.attr.retry > 0 else None)
append_flag(argv, "--todo", ctx.attr.todo)
append_flag(argv, "--only", ctx.attr.only)
append_flag(argv, "--pass-with-no-tests", ctx.attr.pass_with_no_tests)
append_flag(argv, "--concurrent", ctx.attr.concurrent)
append_flag(argv, "--randomize", ctx.attr.randomize)
append_flag_value(argv, "--seed", str(ctx.attr.seed) if ctx.attr.seed > 0 else None)
append_flag_value(argv, "--bail", str(ctx.attr.bail) if ctx.attr.bail > 0 else None)
append_flag_value(argv, "--max-concurrency", str(ctx.attr.max_concurrency) if ctx.attr.max_concurrency > 0 else None)
append_raw_flags(argv, ctx.attr.test_flags)
command = """
trap cleanup_runtime_workspace EXIT
cd "${runtime_workspace}"
__BUN_ARGS__
exec "${bun_bin}" "${bun_args[@]}" "$@"
""".replace("__BUN_ARGS__", "\n".join(launcher_lines))
spec_file = write_launcher_spec(ctx, {
"version": 1,
"kind": "bun_test",
"bun_short_path": runfiles_path(bun_bin),
"primary_source_short_path": runfiles_path(primary_file),
"package_json_short_path": "",
"install_metadata_short_path": runfiles_path(workspace_info.install_metadata_file) if workspace_info.install_metadata_file else "",
"install_repo_runfiles_path": workspace_info.install_repo_runfiles_path,
"node_modules_roots": workspace_info.node_modules_roots,
"package_dir_hint": workspace_info.package_dir_hint,
"working_dir_mode": "workspace",
"inherit_host_path": ctx.attr.inherit_host_path,
"argv": argv,
"args": ctx.attr.args,
"passthrough_args": True,
"tool_short_path": "",
"restart_on": [],
"watch_mode": "",
"reporter": ctx.attr.reporter,
"coverage": ctx.attr.coverage,
"coverage_reporters": ctx.attr.coverage_reporters,
"preload_short_paths": [runfiles_path(file) for file in ctx.files.preload],
"env_file_short_paths": [runfiles_path(file) for file in ctx.files.env_files],
"test_short_paths": [runfiles_path(file) for file in ctx.files.srcs],
})
launcher = declare_runtime_wrapper(ctx, bun_bin, spec_file)
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
install_metadata_short_path = workspace_info.install_metadata_file.short_path if workspace_info.install_metadata_file else "",
primary_source_short_path = primary_file.short_path,
working_dir_mode = "workspace",
) + command,
)
return [
workspace_info,
DefaultInfo(
executable = launcher,
executable = launcher.executable,
runfiles = workspace_runfiles(
ctx,
workspace_info,
direct_files = [launcher],
direct_files = [launcher.executable, launcher.runner, spec_file],
transitive_files = dep_runfiles,
),
),
]
bun_test = rule(
implementation = _bun_test_impl,
doc = """Runs Bun tests as a Bazel test target.
Supports Bazel test filtering (`--test_filter`) and coverage integration.
""",
attrs = {
_BUN_TEST_ATTRS = runtime_launcher_attrs()
_BUN_TEST_ATTRS.update({
"srcs": attr.label_list(
mandatory = True,
allow_files = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs"],
@@ -206,7 +178,21 @@ Supports Bazel test filtering (`--test_filter`) and coverage integration.
"test_flags": attr.string_list(
doc = "Additional raw flags forwarded to `bun test` before the test source list.",
),
},
"inherit_host_path": attr.bool(
default = False,
doc = "If true, appends the host PATH after the staged Bun runtime tool bin and node_modules/.bin entries at runtime.",
),
})
bun_test = rule(
implementation = _bun_test_impl,
doc = """Runs Bun tests as a Bazel test target.
Supports Bazel test filtering (`--test_filter`) and coverage integration. Tests
run with strict install-mode semantics and do not inherit the host PATH unless
explicitly requested.
""",
attrs = _BUN_TEST_ATTRS,
test = True,
toolchains = ["//bun:toolchain_type"],
)

View File

@@ -1,10 +1,8 @@
"""Compatibility rule for running an executable target as a dev server."""
load("//internal:js_library.bzl", "collect_js_runfiles")
load("//internal:workspace.bzl", "create_bun_workspace_info", "render_workspace_setup", "workspace_runfiles")
def _shell_quote(value):
return "'" + value.replace("'", "'\"'\"'") + "'"
load("//internal:runtime_launcher.bzl", "declare_runtime_wrapper", "runfiles_path", "runtime_launcher_attrs", "write_launcher_spec")
load("//internal:workspace.bzl", "create_bun_workspace_info", "workspace_runfiles")
def _js_run_devserver_impl(ctx):
toolchain = ctx.toolchains["//bun:toolchain_type"]
@@ -21,51 +19,48 @@ def _js_run_devserver_impl(ctx):
extra_files = ctx.files.data + [bun_bin, tool_default_info.files_to_run.executable],
)
tool_workspace = ctx.attr.tool.label.workspace_name or "_main"
tool_path = "{}/{}".format(tool_workspace, tool_default_info.files_to_run.executable.short_path)
default_args = " ".join([_shell_quote(arg) for arg in ctx.attr.args])
launcher = ctx.actions.declare_file(ctx.label.name)
ctx.actions.write(
output = launcher,
is_executable = True,
content = render_workspace_setup(
bun_short_path = bun_bin.short_path,
install_metadata_short_path = workspace_info.install_metadata_file.short_path if workspace_info.install_metadata_file else "",
primary_source_short_path = package_json.short_path if package_json else tool_default_info.files_to_run.executable.short_path,
package_json_short_path = package_json.short_path if package_json else "",
package_dir_hint = ctx.attr.package_dir_hint,
working_dir_mode = ctx.attr.working_dir,
) + """
trap cleanup_runtime_workspace EXIT
cd "${runtime_exec_dir}"
tool="${runfiles_dir}/__TOOL_SHORT_PATH__"
exec "${tool}" __DEFAULT_ARGS__ "$@"
""".replace("__TOOL_SHORT_PATH__", tool_path).replace("__DEFAULT_ARGS__", default_args),
)
spec_file = write_launcher_spec(ctx, {
"version": 1,
"kind": "tool_exec",
"bun_short_path": runfiles_path(bun_bin),
"primary_source_short_path": runfiles_path(package_json) if package_json else runfiles_path(tool_default_info.files_to_run.executable),
"package_json_short_path": runfiles_path(package_json) if package_json else "",
"install_metadata_short_path": runfiles_path(workspace_info.install_metadata_file) if workspace_info.install_metadata_file else "",
"install_repo_runfiles_path": workspace_info.install_repo_runfiles_path,
"node_modules_roots": workspace_info.node_modules_roots,
"package_dir_hint": ctx.attr.package_dir_hint,
"working_dir_mode": ctx.attr.working_dir,
"inherit_host_path": ctx.attr.inherit_host_path,
"argv": [],
"args": ctx.attr.args,
"passthrough_args": True,
"tool_short_path": runfiles_path(tool_default_info.files_to_run.executable),
"restart_on": [],
"watch_mode": "",
"reporter": "",
"coverage": False,
"coverage_reporters": [],
"preload_short_paths": [],
"env_file_short_paths": [],
"test_short_paths": [],
})
launcher = declare_runtime_wrapper(ctx, bun_bin, spec_file)
return [
workspace_info,
DefaultInfo(
executable = launcher,
executable = launcher.executable,
runfiles = workspace_runfiles(
ctx,
workspace_info,
direct_files = [launcher, tool_default_info.files_to_run.executable],
direct_files = [launcher.executable, launcher.runner, spec_file, tool_default_info.files_to_run.executable],
transitive_files = dep_runfiles,
).merge(tool_default_info.default_runfiles),
),
]
js_run_devserver = rule(
implementation = _js_run_devserver_impl,
doc = """Runs an executable target from a staged JS workspace.
This is a Bun-backed compatibility adapter for `rules_js`-style devserver
targets. It stages the same runtime workspace as the Bun rules, then executes
the provided tool with any default arguments.
""",
attrs = {
_JS_RUN_DEVSERVER_ATTRS = runtime_launcher_attrs()
_JS_RUN_DEVSERVER_ATTRS.update({
"tool": attr.label(
mandatory = True,
executable = True,
@@ -95,7 +90,22 @@ the provided tool with any default arguments.
values = ["workspace", "package"],
doc = "Working directory at runtime: Bazel runfiles workspace root or the resolved package directory.",
),
},
"inherit_host_path": attr.bool(
default = False,
doc = "If true, appends the host PATH after the staged Bun runtime tool bin and node_modules/.bin entries at runtime.",
),
})
js_run_devserver = rule(
implementation = _js_run_devserver_impl,
doc = """Runs an executable target from a staged JS workspace.
This is a Bun-backed compatibility adapter for `rules_js`-style devserver
targets. It stages the same runtime workspace as the Bun rules, then executes
the provided tool with any default arguments. It is intended for local
development workflows rather than hermetic build execution.
""",
attrs = _JS_RUN_DEVSERVER_ATTRS,
executable = True,
toolchains = ["//bun:toolchain_type"],
)

View File

@@ -0,0 +1,173 @@
"""Shared launcher spec and OS-native wrapper helpers for runtime rules."""
_RUNTIME_LAUNCHER = Label("//internal:runtime_launcher.js")
_WINDOWS_CONSTRAINT = Label("@platforms//os:windows")
# POSIX /bin/sh wrapper template. The __BUN_RUNFILES_PATH__,
# __RUNNER_RUNFILES_PATH__ and __SPEC_RUNFILES_PATH__ placeholders are
# substituted by declare_runtime_wrapper(). The script locates the runfiles
# tree (RUNFILES_DIR, a manifest, or sibling "<self>.runfiles*" entries),
# resolves the Bun binary / JS runner / JSON spec through rlocation(), exports
# RULES_BUN_* discovery variables for the runner, then execs Bun on the runner
# with the spec and any caller-supplied arguments.
# NOTE(review): the template body is a runtime string literal — kept byte-identical.
_POSIX_WRAPPER_TEMPLATE = """#!/bin/sh
set -eu
self="$0"
runfiles_dir="${RUNFILES_DIR:-}"
manifest="${RUNFILES_MANIFEST_FILE:-}"
if [ -n "${runfiles_dir}" ] && [ -d "${runfiles_dir}" ]; then
    :
elif [ -n "${manifest}" ] && [ -f "${manifest}" ]; then
    :
elif [ -d "${self}.runfiles" ]; then
    runfiles_dir="${self}.runfiles"
elif [ -f "${self}.runfiles_manifest" ]; then
    manifest="${self}.runfiles_manifest"
elif [ -f "${self}.exe.runfiles_manifest" ]; then
    manifest="${self}.exe.runfiles_manifest"
else
    echo "rules_bun: unable to locate runfiles for ${self}" >&2
    exit 1
fi
rlocation() {
    path="$1"
    if [ -n "${runfiles_dir}" ]; then
        printf '%s\\n' "${runfiles_dir}/${path}"
        return 0
    fi
    result=""
    while IFS= read -r line; do
        case "${line}" in
            "${path} "*)
                result="${line#${path} }"
                break
                ;;
        esac
    done < "${manifest}"
    if [ -z "${result}" ]; then
        echo "rules_bun: missing runfile ${path}" >&2
        exit 1
    fi
    printf '%s\\n' "${result}"
}
bun_bin="$(rlocation "__BUN_RUNFILES_PATH__")"
runner="$(rlocation "__RUNNER_RUNFILES_PATH__")"
spec="$(rlocation "__SPEC_RUNFILES_PATH__")"
export RULES_BUN_LAUNCHER_PATH="${self}"
if [ -n "${runfiles_dir}" ]; then
    export RULES_BUN_RUNFILES_DIR="${runfiles_dir}"
fi
if [ -n "${manifest}" ]; then
    export RULES_BUN_RUNFILES_MANIFEST="${manifest}"
fi
exec "${bun_bin}" --bun "${runner}" "${spec}" "$@"
"""
# Windows cmd.exe wrapper template — the batch counterpart of
# _POSIX_WRAPPER_TEMPLATE with the same three placeholders. It resolves
# runfiles via RUNFILES_DIR / RUNFILES_MANIFEST_FILE or sibling
# "%SELF%.runfiles*" entries, looks paths up with the :rlocation subroutine
# (findstr over the manifest, or path join in directory mode), exports the
# RULES_BUN_* discovery variables, and forwards the caller's exit code.
# NOTE(review): the template body is a runtime string literal — kept byte-identical.
_CMD_WRAPPER_TEMPLATE = """@echo off
setlocal
set "SELF=%~f0"
set "RUNFILES_DIR_VALUE=%RUNFILES_DIR%"
set "RUNFILES_MANIFEST_VALUE=%RUNFILES_MANIFEST_FILE%"
if defined RUNFILES_DIR_VALUE if exist "%RUNFILES_DIR_VALUE%" goto have_runfiles
if defined RUNFILES_MANIFEST_VALUE if exist "%RUNFILES_MANIFEST_VALUE%" goto have_runfiles
if exist "%SELF%.runfiles" (
  set "RUNFILES_DIR_VALUE=%SELF%.runfiles"
  goto have_runfiles
)
if exist "%SELF%.runfiles_manifest" (
  set "RUNFILES_MANIFEST_VALUE=%SELF%.runfiles_manifest"
  goto have_runfiles
)
if exist "%~dpn0.runfiles_manifest" (
  set "RUNFILES_MANIFEST_VALUE=%~dpn0.runfiles_manifest"
  goto have_runfiles
)
echo rules_bun: unable to locate runfiles for "%SELF%" 1>&2
exit /b 1
:have_runfiles
call :rlocation "__BUN_RUNFILES_PATH__" BUN_BIN || exit /b 1
call :rlocation "__RUNNER_RUNFILES_PATH__" RUNNER || exit /b 1
call :rlocation "__SPEC_RUNFILES_PATH__" SPEC || exit /b 1
set "RULES_BUN_LAUNCHER_PATH=%SELF%"
if defined RUNFILES_DIR_VALUE (
  set "RULES_BUN_RUNFILES_DIR=%RUNFILES_DIR_VALUE%"
) else (
  set "RULES_BUN_RUNFILES_DIR="
)
if defined RUNFILES_MANIFEST_VALUE (
  set "RULES_BUN_RUNFILES_MANIFEST=%RUNFILES_MANIFEST_VALUE%"
) else (
  set "RULES_BUN_RUNFILES_MANIFEST="
)
"%BUN_BIN%" --bun "%RUNNER%" "%SPEC%" %*
exit /b %ERRORLEVEL%
:rlocation
set "LOOKUP=%~1"
set "OUTPUT_VAR=%~2"
if defined RUNFILES_DIR_VALUE (
  set "%OUTPUT_VAR%=%RUNFILES_DIR_VALUE%\\%LOOKUP:/=\\%"
  exit /b 0
)
for /f "tokens=1,* delims= " %%A in ('findstr /b /c:"%LOOKUP% " "%RUNFILES_MANIFEST_VALUE%"') do (
  set "%OUTPUT_VAR%=%%B"
  exit /b 0
)
echo rules_bun: missing runfile %LOOKUP% 1>&2
exit /b 1
"""
def runfiles_path(file):
    """Returns the runfiles-root-relative path ("<repo>/<path>") for a file.

    This is the key format used by runfiles manifests and by the rlocation()
    helpers in the generated wrapper scripts.

    Fix: for files owned by external repositories, File.short_path already
    starts with "../<repo>/...". Prepending owner.workspace_name produced
    "<repo>/../<repo>/..." — which happens to resolve under a directory-based
    runfiles tree via path normalization, but never matches a manifest key,
    breaking manifest-mode (e.g. Windows) lookups. Strip the "../" prefix
    instead, per the canonical rlocation-path computation.
    """
    short_path = file.short_path
    if short_path.startswith("../"):
        # External-repo file: "../<repo>/<path>" -> "<repo>/<path>".
        return short_path[3:]
    workspace_name = file.owner.workspace_name
    if workspace_name:
        return "{}/{}".format(workspace_name, short_path)
    # Main-repo files live under the canonical "_main" runfiles directory.
    return "_main/{}".format(short_path)
def runtime_launcher_attrs():
    """Private attributes shared by every rule that emits a runtime launcher.

    Provides the JS runner program and the Windows constraint label used by
    is_windows_target() / declare_runtime_wrapper().
    """
    runner_attr = attr.label(
        default = _RUNTIME_LAUNCHER,
        allow_single_file = True,
    )
    windows_attr = attr.label(default = _WINDOWS_CONSTRAINT)
    return {
        "_runtime_launcher": runner_attr,
        "_windows_constraint": windows_attr,
    }
def is_windows_target(ctx):
    """True when the rule's target platform satisfies the Windows constraint."""
    windows_constraint = ctx.attr._windows_constraint[platform_common.ConstraintValueInfo]
    return ctx.target_platform_has_constraint(windows_constraint)
def write_launcher_spec(ctx, spec):
    """Serializes the launcher spec dict as newline-terminated JSON.

    Declares "<name>.launcher.json" next to the target and returns the File,
    which the wrapper script passes to the JS runner at runtime.
    """
    output = ctx.actions.declare_file("{}.launcher.json".format(ctx.label.name))
    ctx.actions.write(
        output = output,
        content = json.encode(spec) + "\n",
    )
    return output
def declare_runtime_wrapper(ctx, bun_bin, spec_file):
    """Writes the OS-native launcher wrapper for a runtime rule.

    Picks the .cmd template on Windows targets and the /bin/sh template
    elsewhere, fills in the runfiles paths of the Bun binary, the JS runner
    and the spec file, and returns struct(executable = wrapper File,
    runner = runner File) for the caller's DefaultInfo/runfiles wiring.
    """
    runner = ctx.file._runtime_launcher
    windows = is_windows_target(ctx)
    wrapper = ctx.actions.declare_file(ctx.label.name + (".cmd" if windows else ""))
    substitutions = {
        "__BUN_RUNFILES_PATH__": runfiles_path(bun_bin),
        "__RUNNER_RUNFILES_PATH__": runfiles_path(runner),
        "__SPEC_RUNFILES_PATH__": runfiles_path(spec_file),
    }
    content = _CMD_WRAPPER_TEMPLATE if windows else _POSIX_WRAPPER_TEMPLATE
    for placeholder, value in substitutions.items():
        content = content.replace(placeholder, value)
    ctx.actions.write(
        output = wrapper,
        content = content,
        is_executable = True,
    )
    return struct(
        executable = wrapper,
        runner = runner,
    )

1259
internal/runtime_launcher.js Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,11 +1,13 @@
"""Shared Bun workspace metadata and launcher helpers."""
"""Shared Bun workspace metadata helpers."""
BunWorkspaceInfo = provider(
doc = "Workspace/runtime metadata shared by Bun rules and adapters.",
fields = {
"install_metadata_file": "Optional install metadata file from bun_install.",
"install_repo_runfiles_path": "Runfiles root for the node_modules repository when present.",
"metadata_file": "Rule-local metadata file describing the staged workspace inputs.",
"node_modules_files": "Depset of node_modules files from bun_install.",
"node_modules_roots": "Sorted repo-relative node_modules roots available in runfiles.",
"package_dir_hint": "Package-relative directory when known at analysis time.",
"package_json": "Package manifest file when explicitly provided.",
"primary_file": "Primary source file used to resolve the runtime package context.",
@@ -13,690 +15,41 @@ BunWorkspaceInfo = provider(
},
)
_WORKSPACE_SETUP_TEMPLATE = """#!/usr/bin/env bash
set -euo pipefail
runfiles_dir="${RUNFILES_DIR:-$0.runfiles}"
workspace_root="${runfiles_dir}/_main"
workspace_root="$(cd "${workspace_root}" && pwd -P)"
bun_bin="${runfiles_dir}/_main/__BUN_SHORT_PATH__"
primary_source=""
if [[ -n "__PRIMARY_SOURCE_SHORT_PATH__" ]]; then
primary_source="${runfiles_dir}/_main/__PRIMARY_SOURCE_SHORT_PATH__"
fi
package_json=""
if [[ -n "__PACKAGE_JSON_SHORT_PATH__" ]]; then
package_json="${runfiles_dir}/_main/__PACKAGE_JSON_SHORT_PATH__"
fi
package_rel_dir_hint="__PACKAGE_DIR_HINT__"
install_root_rel_dir_hint="__INSTALL_ROOT_REL_DIR__"
install_metadata=""
if [[ -n "__INSTALL_METADATA_SHORT_PATH__" ]]; then
install_metadata="${runfiles_dir}/_main/__INSTALL_METADATA_SHORT_PATH__"
fi
working_dir_mode="__WORKING_DIR_MODE__"
normalize_rel_dir() {
local value="$1"
if [[ -z "${value}" || "${value}" == "." ]]; then
echo "."
else
echo "${value#./}"
fi
}
dirname_rel_dir() {
local value
value="$(normalize_rel_dir "$1")"
if [[ "${value}" == "." || "${value}" != */* ]]; then
echo "."
return 0
fi
echo "${value%/*}"
}
first_path_component() {
local value
value="$(normalize_rel_dir "$1")"
if [[ "${value}" == "." ]]; then
echo ""
return 0
fi
echo "${value%%/*}"
}
rel_dir_from_abs_path() {
local absolute_path="$1"
if [[ "${absolute_path}" == "${workspace_root}" ]]; then
echo "."
return 0
fi
echo "${absolute_path#"${workspace_root}/"}"
}
find_package_rel_dir_for_path() {
local path="$1"
local dir="$1"
if [[ -f "${dir}" ]]; then
dir="$(dirname "${dir}")"
fi
while [[ "${dir}" == "${workspace_root}"* ]]; do
if [[ -f "${dir}/package.json" ]]; then
rel_dir_from_abs_path "${dir}"
return 0
fi
if [[ "${dir}" == "${workspace_root}" ]]; then
break
fi
dir="$(dirname "${dir}")"
done
rel_dir_from_abs_path "$(dirname "${path}")"
}
find_working_rel_dir_for_path() {
local path="$1"
local dir="$1"
if [[ -f "${dir}" ]]; then
dir="$(dirname "${dir}")"
fi
while [[ "${dir}" == "${workspace_root}"* ]]; do
if [[ -f "${dir}/.env" || -f "${dir}/package.json" ]]; then
rel_dir_from_abs_path "${dir}"
return 0
fi
if [[ "${dir}" == "${workspace_root}" ]]; then
break
fi
dir="$(dirname "${dir}")"
done
rel_dir_from_abs_path "$(dirname "${path}")"
}
strip_rel_prefix() {
local child
child="$(normalize_rel_dir "$1")"
local parent
parent="$(normalize_rel_dir "$2")"
if [[ "${parent}" == "." ]]; then
echo "${child}"
return 0
fi
if [[ "${child}" == "${parent}" ]]; then
echo "."
return 0
fi
if [[ "${child}" == "${parent}/"* ]]; then
echo "${child#"${parent}/"}"
return 0
fi
echo "${child}"
}
select_primary_node_modules() {
local selected=""
local fallback=""
while IFS= read -r node_modules_dir; do
if [[ -z "${fallback}" ]]; then
fallback="${node_modules_dir}"
fi
if [[ ! -d "${node_modules_dir}/.bun" ]]; then
continue
fi
if [[ "${node_modules_dir}" != *"/runfiles/_main/"* ]]; then
selected="${node_modules_dir}"
break
fi
if [[ -z "${selected}" ]]; then
selected="${node_modules_dir}"
fi
done < <(find -L "${runfiles_dir}" -type d -name node_modules 2>/dev/null | sort)
if [[ -n "${selected}" ]]; then
echo "${selected}"
else
echo "${fallback}"
fi
}
link_top_level_entries() {
local source_root="$1"
local destination_root="$2"
local skipped_entry="$3"
local entry=""
local entry_name=""
shopt -s dotglob nullglob
for entry in "${source_root}"/* "${source_root}"/.[!.]* "${source_root}"/..?*; do
entry_name="$(basename "${entry}")"
if [[ "${entry_name}" == "." || "${entry_name}" == ".." ]]; then
continue
fi
if [[ -n "${skipped_entry}" && "${entry_name}" == "${skipped_entry}" ]]; then
continue
fi
ln -s "${entry}" "${destination_root}/${entry_name}"
done
shopt -u dotglob nullglob
}
materialize_package_path() {
local source_root="$1"
local destination_root="$2"
local package_rel_dir
package_rel_dir="$(normalize_rel_dir "$3")"
if [[ "${package_rel_dir}" == "." ]]; then
return 0
fi
local source_cursor="${source_root}"
local destination_cursor="${destination_root}"
local parts=()
local current="${package_rel_dir}"
while [[ -n "${current}" ]]; do
if [[ "${current}" == */* ]]; then
parts+=("${current%%/*}")
current="${current#*/}"
else
parts+=("${current}")
break
fi
done
local index=0
while [[ ${index} -lt $((${#parts[@]} - 1)) ]]; do
local part="${parts[${index}]}"
local next_part="${parts[$((index + 1))]}"
source_cursor="${source_cursor}/${part}"
destination_cursor="${destination_cursor}/${part}"
mkdir -p "${destination_cursor}"
local sibling=""
local sibling_name=""
shopt -s dotglob nullglob
for sibling in "${source_cursor}"/* "${source_cursor}"/.[!.]* "${source_cursor}"/..?*; do
sibling_name="$(basename "${sibling}")"
if [[ "${sibling_name}" == "." || "${sibling_name}" == ".." || "${sibling_name}" == "${next_part}" ]]; then
continue
fi
if [[ ! -e "${destination_cursor}/${sibling_name}" ]]; then
ln -s "${sibling}" "${destination_cursor}/${sibling_name}"
fi
done
shopt -u dotglob nullglob
index=$((index + 1))
done
mkdir -p "${destination_root}/${package_rel_dir}"
}
materialize_directory_entries() {
local source_root="$1"
local destination_root="$2"
local entry=""
local entry_name=""
mkdir -p "${destination_root}"
shopt -s dotglob nullglob
for entry in "${source_root}"/* "${source_root}"/.[!.]* "${source_root}"/..?*; do
entry_name="$(basename "${entry}")"
if [[ "${entry_name}" == "." || "${entry_name}" == ".." ]]; then
continue
fi
rm -rf "${destination_root}/${entry_name}"
ln -s "${entry}" "${destination_root}/${entry_name}"
done
shopt -u dotglob nullglob
}
stage_workspace_view() {
local source_root="$1"
local destination_root="$2"
local package_rel_dir
package_rel_dir="$(normalize_rel_dir "$3")"
local skipped_entry
skipped_entry="$(first_path_component "${package_rel_dir}")"
link_top_level_entries "${source_root}" "${destination_root}" "${skipped_entry}"
if [[ "${package_rel_dir}" == "." ]]; then
return 0
fi
materialize_package_path "${source_root}" "${destination_root}" "${package_rel_dir}"
materialize_directory_entries "${source_root}/${package_rel_dir}" "${destination_root}/${package_rel_dir}"
}
materialize_tree_contents() {
local source_root="$1"
local destination_root="$2"
rm -rf "${destination_root}"
mkdir -p "${destination_root}"
cp -RL "${source_root}/." "${destination_root}"
}
build_workspace_package_map() {
local root="$1"
local out="$2"
python3 - "${root}" >"${out}" <<'PY'
import json
import os
import sys
root = os.path.abspath(sys.argv[1])
for dirpath, dirnames, filenames in os.walk(root):
dirnames[:] = [name for name in dirnames if name != "node_modules"]
if "package.json" not in filenames:
continue
manifest_path = os.path.join(dirpath, "package.json")
try:
with open(manifest_path, "r", encoding="utf-8") as manifest_file:
package_name = json.load(manifest_file).get("name")
except Exception:
continue
if not isinstance(package_name, str):
continue
rel_dir = os.path.relpath(dirpath, root)
if rel_dir == ".":
rel_dir = "."
print(f"{package_name}\t{rel_dir}")
PY
}
workspace_package_rel_dir_for_source() {
local source="$1"
local manifest_path="${source}/package.json"
local package_name=""
if [[ ! -f "${manifest_path}" ]]; then
return 1
fi
package_name="$(python3 - "${manifest_path}" <<'PY'
import json
import sys
try:
with open(sys.argv[1], "r", encoding="utf-8") as manifest_file:
package_name = json.load(manifest_file).get("name", "")
except Exception:
package_name = ""
if isinstance(package_name, str):
print(package_name)
PY
)"
if [[ -z "${package_name}" ]]; then
return 1
fi
awk -F '\t' -v name="${package_name}" '$1 == name { print $2; exit }' "${workspace_package_map}"
}
link_node_modules_entry() {
local source="$1"
local destination="$2"
local workspace_rel_dir=""
rm -rf "${destination}"
workspace_rel_dir="$(workspace_package_rel_dir_for_source "${source}" || true)"
if [[ -n "${workspace_rel_dir}" ]]; then
ln -s "${runtime_workspace}/${workspace_rel_dir}" "${destination}"
return 0
fi
if [[ -L "${source}" ]]; then
ln -s "$(readlink "${source}")" "${destination}"
else
ln -s "${source}" "${destination}"
fi
}
mirror_node_modules_dir() {
local source_dir="$1"
local destination_dir="$2"
local entry=""
local entry_name=""
local scoped_entry=""
local scoped_name=""
rm -rf "${destination_dir}"
mkdir -p "${destination_dir}"
shopt -s dotglob nullglob
for entry in "${source_dir}"/* "${source_dir}"/.[!.]* "${source_dir}"/..?*; do
entry_name="$(basename "${entry}")"
if [[ "${entry_name}" == "." || "${entry_name}" == ".." || "${entry_name}" == ".rules_bun" ]]; then
continue
fi
if [[ -d "${entry}" && ! -L "${entry}" && "${entry_name}" == @* ]]; then
mkdir -p "${destination_dir}/${entry_name}"
for scoped_entry in "${entry}"/* "${entry}"/.[!.]* "${entry}"/..?*; do
scoped_name="$(basename "${scoped_entry}")"
if [[ "${scoped_name}" == "." || "${scoped_name}" == ".." ]]; then
continue
fi
link_node_modules_entry "${scoped_entry}" "${destination_dir}/${entry_name}/${scoped_name}"
done
continue
fi
link_node_modules_entry "${entry}" "${destination_dir}/${entry_name}"
done
shopt -u dotglob nullglob
}
find_install_repo_node_modules() {
local repo_root="$1"
local package_rel_dir
package_rel_dir="$(normalize_rel_dir "$2")"
if [[ "${package_rel_dir}" != "." ]]; then
local candidate="${package_rel_dir}"
while true; do
if [[ -d "${repo_root}/${candidate}/node_modules" ]]; then
echo "${repo_root}/${candidate}/node_modules"
return 0
fi
if [[ "${candidate}" != */* ]]; then
break
fi
candidate="${candidate%/*}"
done
fi
if [[ -d "${repo_root}/node_modules" ]]; then
echo "${repo_root}/node_modules"
return 0
fi
return 1
}
mirror_install_repo_workspace_node_modules() {
local repo_root="$1"
local destination_root="$2"
while IFS= read -r install_node_modules; do
local rel_path="${install_node_modules#${repo_root}/}"
local destination="${destination_root}/${rel_path}"
mkdir -p "$(dirname "${destination}")"
mirror_node_modules_dir "${install_node_modules}" "${destination}"
done < <(find "${repo_root}" \
-path "${repo_root}/node_modules" -prune -o \
-type d -name node_modules -print 2>/dev/null | sort)
}
build_runtime_path() {
local workspace_dir="$1"
local package_dir="$2"
local install_root_dir="$3"
local entries=()
if [[ -d "${install_root_dir}/node_modules/.bin" ]]; then
entries+=("${install_root_dir}/node_modules/.bin")
fi
if [[ -d "${package_dir}/node_modules/.bin" ]]; then
if [[ "${package_dir}/node_modules/.bin" != "${install_root_dir}/node_modules/.bin" ]]; then
entries+=("${package_dir}/node_modules/.bin")
fi
fi
if [[ -d "${workspace_dir}/node_modules/.bin" && "${workspace_dir}/node_modules/.bin" != "${package_dir}/node_modules/.bin" && "${workspace_dir}/node_modules/.bin" != "${install_root_dir}/node_modules/.bin" ]]; then
entries+=("${workspace_dir}/node_modules/.bin")
fi
if [[ -n "${PATH:-}" ]]; then
entries+=("${PATH}")
fi
if [[ ${#entries[@]} -eq 0 ]]; then
echo ""
return 0
fi
local path_value=""
local entry=""
for entry in "${entries[@]}"; do
if [[ -z "${path_value}" ]]; then
path_value="${entry}"
else
path_value="${path_value}:${entry}"
fi
done
echo "${path_value}"
}
resolve_package_rel_dir() {
if [[ -n "${package_rel_dir_hint}" && "${package_rel_dir_hint}" != "." ]]; then
normalize_rel_dir "${package_rel_dir_hint}"
return 0
fi
if [[ -n "${package_json}" ]]; then
find_package_rel_dir_for_path "${package_json}"
return 0
fi
if [[ -n "${primary_source}" ]]; then
find_package_rel_dir_for_path "${primary_source}"
return 0
fi
echo "."
}
resolve_execution_rel_dir() {
local package_rel_dir="$1"
case "${working_dir_mode}" in
workspace)
echo "."
;;
package)
echo "${package_rel_dir}"
;;
entry_point)
if [[ -n "${primary_source}" ]]; then
find_working_rel_dir_for_path "${primary_source}"
else
echo "${package_rel_dir}"
fi
;;
*)
echo "${package_rel_dir}"
;;
esac
}
# Determine the directory (relative to the workspace root) holding the
# node_modules install root for the current package.
# Resolution order:
#   1. workspace_package_dirs recorded in the install metadata JSON — the
#      longest workspace package dir that matches ${package_rel_dir} (exactly
#      or as a path suffix) wins, and the remaining prefix is the root;
#   2. the explicit install_root_rel_dir hint, normalized;
#   3. the package.json directory, then the primary source's directory;
#   4. "." as the final fallback.
resolve_install_root_rel_dir() {
if [[ -n "${install_metadata}" && -f "${install_metadata}" ]]; then
local resolved_from_metadata=""
# NOTE(review): indentation inside the heredoc below appears stripped in this
# view of the file — the Python block must be properly indented in the real
# source for it to parse.
resolved_from_metadata="$(
python3 - "${install_metadata}" "${package_rel_dir}" <<'PY'
import json
import sys
install_metadata_path = sys.argv[1]
package_rel_dir = sys.argv[2]
try:
with open(install_metadata_path, "r", encoding="utf-8") as install_metadata_file:
workspace_package_dirs = json.load(install_metadata_file).get("workspace_package_dirs", [])
except Exception:
workspace_package_dirs = []
normalized_package_rel_dir = package_rel_dir.strip("./") or "."
matches = []
for workspace_package_dir in workspace_package_dirs:
normalized_workspace_package_dir = workspace_package_dir.strip("./")
if not normalized_workspace_package_dir:
continue
if normalized_package_rel_dir == normalized_workspace_package_dir:
matches.append((len(normalized_workspace_package_dir), "."))
continue
suffix = "/" + normalized_workspace_package_dir
if normalized_package_rel_dir.endswith(suffix):
prefix = normalized_package_rel_dir[:-len(suffix)].strip("/") or "."
matches.append((len(normalized_workspace_package_dir), prefix))
if matches:
matches.sort(reverse = True)
print(matches[0][1])
PY
)"
# A non-empty result means the metadata identified the install root.
if [[ -n "${resolved_from_metadata}" ]]; then
echo "${resolved_from_metadata}"
return 0
fi
fi
if [[ -n "${install_root_rel_dir_hint}" && "${install_root_rel_dir_hint}" != "." ]]; then
normalize_rel_dir "${install_root_rel_dir_hint}"
return 0
fi
if [[ -n "${package_json}" ]]; then
find_package_rel_dir_for_path "${package_json}"
return 0
fi
if [[ -n "${primary_source}" ]]; then
find_package_rel_dir_for_path "${primary_source}"
return 0
fi
echo "."
}
# --- Stage a disposable runtime workspace and wire up node_modules. ---
package_rel_dir="$(resolve_package_rel_dir)"
execution_rel_dir="$(resolve_execution_rel_dir "${package_rel_dir}")"
install_root_rel_dir="$(resolve_install_root_rel_dir)"
package_rel_dir_in_install_root="$(strip_rel_prefix "${package_rel_dir}" "${install_root_rel_dir}")"
runtime_workspace="$(mktemp -d)"
cleanup_runtime_workspace() {
rm -rf "${runtime_workspace}"
}
# NOTE(review): cleanup_runtime_workspace is defined but no
# 'trap cleanup_runtime_workspace EXIT' is visible in this chunk — confirm the
# trap is registered elsewhere in the template, otherwise the mktemp dir leaks.
stage_workspace_view "${workspace_root}" "${runtime_workspace}" "${package_rel_dir}"
# Translate the relative dirs into absolute paths inside the staged workspace
# ("." maps to the workspace itself).
runtime_package_dir="${runtime_workspace}"
if [[ "${package_rel_dir}" != "." ]]; then
runtime_package_dir="${runtime_workspace}/${package_rel_dir}"
fi
runtime_install_root="${runtime_workspace}"
if [[ "${install_root_rel_dir}" != "." ]]; then
runtime_install_root="${runtime_workspace}/${install_root_rel_dir}"
fi
runtime_exec_dir="${runtime_workspace}"
if [[ "${execution_rel_dir}" != "." ]]; then
runtime_exec_dir="${runtime_workspace}/${execution_rel_dir}"
fi
# Materialize source trees only when the corresponding anchor file is known.
if [[ -n "${primary_source}" ]]; then
materialize_tree_contents "${workspace_root}/${package_rel_dir}" "${runtime_package_dir}"
fi
if [[ -n "${package_json}" ]]; then
materialize_tree_contents "${workspace_root}/${install_root_rel_dir}" "${runtime_install_root}"
fi
# Re-point anchor paths at their staged copies when they lived under the
# original workspace root.
if [[ -n "${primary_source}" && "${primary_source}" == "${workspace_root}"* ]]; then
primary_source="${runtime_workspace}/$(rel_dir_from_abs_path "${primary_source}")"
fi
if [[ -n "${package_json}" && "${package_json}" == "${workspace_root}"* ]]; then
package_json="${runtime_workspace}/$(rel_dir_from_abs_path "${package_json}")"
fi
workspace_package_map="${runtime_workspace}/.rules_bun_workspace_packages.tsv"
build_workspace_package_map "${runtime_workspace}" "${workspace_package_map}"
# Mirror node_modules from the install repo into the staged tree: shared root
# first, then the package-specific node_modules (if distinct), then the other
# workspace packages' node_modules.
primary_node_modules="$(select_primary_node_modules)"
install_repo_root=""
if [[ -n "${primary_node_modules}" ]]; then
install_repo_root="$(dirname "${primary_node_modules}")"
mkdir -p "${runtime_install_root}"
mirror_node_modules_dir "${primary_node_modules}" "${runtime_install_root}/node_modules"
fi
if [[ -n "${install_repo_root}" ]]; then
resolved_install_node_modules="$(find_install_repo_node_modules "${install_repo_root}" "${package_rel_dir_in_install_root}" || true)"
if [[ -n "${resolved_install_node_modules}" && "${resolved_install_node_modules}" != "${install_repo_root}/node_modules" ]]; then
mirror_node_modules_dir "${resolved_install_node_modules}" "${runtime_package_dir}/node_modules"
fi
mirror_install_repo_workspace_node_modules "${install_repo_root}" "${runtime_install_root}"
fi
# Fall back to a symlink so the package still resolves the shared install.
if [[ ! -e "${runtime_package_dir}/node_modules" && -e "${runtime_install_root}/node_modules" && "${runtime_package_dir}" != "${runtime_install_root}" ]]; then
ln -s "${runtime_install_root}/node_modules" "${runtime_package_dir}/node_modules"
fi
# Replace PATH with the staged .bin dirs (plus the inherited PATH, if any).
runtime_path="$(build_runtime_path "${runtime_workspace}" "${runtime_package_dir}" "${runtime_install_root}")"
if [[ -n "${runtime_path}" ]]; then
export PATH="${runtime_path}"
fi
"""
def _shell_quote(value):
return "'" + value.replace("'", "'\"'\"'") + "'"
def _dirname(path):
if not path or path == ".":
return "."
index = path.rfind("/")
if index < 0:
return "."
if index == 0:
return "/"
return path[:index]
def find_install_metadata_file(files):
    """Return the first file whose short_path ends with the rules_bun install manifest, else None."""
    manifest_suffix = "node_modules/.rules_bun/install.json"
    for candidate in files:
        if candidate.short_path.endswith(manifest_suffix):
            return candidate
    return None
# NOTE(review): this span looks like TWO revisions of resolve_node_modules_roots
# interleaved by the diff renderer — a second, nested
# `def resolve_node_modules_roots(files):` appears mid-body, and statements
# follow the `return struct(...)`. It cannot be valid Starlark as written;
# reconstruct the intended single definition from version control before
# editing further. Comments below annotate the recognizable pieces only.
def resolve_node_modules_roots(files, workspace_dir = ""):
install_metadata_file = find_install_metadata_file(files)
shared_node_modules_root = None
workspace_node_modules_root = None
# Returns the runfiles directory name for a file's owning repo ("_main" for
# the main repository).
def _runfiles_workspace(file):
workspace_name = file.owner.workspace_name
if workspace_name:
return workspace_name
return "_main"
if install_metadata_file:
shared_node_modules_root = _dirname(_dirname(install_metadata_file.path))
# Strips the "../<repo>/" external prefix from a short_path, normalizing
# backslashes first.
def _repo_relative_short_path(file):
short_path = file.short_path.replace("\\", "/")
workspace_name = _runfiles_workspace(file)
external_prefix = "../{}/".format(workspace_name)
if short_path.startswith(external_prefix):
return short_path[len(external_prefix):]
if short_path == "../{}".format(workspace_name):
return "."
return short_path
workspace_marker = ""
if workspace_dir:
workspace_marker = "/%s/node_modules/" % workspace_dir.strip("/")
shortest_path = None
# Scan all files: find the first path containing the workspace marker and
# track the shortest path as a fallback anchor for the shared root.
for src in files:
if workspace_marker and workspace_marker in src.path and workspace_node_modules_root == None:
workspace_node_modules_root = src.path[:src.path.find(workspace_marker) + len(workspace_marker) - 1]
if shortest_path == None or len(src.path) < len(shortest_path):
shortest_path = src.path
if shared_node_modules_root == None and shortest_path:
# NOTE(review): the nested duplicate definition below is where the second
# revision (roots-dict variant) begins in the interleaved diff.
def resolve_node_modules_roots(files):
roots = {}
marker = "/node_modules/"
marker_index = shortest_path.find(marker)
if marker_index >= 0:
shared_node_modules_root = shortest_path[:marker_index + len("/node_modules")]
for file in files:
short_path = _repo_relative_short_path(file)
if short_path == "node_modules" or short_path.startswith("node_modules/"):
roots["node_modules"] = True
return struct(
install_metadata_file = install_metadata_file,
node_modules_root = workspace_node_modules_root or shared_node_modules_root,
shared_node_modules_root = shared_node_modules_root,
)
# NOTE(review): unreachable as written — follows the return above.
marker_index = short_path.find(marker)
if marker_index >= 0:
roots[short_path[:marker_index + len("/node_modules")]] = True
return sorted(roots.keys())
def create_bun_workspace_info(ctx, primary_file = None, package_json = None, package_dir_hint = ".", extra_files = None):
direct_runtime_files = []
@@ -708,15 +61,25 @@ def create_bun_workspace_info(ctx, primary_file = None, package_json = None, pac
node_modules_files = depset()
install_metadata_file = None
install_repo_runfiles_path = ""
node_modules_roots = []
if getattr(ctx.attr, "node_modules", None):
node_modules_files = ctx.attr.node_modules[DefaultInfo].files
install_metadata_file = find_install_metadata_file(node_modules_files.to_list())
node_modules_file_list = node_modules_files.to_list()
install_metadata_file = find_install_metadata_file(node_modules_file_list)
node_modules_roots = resolve_node_modules_roots(node_modules_file_list)
if install_metadata_file:
install_repo_runfiles_path = _runfiles_workspace(install_metadata_file)
elif node_modules_file_list:
install_repo_runfiles_path = _runfiles_workspace(node_modules_file_list[0])
metadata_file = ctx.actions.declare_file(ctx.label.name + ".bun_workspace.json")
ctx.actions.write(
output = metadata_file,
content = json.encode({
"install_metadata": install_metadata_file.short_path if install_metadata_file else "",
"install_repo_runfiles_path": install_repo_runfiles_path,
"node_modules_roots": node_modules_roots,
"package_dir_hint": package_dir_hint or ".",
"package_json": package_json.short_path if package_json else "",
"primary_file": primary_file.short_path if primary_file else "",
@@ -731,8 +94,10 @@ def create_bun_workspace_info(ctx, primary_file = None, package_json = None, pac
return BunWorkspaceInfo(
install_metadata_file = install_metadata_file,
install_repo_runfiles_path = install_repo_runfiles_path,
metadata_file = metadata_file,
node_modules_files = node_modules_files,
node_modules_roots = node_modules_roots,
package_dir_hint = package_dir_hint or ".",
package_json = package_json,
primary_file = primary_file,
@@ -746,31 +111,3 @@ def workspace_runfiles(ctx, workspace_info, direct_files = None, transitive_file
transitive = [workspace_info.runtime_files] + (transitive_files or []),
),
)
def render_workspace_setup(
        bun_short_path,
        working_dir_mode,
        primary_source_short_path = "",
        package_json_short_path = "",
        package_dir_hint = ".",
        install_root_rel_dir = ".",
        install_metadata_short_path = ""):
    """Render the embedded workspace-setup shell template.

    Substitutes each __PLACEHOLDER__ token in _WORKSPACE_SETUP_TEMPLATE with
    the corresponding caller-provided value; empty directory-like values fall
    back to ".". Substitution order matches the original replace chain.
    """
    substitutions = [
        ("__BUN_SHORT_PATH__", bun_short_path),
        ("__PRIMARY_SOURCE_SHORT_PATH__", primary_source_short_path),
        ("__PACKAGE_JSON_SHORT_PATH__", package_json_short_path),
        ("__PACKAGE_DIR_HINT__", package_dir_hint or "."),
        ("__INSTALL_ROOT_REL_DIR__", install_root_rel_dir or "."),
        ("__INSTALL_METADATA_SHORT_PATH__", install_metadata_short_path),
        ("__WORKING_DIR_MODE__", working_dir_mode),
    ]
    rendered = _WORKSPACE_SETUP_TEMPLATE
    for placeholder, value in substitutions:
        rendered = rendered.replace(placeholder, value)
    return rendered

View File

@@ -1 +1,3 @@
package(default_visibility = ["//visibility:public"])
exports_files(["nested_bazel_test.sh"])

1
tests/binary_test/.env Normal file
View File

@@ -0,0 +1 @@
BUN_ENV_CWD_TEST=from-dotenv

View File

@@ -1,5 +1,5 @@
load("//bun:defs.bzl", "bun_binary")
load("@rules_shell//shell:sh_test.bzl", "sh_test")
load("//bun:defs.bzl", "bun_binary")
bun_binary(
name = "hello_js_bin",
@@ -8,8 +8,12 @@ bun_binary(
sh_test(
name = "bun_binary_js_test",
size = "small",
srcs = ["run_binary.sh"],
args = ["$(location :hello_js_bin)", "hello-js"],
args = [
"$(location :hello_js_bin)",
"hello-js",
],
data = [":hello_js_bin"],
)
@@ -20,23 +24,28 @@ bun_binary(
sh_test(
name = "bun_binary_ts_test",
size = "small",
srcs = ["run_binary.sh"],
args = ["$(location :hello_ts_bin)", "hello-ts"],
args = [
"$(location :hello_ts_bin)",
"hello-ts",
],
data = [":hello_ts_bin"],
)
bun_binary(
name = "hello_js_with_data_bin",
entry_point = "hello.js",
data = ["payload.txt"],
entry_point = "hello.js",
)
sh_test(
name = "bun_binary_data_test",
size = "small",
srcs = ["verify_data_shape.sh"],
args = [
"$(location //internal:bun_binary.bzl)",
"$(location //tests/binary_test:BUILD.bazel)",
"$(rlocationpath //internal:bun_binary.bzl)",
"$(rlocationpath //tests/binary_test:BUILD.bazel)",
],
data = [
"//internal:bun_binary.bzl",
@@ -46,13 +55,14 @@ sh_test(
bun_binary(
name = "env_cwd_bin",
entry_point = "env.ts",
data = [".env"],
entry_point = "env.ts",
working_dir = "entry_point",
)
sh_test(
name = "bun_binary_env_cwd_test",
size = "small",
srcs = ["run_env_binary.sh"],
args = ["$(location :env_cwd_bin)"],
data = [":env_cwd_bin"],
@@ -60,13 +70,14 @@ sh_test(
bun_binary(
name = "env_parent_cwd_bin",
entry_point = "env_parent/src/main.ts",
data = ["env_parent/.env"],
entry_point = "env_parent/src/main.ts",
working_dir = "entry_point",
)
sh_test(
name = "bun_binary_env_parent_cwd_test",
size = "small",
srcs = ["run_parent_env_binary.sh"],
args = ["$(location :env_parent_cwd_bin)"],
data = [":env_parent_cwd_bin"],
@@ -74,14 +85,18 @@ sh_test(
bun_binary(
name = "runtime_flag_bin",
args = [
"one",
"two",
],
entry_point = "flag_probe.ts",
args = ["one", "two"],
preload = ["preload.ts"],
env_files = ["runtime.env"],
preload = ["preload.ts"],
)
sh_test(
name = "bun_binary_runtime_flags_test",
size = "small",
srcs = ["run_flag_binary.sh"],
args = ["$(location :runtime_flag_bin)"],
data = [":runtime_flag_bin"],
@@ -89,6 +104,7 @@ sh_test(
sh_test(
name = "bun_binary_runtime_flags_shape_test",
size = "small",
srcs = ["verify_runtime_flags_shape.sh"],
args = ["$(location :runtime_flag_bin)"],
data = [":runtime_flag_bin"],
@@ -96,24 +112,52 @@ sh_test(
bun_binary(
name = "configured_launcher_bin",
entry_point = "hello.ts",
node_modules = "@script_test_vite_node_modules//:node_modules",
smol = True,
conditions = [
"browser",
"development",
],
entry_point = "hello.ts",
inherit_host_path = True,
install_mode = "force",
node_modules = "@script_test_vite_node_modules//:node_modules",
run_flags = [
"--hot",
"--console-depth",
"4",
],
smol = True,
visibility = ["//tests/ci_test:__pkg__"],
)
sh_test(
name = "bun_binary_configured_launcher_shape_test",
size = "small",
srcs = ["verify_configured_launcher_shape.sh"],
args = ["$(location :configured_launcher_bin)"],
data = [":configured_launcher_bin"],
)
bun_binary(
name = "path_default_bin",
entry_point = "path_probe.ts",
)
bun_binary(
name = "path_inherit_bin",
entry_point = "path_probe.ts",
inherit_host_path = True,
)
sh_test(
name = "bun_binary_host_path_test",
size = "small",
srcs = ["run_path_binary.sh"],
args = [
"$(location :path_default_bin)",
"$(location :path_inherit_bin)",
],
data = [
":path_default_bin",
":path_inherit_bin",
],
)

View File

@@ -0,0 +1 @@
BUN_ENV_PARENT_TEST=from-parent-dotenv

View File

@@ -0,0 +1,18 @@
import { spawnSync } from "node:child_process";
const pathValue = process.env.PATH ?? "";
// Run `command` synchronously with the current environment and report whether
// it exited with status 0 (signals / spawn failures count as failure).
function commandSucceeds(command: string, args: string[]): boolean {
  const { status } = spawnSync(command, args, {
    encoding: "utf8",
    env: process.env,
  });
  return status === 0;
}
console.log(JSON.stringify({
hasHostSentinel: pathValue.includes("rules_bun_host_path_sentinel"),
canRunBun: commandSucceeds("bun", ["-e", "process.exit(0)"]),
canRunBunx: commandSucceeds("bunx", ["--version"]),
canRunNode: commandSucceeds("node", ["-e", "process.exit(0)"]),
}));

View File

@@ -3,7 +3,18 @@ set -euo pipefail
binary="$1"
expected="$2"
output="$(${binary})"
# Invoke a Bazel-built launcher portably. Windows launchers end in .cmd and
# are dispatched through cmd.exe with their CRLF output normalized (carriage
# returns stripped); the .cmd branch always reports success, matching the
# original behavior. Other launchers run directly and propagate their status.
run_launcher() {
  local target="$1"
  shift
  case "${target}" in
    *.cmd)
      cmd.exe /c call "${target}" "$@" | tr -d '\r'
      return 0
      ;;
    *)
      "${target}" "$@"
      ;;
  esac
}
output="$(run_launcher "${binary}")"
if [[ ${output} != "${expected}" ]]; then
echo "Unexpected output from ${binary}: ${output}" >&2

View File

@@ -2,7 +2,18 @@
set -euo pipefail
binary="$1"
output="$(${binary})"
# Invoke a Bazel-built launcher portably: .cmd launchers go through cmd.exe
# and have carriage returns stripped so output compares equal to Unix output.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
# NOTE(review): the pipeline's status is discarded (always returns 0) —
# presumably intentional for .cmd launchers; confirm.
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${binary}")"
if [[ ${output} != "from-dotenv" ]]; then
echo "Expected .env value from entry-point directory, got: ${output}" >&2

View File

@@ -2,7 +2,18 @@
set -euo pipefail
binary="$1"
output="$(${binary})"
# Invoke a Bazel-built launcher portably: .cmd launchers go through cmd.exe
# and have carriage returns stripped so output compares equal to Unix output.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
# NOTE(review): the pipeline's status is discarded (always returns 0) —
# presumably intentional for .cmd launchers; confirm.
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${binary}")"
expected='{"preloaded":"yes","env":"from-env-file","argv":["one","two"]}'

View File

@@ -2,7 +2,18 @@
set -euo pipefail
binary="$1"
output="$(${binary})"
# Invoke a Bazel-built launcher portably: .cmd launchers go through cmd.exe
# and have carriage returns stripped so output compares equal to Unix output.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
# NOTE(review): the pipeline's status is discarded (always returns 0) —
# presumably intentional for .cmd launchers; confirm.
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${binary}")"
if [[ ${output} != "from-parent-dotenv" ]]; then
echo "Expected .env value from parent directory, got: ${output}" >&2

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -euo pipefail
default_binary="$1"
inherit_binary="$2"
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
env PATH="rules_bun_host_path_sentinel:${PATH:-}" cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
return 0
fi
env PATH="rules_bun_host_path_sentinel:${PATH:-}" "${launcher}" "$@"
}
default_output="$(run_launcher "${default_binary}")"
inherit_output="$(run_launcher "${inherit_binary}")"
if [[ ${default_output} != '{"hasHostSentinel":false,"canRunBun":true,"canRunBunx":true,"canRunNode":true}' ]]; then
echo "Expected default launcher to hide host PATH, got: ${default_output}" >&2
exit 1
fi
if [[ ${inherit_output} != '{"hasHostSentinel":true,"canRunBun":true,"canRunBunx":true,"canRunNode":true}' ]]; then
echo "Expected inherit_host_path launcher to preserve host PATH, got: ${inherit_output}" >&2
exit 1
fi

View File

@@ -1,16 +1,23 @@
#!/usr/bin/env bash
set -euo pipefail
binary="$1"
launcher="$1"
grep -Fq -- 'install_metadata="${runfiles_dir}/_main/' "${binary}"
grep -Fq -- 'node_modules/.rules_bun/install.json' "${binary}"
grep -Fq -- "--smol" "${binary}"
grep -Fq -- "--conditions" "${binary}"
grep -Fq -- "'browser'" "${binary}"
grep -Fq -- "'development'" "${binary}"
grep -Fq -- "--install" "${binary}"
grep -Fq -- "'force'" "${binary}"
grep -Fq -- "'--hot'" "${binary}"
grep -Fq -- "'--console-depth'" "${binary}"
grep -Fq -- "'4'" "${binary}"
python3 - "${launcher}" <<'PY'
import json
import pathlib
import sys
path = pathlib.Path(sys.argv[1])
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
spec = json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
argv = spec["argv"]
assert spec["install_metadata_short_path"].endswith("node_modules/.rules_bun/install.json"), spec
assert spec["inherit_host_path"] is True, spec
assert spec["node_modules_roots"], spec
assert all(not root.startswith("../") for root in spec["node_modules_roots"]), spec
for value in ["--smol", "--conditions", "browser", "development", "--install", "force", "--hot", "--console-depth", "4"]:
assert value in argv, (value, spec)
PY

View File

@@ -1,8 +1,64 @@
#!/usr/bin/env bash
set -euo pipefail
rule_file="$1"
build_file="$2"
if [[ -z ${RUNFILES_DIR:-} && -n ${TEST_SRCDIR:-} && -d ${TEST_SRCDIR} ]]; then
RUNFILES_DIR="${TEST_SRCDIR}"
fi
if [[ -z ${RUNFILES_DIR:-} && -z ${RUNFILES_MANIFEST_FILE:-} ]]; then
if [[ -d "$0.runfiles" ]]; then
RUNFILES_DIR="$0.runfiles"
elif [[ -f "$0.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.exe.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.exe.runfiles_manifest"
fi
fi
# Resolve a runfile reference to an on-disk path and print it.
# Resolution order:
#   1. absolute paths (POSIX "/..." or Windows drive-letter) pass through;
#   2. a path that exists relative to the CWD passes through;
#   3. each candidate — bare path, ${TEST_WORKSPACE}/path, _main/path — is
#      looked up under ${RUNFILES_DIR}, then in ${RUNFILES_MANIFEST_FILE}
#      (space-separated "shortpath realpath" lines scanned with awk).
# Exits 1 with a message on stderr when nothing matches.
resolve_runfile() {
local path="${1:-}"
local candidate
local resolved
if [[ -z ${path} ]]; then
echo "Error: missing runfile path" >&2
exit 1
fi
# Absolute paths (including Windows drive letters) need no resolution.
if [[ ${path} == /* || ${path} =~ ^[A-Za-z]:[\\/] ]]; then
printf '%s\n' "${path}"
return 0
fi
if [[ -e ${path} ]]; then
printf '%s\n' "${path}"
return 0
fi
for candidate in \
"${path}" \
"${TEST_WORKSPACE:-}/${path}" \
"_main/${path}"; do
[[ -z ${candidate} ]] && continue
if [[ -n ${RUNFILES_DIR:-} && -e "${RUNFILES_DIR}/${candidate}" ]]; then
printf '%s\n' "${RUNFILES_DIR}/${candidate}"
return 0
fi
if [[ -n ${RUNFILES_MANIFEST_FILE:-} ]]; then
# Manifest lines are "key value"; match the key prefix exactly and print
# everything after "key ".
resolved="$(
awk -v key="${candidate}" 'index($0, key " ") == 1 { print substr($0, length(key) + 2); exit }' \
"${RUNFILES_MANIFEST_FILE}"
)"
if [[ -n ${resolved} ]]; then
printf '%s\n' "${resolved}"
return 0
fi
fi
done
echo "Error: unable to resolve runfile: ${path}" >&2
exit 1
}
rule_file="$(resolve_runfile "${1:-}")"
build_file="$(resolve_runfile "${2:-}")"
grep -Eq 'extra_files = ctx\.files\.data \+ ctx\.files\.preload \+ ctx\.files\.env_files \+ \[bun_bin\]' "${rule_file}"
grep -Eq 'name = "hello_js_with_data_bin"' "${build_file}"

View File

@@ -1,8 +1,20 @@
#!/usr/bin/env bash
set -euo pipefail
binary="$1"
launcher="$1"
grep -Fq -- '--no-install' "${binary}"
grep -Fq -- '--preload' "${binary}"
grep -Fq -- '--env-file' "${binary}"
python3 - "${launcher}" <<'PY'
import json
import pathlib
import sys
path = pathlib.Path(sys.argv[1])
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
spec = json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
assert "--no-install" in spec["argv"], spec
assert spec["inherit_host_path"] is False, spec
assert spec["preload_short_paths"] and spec["preload_short_paths"][0].endswith("tests/binary_test/preload.ts"), spec
assert spec["env_file_short_paths"] and spec["env_file_short_paths"][0].endswith("tests/binary_test/runtime.env"), spec
PY

View File

@@ -3,77 +3,87 @@ load("//bun:defs.bzl", "bun_test")
bun_test(
name = "passing_suite",
size = "small",
srcs = ["passing.test.ts"],
)
bun_test(
name = "failing_suite",
size = "small",
srcs = ["failing.test.ts"],
)
bun_test(
name = "configured_suite",
size = "small",
srcs = ["passing.test.ts"],
preload = ["preload.ts"],
env_files = ["test.env"],
no_env_file = True,
timeout_ms = 250,
update_snapshots = True,
rerun_each = 2,
concurrent = True,
randomize = True,
seed = 7,
bail = 1,
reporter = "junit",
max_concurrency = 4,
concurrent = True,
coverage = True,
coverage_reporters = ["lcov"],
env_files = ["test.env"],
max_concurrency = 4,
no_env_file = True,
preload = ["preload.ts"],
randomize = True,
reporter = "junit",
rerun_each = 2,
seed = 7,
test_flags = ["--only-failures"],
timeout_ms = 250,
update_snapshots = True,
visibility = ["//tests/ci_test:__pkg__"],
)
bun_test(
name = "configured_retry_suite",
size = "small",
srcs = ["passing.test.ts"],
retry = 3,
)
sh_test(
name = "bun_test_failing_suite_test",
srcs = ["failing_suite_shape.sh"],
args = ["$(location //tests/bun_test_test:BUILD.bazel)"],
data = ["//tests/bun_test_test:BUILD.bazel"],
)
sh_test(
name = "bun_test_cache_hit_test",
srcs = ["cache_hit_shape.sh"],
args = ["$(location //internal:bun_test.bzl)"],
data = ["//internal:bun_test.bzl"],
)
sh_test(
name = "bun_test_cache_miss_test",
srcs = ["cache_miss_shape.sh"],
args = ["$(location //internal:bun_test.bzl)"],
data = ["//internal:bun_test.bzl"],
)
sh_test(
name = "bun_test_junit_output_test",
srcs = ["junit_shape.sh"],
args = ["$(location //internal:bun_test.bzl)"],
data = ["//internal:bun_test.bzl"],
)
sh_test(
name = "bun_test_configured_suite_shape_test",
size = "small",
srcs = ["configured_suite_shape.sh"],
args = [
"$(location :configured_suite)",
"$(location :configured_retry_suite)",
],
data = [
":configured_suite",
":configured_retry_suite",
":configured_suite",
],
)
sh_test(
name = "bun_test_failing_suite_test",
size = "small",
srcs = ["failing_suite_shape.sh"],
args = ["$(rlocationpath //tests/bun_test_test:BUILD.bazel)"],
data = ["//tests/bun_test_test:BUILD.bazel"],
)
sh_test(
name = "bun_test_cache_hit_test",
size = "small",
srcs = ["cache_hit_shape.sh"],
args = ["$(location :passing_suite)"],
data = [":passing_suite"],
)
sh_test(
name = "bun_test_cache_miss_test",
size = "small",
srcs = ["cache_miss_shape.sh"],
args = ["$(location :configured_suite)"],
data = [":configured_suite"],
)
sh_test(
name = "bun_test_junit_output_test",
size = "small",
srcs = ["junit_shape.sh"],
args = ["$(location :configured_suite)"],
data = [":configured_suite"],
)

View File

@@ -1,7 +1,19 @@
#!/usr/bin/env bash
set -euo pipefail
rule_file="$1"
launcher="$1"
grep -Fq 'launcher_lines = [render_shell_array("bun_args", ["--bun", "test"])]' "${rule_file}"
grep -Fq 'exec "${bun_bin}" "${bun_args[@]}" "$@"' "${rule_file}"
python3 - "${launcher}" <<'PY'
import json
import pathlib
import sys
path = pathlib.Path(sys.argv[1])
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
spec = json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
assert spec["kind"] == "bun_test", spec
assert spec["argv"][:2] == ["--bun", "test"], spec
assert spec["test_short_paths"], spec
PY

View File

@@ -1,8 +1,20 @@
#!/usr/bin/env bash
set -euo pipefail
rule_file="$1"
launcher="$1"
grep -Fq 'extra_files = ctx.files.srcs + ctx.files.data + ctx.files.preload + ctx.files.env_files + [bun_bin]' "${rule_file}"
grep -Eq '"srcs": attr\.label_list\(' "${rule_file}"
grep -Eq '"coverage": attr\.bool\(' "${rule_file}"
python3 - "${launcher}" <<'PY'
import json
import pathlib
import sys
path = pathlib.Path(sys.argv[1])
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
spec = json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
assert spec["coverage"] is True, spec
assert spec["preload_short_paths"], spec
assert spec["env_file_short_paths"], spec
assert spec["test_short_paths"], spec
PY

View File

@@ -4,21 +4,39 @@ set -euo pipefail
launcher="$1"
retry_launcher="$2"
grep -Fq -- '--no-install' "${launcher}"
grep -Fq -- '--preload' "${launcher}"
grep -Fq -- '--env-file' "${launcher}"
grep -Fq -- '--no-env-file' "${launcher}"
grep -Fq -- '--timeout' "${launcher}"
grep -Fq -- '--update-snapshots' "${launcher}"
grep -Fq -- '--rerun-each' "${launcher}"
grep -Fq -- '--concurrent' "${launcher}"
grep -Fq -- '--randomize' "${launcher}"
grep -Fq -- '--seed' "${launcher}"
grep -Fq -- '--bail' "${launcher}"
grep -Fq -- '--max-concurrency' "${launcher}"
grep -Fq -- '--reporter' "${launcher}"
grep -Fq -- '--reporter-outfile' "${launcher}"
grep -Fq -- '--coverage' "${launcher}"
grep -Fq -- '--coverage-dir' "${launcher}"
grep -Fq -- '--coverage-reporter' "${launcher}"
grep -Fq -- '--retry' "${retry_launcher}"
python3 - "${launcher}" "${retry_launcher}" <<'PY'
import json
import pathlib
import sys
def read_spec(launcher: str):
path = pathlib.Path(launcher)
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
return json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
launcher_spec = read_spec(sys.argv[1])
retry_spec = read_spec(sys.argv[2])
for value in [
"--no-install",
"--no-env-file",
"--timeout",
"--update-snapshots",
"--rerun-each",
"--concurrent",
"--randomize",
"--seed",
"--bail",
"--max-concurrency",
]:
assert value in launcher_spec["argv"], (value, launcher_spec)
assert launcher_spec["preload_short_paths"], launcher_spec
assert launcher_spec["env_file_short_paths"], launcher_spec
assert launcher_spec["reporter"] == "junit", launcher_spec
assert launcher_spec["coverage"] is True, launcher_spec
assert launcher_spec["coverage_reporters"] == ["lcov"], launcher_spec
assert "--retry" in retry_spec["argv"], retry_spec
assert "3" in retry_spec["argv"], retry_spec
PY

View File

@@ -1,7 +1,63 @@
#!/usr/bin/env bash
set -euo pipefail
build_file="$1"
if [[ -z ${RUNFILES_DIR:-} && -n ${TEST_SRCDIR:-} && -d ${TEST_SRCDIR} ]]; then
RUNFILES_DIR="${TEST_SRCDIR}"
fi
if [[ -z ${RUNFILES_DIR:-} && -z ${RUNFILES_MANIFEST_FILE:-} ]]; then
if [[ -d "$0.runfiles" ]]; then
RUNFILES_DIR="$0.runfiles"
elif [[ -f "$0.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.exe.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.exe.runfiles_manifest"
fi
fi
# Resolve a runfile reference to an on-disk path and print it.
# Resolution order:
#   1. absolute paths (POSIX "/..." or Windows drive-letter) pass through;
#   2. a path that exists relative to the CWD passes through;
#   3. each candidate — bare path, ${TEST_WORKSPACE}/path, _main/path — is
#      looked up under ${RUNFILES_DIR}, then in ${RUNFILES_MANIFEST_FILE}
#      (space-separated "shortpath realpath" lines scanned with awk).
# Exits 1 with a message on stderr when nothing matches.
resolve_runfile() {
local path="${1:-}"
local candidate
local resolved
if [[ -z ${path} ]]; then
echo "Error: missing runfile path" >&2
exit 1
fi
# Absolute paths (including Windows drive letters) need no resolution.
if [[ ${path} == /* || ${path} =~ ^[A-Za-z]:[\\/] ]]; then
printf '%s\n' "${path}"
return 0
fi
if [[ -e ${path} ]]; then
printf '%s\n' "${path}"
return 0
fi
for candidate in \
"${path}" \
"${TEST_WORKSPACE:-}/${path}" \
"_main/${path}"; do
[[ -z ${candidate} ]] && continue
if [[ -n ${RUNFILES_DIR:-} && -e "${RUNFILES_DIR}/${candidate}" ]]; then
printf '%s\n' "${RUNFILES_DIR}/${candidate}"
return 0
fi
if [[ -n ${RUNFILES_MANIFEST_FILE:-} ]]; then
# Manifest lines are "key value"; match the key prefix exactly and print
# everything after "key ".
resolved="$(
awk -v key="${candidate}" 'index($0, key " ") == 1 { print substr($0, length(key) + 2); exit }' \
"${RUNFILES_MANIFEST_FILE}"
)"
if [[ -n ${resolved} ]]; then
printf '%s\n' "${resolved}"
return 0
fi
fi
done
echo "Error: unable to resolve runfile: ${path}" >&2
exit 1
}
build_file="$(resolve_runfile "${1:-}")"
grep -Eq 'name = "failing_suite"' "${build_file}"
if grep -Eq 'tags = \["manual"\]' "${build_file}"; then

View File

@@ -1,7 +1,17 @@
#!/usr/bin/env bash
set -euo pipefail
rule_file="$1"
launcher="$1"
grep -Fq 'reporter_out="${XML_OUTPUT_FILE:-${runtime_workspace}/junit.xml}"' "${rule_file}"
grep -Fq 'bun_args+=("--reporter" "junit" "--reporter-outfile" "${reporter_out}")' "${rule_file}"
python3 - "${launcher}" <<'PY'
import json
import pathlib
import sys
path = pathlib.Path(sys.argv[1])
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
spec = json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
assert spec["reporter"] == "junit", spec
PY

View File

@@ -1,5 +1,5 @@
load("//bun:defs.bzl", "bun_build", "bun_bundle", "bun_compile")
load("@rules_shell//shell:sh_test.bzl", "sh_test")
load("//bun:defs.bzl", "bun_build", "bun_bundle", "bun_compile")
bun_bundle(
name = "simple_bundle",
@@ -18,64 +18,53 @@ bun_bundle(
external = ["left-pad"],
)
bun_bundle(
name = "collision_bundle",
entry_points = [
"collision_case/a/main.ts",
"collision_case/b/main.ts",
],
)
bun_build(
name = "site_build",
entry_points = ["site/index.html"],
data = [
"site/main.ts",
"site/styles.css",
],
entry_points = ["site/index.html"],
splitting = True,
)
bun_build(
name = "site_build_with_meta",
entry_points = ["site/index.html"],
data = [
"site/main.ts",
"site/styles.css",
],
entry_points = ["site/index.html"],
metafile = True,
metafile_md = True,
)
bun_build(
name = "advanced_site_build",
tags = ["manual"],
entry_points = ["site/index.html"],
data = [
"site/main.ts",
"site/styles.css",
],
install_mode = "fallback",
target = "node",
format = "cjs",
production = True,
splitting = True,
root = "tests/bundle_test/site",
sourcemap = "linked",
banner = "/* bundle banner */",
footer = "// bundle footer",
public_path = "/static/",
packages = "external",
external = [
"left-pad",
"react",
],
entry_naming = "entries/[name]-[hash].[ext]",
chunk_naming = "chunks/[name]-[hash].[ext]",
asset_naming = "assets/[name]-[hash].[ext]",
minify = True,
minify_syntax = True,
minify_whitespace = True,
minify_identifiers = True,
keep_names = True,
css_chunking = True,
banner = "/* bundle banner */",
build_flags = [
"--app",
"--server-components",
],
chunk_naming = "chunks/[name]-[hash].[ext]",
conditions = [
"browser",
"custom",
],
env = "PUBLIC_*",
css_chunking = True,
data = [
"site/main.ts",
"site/styles.css",
],
define = [
"process.env.NODE_ENV:\"production\"",
"__DEV__:false",
@@ -84,26 +73,44 @@ bun_build(
"console",
"debugger",
],
emit_dce_annotations = True,
entry_naming = "entries/[name]-[hash].[ext]",
entry_points = ["site/index.html"],
env = "PUBLIC_*",
external = [
"left-pad",
"react",
],
feature = [
"react_fast_refresh",
"server_components",
],
loader = [
".svg:file",
".txt:text",
],
footer = "// bundle footer",
format = "cjs",
jsx_factory = "h",
jsx_fragment = "Fragment",
jsx_import_source = "preact",
jsx_runtime = "automatic",
jsx_side_effects = True,
react_fast_refresh = True,
emit_dce_annotations = True,
no_bundle = True,
build_flags = [
"--app",
"--server-components",
keep_names = True,
loader = [
".svg:file",
".txt:text",
],
minify = True,
minify_identifiers = True,
minify_syntax = True,
minify_whitespace = True,
no_bundle = True,
packages = "external",
production = True,
public_path = "/static/",
react_fast_refresh = True,
root = "tests/bundle_test/site",
sourcemap = "linked",
splitting = True,
tags = ["manual"],
target = "node",
)
bun_compile(
@@ -113,29 +120,30 @@ bun_compile(
bun_compile(
name = "compiled_cli_with_flags",
tags = ["manual"],
entry_point = "cli.ts",
bytecode = True,
compile_autoload_bunfig = False,
compile_autoload_dotenv = False,
compile_autoload_package_json = True,
compile_autoload_tsconfig = True,
compile_exec_argv = [
"--smol",
"--inspect-wait",
],
compile_executable = "fake_cross_bun.bin",
compile_autoload_dotenv = False,
compile_autoload_bunfig = False,
compile_autoload_tsconfig = True,
compile_autoload_package_json = True,
entry_point = "cli.ts",
tags = ["manual"],
windows_copyright = "(c) rules_bun",
windows_description = "compile flag coverage",
windows_hide_console = True,
windows_icon = "branding/icon.ico",
windows_title = "Rules Bun Test App",
windows_publisher = "rules_bun",
windows_title = "Rules Bun Test App",
windows_version = "1.2.3.4",
windows_description = "compile flag coverage",
windows_copyright = "(c) rules_bun",
)
sh_test(
name = "bundle_output_test",
size = "small",
srcs = ["verify_bundle.sh"],
args = ["$(location :simple_bundle)"],
data = [":simple_bundle"],
@@ -143,19 +151,21 @@ sh_test(
sh_test(
name = "bundle_minify_test",
size = "small",
srcs = ["verify_minify.sh"],
args = [
"$(location :simple_bundle)",
"$(location :minified_bundle)",
],
data = [
":simple_bundle",
":minified_bundle",
":simple_bundle",
],
)
sh_test(
name = "bundle_hermetic_digest_test",
size = "small",
srcs = ["verify_hermetic_shape.sh"],
args = ["$(location //internal:bun_bundle.bzl)"],
data = ["//internal:bun_bundle.bzl"],
@@ -163,6 +173,7 @@ sh_test(
sh_test(
name = "bundle_external_exclusion_test",
size = "small",
srcs = ["verify_external_shape.sh"],
args = [
"$(location //internal:bun_bundle.bzl)",
@@ -174,22 +185,37 @@ sh_test(
],
)
sh_test(
name = "bundle_collision_output_test",
size = "small",
srcs = ["verify_collision_outputs.sh"],
args = ["$(locations :collision_bundle)"],
data = [":collision_bundle"],
)
sh_test(
name = "bundle_sourcemap_shape_test",
size = "small",
srcs = ["verify_sourcemap_shape.sh"],
env_inherit = ["PATH"],
tags = [
"exclusive",
"no-sandbox",
],
data = [
"BUILD.bazel",
"//:repo_runtime_files",
"//bun:repo_runtime_files",
"//internal:repo_runtime_files",
"BUILD.bazel",
"//tests:nested_bazel_test.sh",
"//tests/bundle_test/sourcemap_case:BUILD.bazel",
"//tests/bundle_test/sourcemap_case:entry.ts",
],
env_inherit = ["PATH"],
)
sh_test(
name = "bun_build_site_output_test",
size = "small",
srcs = ["verify_site_build.sh"],
args = ["$(location :site_build)"],
data = [":site_build"],
@@ -197,6 +223,7 @@ sh_test(
sh_test(
name = "bun_build_site_meta_test",
size = "small",
srcs = ["verify_site_build_meta.sh"],
args = ["$(locations :site_build_with_meta)"],
data = [":site_build_with_meta"],
@@ -204,6 +231,7 @@ sh_test(
sh_test(
name = "bun_compile_output_test",
size = "small",
srcs = ["run_compiled_binary.sh"],
args = ["$(location :compiled_cli)"],
data = [":compiled_cli"],
@@ -211,17 +239,23 @@ sh_test(
sh_test(
name = "bun_build_compile_flag_shape_test",
size = "small",
srcs = ["verify_flag_aquery.sh"],
env_inherit = ["PATH"],
tags = [
"exclusive",
"no-sandbox",
],
data = [
"//:repo_runtime_files",
"//bun:repo_runtime_files",
"//internal:repo_runtime_files",
"BUILD.bazel",
"cli.ts",
"fake_cross_bun.bin",
"site/index.html",
"site/main.ts",
"site/styles.css",
"//:repo_runtime_files",
"//bun:repo_runtime_files",
"//internal:repo_runtime_files",
"//tests:nested_bazel_test.sh",
],
env_inherit = ["PATH"],
)

View File

@@ -0,0 +1 @@
console.log("a");

View File

@@ -0,0 +1 @@
console.log("b");

View File

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -euo pipefail
first_output="$1"
second_output="$2"
if [[ ${first_output} == "${second_output}" ]]; then
echo "Expected distinct bundle outputs for same-basename entry points" >&2
exit 1
fi
if [[ ! -f ${first_output} || ! -f ${second_output} ]]; then
echo "Expected both bundle outputs to exist" >&2
exit 1
fi
if [[ ${first_output} != *"collision_bundle__tests_bundle_test_collision_case_a_main.js" ]]; then
echo "Unexpected first output path: ${first_output}" >&2
exit 1
fi
if [[ ${second_output} != *"collision_bundle__tests_bundle_test_collision_case_b_main.js" ]]; then
echo "Unexpected second output path: ${second_output}" >&2
exit 1
fi

View File

@@ -1,49 +1,20 @@
#!/usr/bin/env bash
set -euo pipefail
if command -v bazel >/dev/null 2>&1; then
bazel_cmd=(bazel)
elif command -v bazelisk >/dev/null 2>&1; then
bazel_cmd=(bazelisk)
else
echo "bazel or bazelisk is required on PATH" >&2
exit 1
fi
find_workspace_root() {
local candidate
local module_path
local script_dir
for candidate in \
"${TEST_SRCDIR:-}/${TEST_WORKSPACE:-}" \
"${TEST_SRCDIR:-}/_main"; do
if [[ -n ${candidate} && -f "${candidate}/MODULE.bazel" ]]; then
printf '%s\n' "${candidate}"
return 0
fi
done
if [[ -n ${TEST_SRCDIR:-} ]]; then
module_path="$(find "${TEST_SRCDIR}" -maxdepth 3 -name MODULE.bazel -print -quit 2>/dev/null || true)"
if [[ -n ${module_path} ]]; then
dirname "${module_path}"
return 0
fi
fi
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
candidate="$(cd "${script_dir}/../.." && pwd -P)"
if [[ -f "${candidate}/MODULE.bazel" ]]; then
printf '%s\n' "${candidate}"
return 0
fi
# shellcheck source=../nested_bazel_test.sh
source "${script_dir}/../nested_bazel_test.sh"
setup_nested_bazel_cmd
echo "Unable to locate rules_bun workspace root" >&2
exit 1
rules_bun_root="$(find_nested_bazel_workspace_root "${BASH_SOURCE[0]}")"
cleanup() {
local status="$1"
trap - EXIT
shutdown_nested_bazel_workspace "${rules_bun_root}"
exit "${status}"
}
rules_bun_root="$(find_workspace_root)"
trap 'cleanup $?' EXIT
run_aquery() {
local mnemonic="$1"
@@ -68,8 +39,6 @@ expect_line() {
build_output="$(run_aquery "BunBuild" "//tests/bundle_test:advanced_site_build")"
for expected in \
'arguments: "--install"' \
'arguments: "fallback"' \
'arguments: "--target"' \
'arguments: "node"' \
'arguments: "--format"' \
@@ -132,6 +101,14 @@ for expected in \
expect_line "${build_output}" "${expected}"
done
default_root_output="$(run_aquery "BunBuild" "//tests/bundle_test:site_build_with_meta")"
for expected in \
'arguments: "--root"' \
'arguments: "tests/bundle_test/site"'; do
expect_line "${default_root_output}" "${expected}"
done
compile_output="$(run_aquery "BunCompile" "//tests/bundle_test:compiled_cli_with_flags")"
for expected in \

View File

@@ -4,6 +4,6 @@ set -euo pipefail
rule_file="$1"
grep -Fq 'def _output_name(target_name, entry):' "${rule_file}"
grep -Fq 'return "{}__{}.js".format(target_name, stem)' "${rule_file}"
grep -Fq 'inputs = depset(' "${rule_file}"
grep -Fq 'direct = [entry] + ctx.files.data' "${rule_file}"
grep -Fq 'stem = entry.short_path.rsplit(".", 1)[0]' "${rule_file}"
grep -Fq 'validate_hermetic_install_mode(ctx.attr, "bun_bundle")' "${rule_file}"
grep -Fq 'declare_staged_bun_build_action(' "${rule_file}"

View File

@@ -1,49 +1,20 @@
#!/usr/bin/env bash
set -euo pipefail
if command -v bazel >/dev/null 2>&1; then
bazel_cmd=(bazel)
elif command -v bazelisk >/dev/null 2>&1; then
bazel_cmd=(bazelisk)
else
echo "bazel or bazelisk is required on PATH" >&2
exit 1
fi
find_workspace_root() {
local candidate
local module_path
local script_dir
for candidate in \
"${TEST_SRCDIR:-}/${TEST_WORKSPACE:-}" \
"${TEST_SRCDIR:-}/_main"; do
if [[ -n ${candidate} && -f "${candidate}/MODULE.bazel" ]]; then
printf '%s\n' "${candidate}"
return 0
fi
done
if [[ -n ${TEST_SRCDIR:-} ]]; then
module_path="$(find "${TEST_SRCDIR}" -maxdepth 3 -name MODULE.bazel -print -quit 2>/dev/null || true)"
if [[ -n ${module_path} ]]; then
dirname "${module_path}"
return 0
fi
fi
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
candidate="$(cd "${script_dir}/../.." && pwd -P)"
if [[ -f "${candidate}/MODULE.bazel" ]]; then
printf '%s\n' "${candidate}"
return 0
fi
# shellcheck source=../nested_bazel_test.sh
source "${script_dir}/../nested_bazel_test.sh"
setup_nested_bazel_cmd
echo "Unable to locate rules_bun workspace root" >&2
exit 1
rules_bun_root="$(find_nested_bazel_workspace_root "${BASH_SOURCE[0]}")"
cleanup() {
local status="$1"
trap - EXIT
shutdown_nested_bazel_workspace "${rules_bun_root}"
exit "${status}"
}
rules_bun_root="$(find_workspace_root)"
trap 'cleanup $?' EXIT
bundle_output="$(
cd "${rules_bun_root}" &&

View File

@@ -2,7 +2,34 @@ load("@rules_shell//shell:sh_test.bzl", "sh_test")
sh_test(
name = "phase8_ci_matrix_shape_test",
size = "small",
srcs = ["phase8_ci_matrix_shape_test.sh"],
args = ["$(location //.github/workflows:ci.yml)"],
args = ["$(rlocationpath //.github/workflows:ci.yml)"],
data = ["//.github/workflows:ci.yml"],
)
sh_test(
name = "phase8_ci_targets_test",
size = "small",
srcs = ["phase8_ci_targets_test.sh"],
args = ["$(rlocationpath :phase8_ci_targets.sh)"],
data = [":phase8_ci_targets.sh"],
)
sh_test(
name = "native_wrapper_shape_test",
size = "small",
srcs = ["verify_native_wrapper_shape.sh"],
args = [
"$(location //tests/binary_test:configured_launcher_bin)",
"$(location //tests/script_test:workspace_flagged_script)",
"$(location //tests/js_compat_test:compat_devserver)",
"$(location //tests/bun_test_test:configured_suite)",
],
data = [
"//tests/binary_test:configured_launcher_bin",
"//tests/bun_test_test:configured_suite",
"//tests/js_compat_test:compat_devserver",
"//tests/script_test:workspace_flagged_script",
],
)

View File

@@ -1,7 +1,63 @@
#!/usr/bin/env bash
set -euo pipefail
workflow_file="$1"
if [[ -z ${RUNFILES_DIR:-} && -n ${TEST_SRCDIR:-} && -d ${TEST_SRCDIR} ]]; then
RUNFILES_DIR="${TEST_SRCDIR}"
fi
if [[ -z ${RUNFILES_DIR:-} && -z ${RUNFILES_MANIFEST_FILE:-} ]]; then
if [[ -d "$0.runfiles" ]]; then
RUNFILES_DIR="$0.runfiles"
elif [[ -f "$0.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.exe.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.exe.runfiles_manifest"
fi
fi
resolve_runfile() {
local path="${1:-}"
local candidate
local resolved
if [[ -z ${path} ]]; then
echo "Error: missing runfile path" >&2
exit 1
fi
if [[ ${path} == /* || ${path} =~ ^[A-Za-z]:[\\/] ]]; then
printf '%s\n' "${path}"
return 0
fi
if [[ -e ${path} ]]; then
printf '%s\n' "${path}"
return 0
fi
for candidate in \
"${path}" \
"${TEST_WORKSPACE:-}/${path}" \
"_main/${path}"; do
[[ -z ${candidate} ]] && continue
if [[ -n ${RUNFILES_DIR:-} && -e "${RUNFILES_DIR}/${candidate}" ]]; then
printf '%s\n' "${RUNFILES_DIR}/${candidate}"
return 0
fi
if [[ -n ${RUNFILES_MANIFEST_FILE:-} ]]; then
resolved="$(
awk -v key="${candidate}" 'index($0, key " ") == 1 { print substr($0, length(key) + 2); exit }' \
"${RUNFILES_MANIFEST_FILE}"
)"
if [[ -n ${resolved} ]]; then
printf '%s\n' "${resolved}"
return 0
fi
fi
done
echo "Error: unable to resolve runfile: ${path}" >&2
exit 1
}
workflow_file="$(resolve_runfile "${1:-}")"
if [ -z "${workflow_file}" ]; then
echo "Error: workflow file path required as first argument" >&2
exit 1
@@ -17,11 +73,22 @@ check_pattern() {
}
check_pattern '^name:[[:space:]]+CI$' "missing workflow name CI"
check_pattern 'USE_BAZEL_VERSION:[[:space:]]+9\.0\.0' "missing Bazel 9.0.0 pin"
check_pattern 'USE_BAZEL_VERSION:[[:space:]]+9\.0\.1' "missing Bazel 9.0.1 pin"
check_pattern 'os:[[:space:]]+ubuntu-latest' "missing ubuntu matrix entry"
check_pattern 'phase8_target:[[:space:]]+linux-x64' "missing linux-x64 matrix target"
check_pattern 'os:[[:space:]]+macos-14' "missing macos matrix entry"
check_pattern 'phase8_target:[[:space:]]+darwin-arm64' "missing darwin-arm64 matrix target"
check_pattern 'os:[[:space:]]+windows-latest' "missing windows matrix entry"
check_pattern 'phase8_target:[[:space:]]+windows' "missing windows matrix target"
has_windows_os=0
has_windows_target=0
if grep -Eq 'os:[[:space:]]+windows-latest' "${workflow_file}"; then
has_windows_os=1
fi
if grep -Eq 'phase8_target:[[:space:]]+windows' "${workflow_file}"; then
has_windows_target=1
fi
if [[ ${has_windows_os} -ne ${has_windows_target} ]]; then
echo "Error: windows matrix entry and windows phase8 target must be added or removed together" >&2
exit 1
fi
echo "CI matrix shape checks passed"

View File

@@ -0,0 +1,27 @@
#!/usr/bin/env bash
set -euo pipefail
phase8_target="${1:-}"
if [[ -z ${phase8_target} ]]; then
echo "Error: phase8 target required as first argument" >&2
exit 1
fi
case "${phase8_target}" in
linux-x64 | darwin-arm64)
printf '%s\n' "//tests/..."
;;
windows)
printf '%s\n' \
"//tests/binary_test/..." \
"//tests/bun_test_test/..." \
"//tests/ci_test/..." \
"//tests/js_compat_test/..." \
"//tests/script_test/..." \
"//tests/toolchain_test/..."
;;
*)
echo "Error: unsupported phase8 target: ${phase8_target}" >&2
exit 1
;;
esac

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
set -euo pipefail
if [[ -z ${RUNFILES_DIR:-} && -n ${TEST_SRCDIR:-} && -d ${TEST_SRCDIR} ]]; then
RUNFILES_DIR="${TEST_SRCDIR}"
fi
if [[ -z ${RUNFILES_DIR:-} && -z ${RUNFILES_MANIFEST_FILE:-} ]]; then
if [[ -d "$0.runfiles" ]]; then
RUNFILES_DIR="$0.runfiles"
elif [[ -f "$0.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.exe.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.exe.runfiles_manifest"
fi
fi
resolve_runfile() {
local path="${1:-}"
local candidate
local resolved
if [[ -z ${path} ]]; then
echo "Error: missing runfile path" >&2
exit 1
fi
if [[ ${path} == /* || ${path} =~ ^[A-Za-z]:[\\/] ]]; then
printf '%s\n' "${path}"
return 0
fi
if [[ -e ${path} ]]; then
printf '%s\n' "${path}"
return 0
fi
for candidate in \
"${path}" \
"${TEST_WORKSPACE:-}/${path}" \
"_main/${path}"; do
[[ -z ${candidate} ]] && continue
if [[ -n ${RUNFILES_DIR:-} && -e "${RUNFILES_DIR}/${candidate}" ]]; then
printf '%s\n' "${RUNFILES_DIR}/${candidate}"
return 0
fi
if [[ -n ${RUNFILES_MANIFEST_FILE:-} ]]; then
resolved="$(
awk -v key="${candidate}" 'index($0, key " ") == 1 { print substr($0, length(key) + 2); exit }' \
"${RUNFILES_MANIFEST_FILE}"
)"
if [[ -n ${resolved} ]]; then
printf '%s\n' "${resolved}"
return 0
fi
fi
done
echo "Error: unable to resolve runfile: ${path}" >&2
exit 1
}
resolver="$(resolve_runfile "${1:-}")"
if [[ -z ${resolver} ]]; then
echo "Error: resolver path required as first argument" >&2
exit 1
fi
linux_targets="$("${resolver}" linux-x64)"
if [[ ${linux_targets} != "//tests/..." ]]; then
echo "Error: linux-x64 should resolve to //tests/..." >&2
exit 1
fi
darwin_targets="$("${resolver}" darwin-arm64)"
if [[ ${darwin_targets} != "//tests/..." ]]; then
echo "Error: darwin-arm64 should resolve to //tests/..." >&2
exit 1
fi
windows_targets="$("${resolver}" windows)"
expected_windows_targets="$(
cat <<'EOF'
//tests/binary_test/...
//tests/bun_test_test/...
//tests/ci_test/...
//tests/js_compat_test/...
//tests/script_test/...
//tests/toolchain_test/...
EOF
)"
if [[ ${windows_targets} != "${expected_windows_targets}" ]]; then
echo "Error: unexpected windows targets" >&2
printf 'Expected:\n%s\nActual:\n%s\n' "${expected_windows_targets}" "${windows_targets}" >&2
exit 1
fi
if "${resolver}" unsupported >/dev/null 2>&1; then
echo "Error: unsupported phase8 target should fail" >&2
exit 1
fi
echo "Phase 8 CI targets resolve correctly"

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
set -euo pipefail
python3 - "$@" <<'PY'
import pathlib
import sys
windows = sys.platform.startswith("win")
for launcher in sys.argv[1:]:
suffix = pathlib.Path(launcher).suffix.lower()
if windows:
if suffix != ".cmd":
raise SystemExit(f"expected .cmd launcher on Windows: {launcher}")
elif suffix == ".sh":
raise SystemExit(f"unexpected .sh launcher executable: {launcher}")
PY

View File

@@ -2,6 +2,7 @@ load("@rules_shell//shell:sh_test.bzl", "sh_test")
sh_test(
name = "bun_install_extension_shape_test",
size = "small",
srcs = ["extension_shape_test.sh"],
args = ["$(location //bun:extensions.bzl)"],
data = ["//bun:extensions.bzl"],
@@ -9,6 +10,7 @@ sh_test(
sh_test(
name = "npm_translate_lock_extension_shape_test",
size = "small",
srcs = ["npm_extension_shape_test.sh"],
args = ["$(location //npm:extensions.bzl)"],
data = ["//npm:extensions.bzl"],

View File

@@ -10,3 +10,4 @@ grep -Eq '"package_json":[[:space:]]*attr\.label\(mandatory[[:space:]]*=[[:space
grep -Eq '"bun_lockfile":[[:space:]]*attr\.label\(mandatory[[:space:]]*=[[:space:]]*True\)' "${extension_file}"
grep -Eq '"install_inputs":[[:space:]]*attr\.label_list\(allow_files[[:space:]]*=[[:space:]]*True\)' "${extension_file}"
grep -Eq '"isolated_home":[[:space:]]*attr\.bool\(default[[:space:]]*=[[:space:]]*True\)' "${extension_file}"
grep -Eq '"ignore_scripts":[[:space:]]*attr\.bool\(default[[:space:]]*=[[:space:]]*True\)' "${extension_file}"

View File

@@ -32,14 +32,24 @@ config_setting(
],
)
config_setting(
name = "windows_x86_64",
constraint_values = [
"@platforms//os:windows",
"@platforms//cpu:x86_64",
],
)
sh_test(
name = "bun_install_clean_install_test",
size = "small",
srcs = ["clean_install.sh"],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
@@ -47,18 +57,21 @@ sh_test(
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}),
)
sh_test(
name = "bun_install_stale_lockfile_test",
size = "small",
srcs = ["stale_lockfile.sh"],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
@@ -66,12 +79,14 @@ sh_test(
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}),
)
sh_test(
name = "bun_install_determinism_test",
size = "small",
srcs = ["determinism.sh"],
args = ["$(location //internal:bun_install.bzl)"],
data = ["//internal:bun_install.bzl"],
@@ -79,6 +94,7 @@ sh_test(
sh_test(
name = "bun_install_environment_shape_test",
size = "small",
srcs = ["environment_shape.sh"],
args = ["$(location //internal:bun_install.bzl)"],
data = ["//internal:bun_install.bzl"],
@@ -86,12 +102,14 @@ sh_test(
sh_test(
name = "bun_install_workspaces_test",
size = "small",
srcs = ["workspaces.sh"],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
@@ -99,18 +117,21 @@ sh_test(
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}),
)
sh_test(
name = "bun_install_workspaces_catalog_test",
size = "small",
srcs = ["workspaces_catalog.sh"],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
@@ -118,19 +139,25 @@ sh_test(
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}),
)
sh_test(
name = "bun_install_workspace_parity_test",
size = "large",
srcs = ["workspace_parity.sh"],
env_inherit = ["PATH"],
tags = [
"exclusive",
"no-sandbox",
],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
@@ -138,17 +165,85 @@ sh_test(
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}) + [
"//:repo_runtime_files",
"//bun:repo_runtime_files",
"//internal:repo_runtime_files",
"//tests:nested_bazel_test.sh",
],
env_inherit = ["PATH"],
)
sh_test(
name = "bun_install_install_flags_shape_test",
size = "small",
srcs = ["install_flags_shape.sh"],
args = ["$(location //internal:bun_install.bzl)"],
data = ["//internal:bun_install.bzl"],
)
sh_test(
name = "bun_install_repeatability_test",
size = "small",
srcs = ["repeatability.sh"],
tags = [
"exclusive",
"no-sandbox",
],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
":linux_x86_64": ["@bun_linux_x64//:bun"],
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}) + [
"//:repo_runtime_files",
"//bun:repo_runtime_files",
"//internal:repo_runtime_files",
"//tests:nested_bazel_test.sh",
],
env_inherit = ["PATH"],
)
sh_test(
name = "bun_install_lifecycle_scripts_test",
size = "small",
srcs = ["lifecycle_scripts.sh"],
tags = [
"exclusive",
"no-sandbox",
],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
":linux_x86_64": ["@bun_linux_x64//:bun"],
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}) + [
"//:repo_runtime_files",
"//bun:repo_runtime_files",
"//internal:repo_runtime_files",
"//tests:nested_bazel_test.sh",
],
env_inherit = ["PATH"],
)

View File

@@ -0,0 +1,120 @@
#!/usr/bin/env bash
set -euo pipefail
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
# shellcheck source=../nested_bazel_test.sh
source "${script_dir}/../nested_bazel_test.sh"
setup_nested_bazel_cmd
bun_path="$1"
rules_bun_root="$(find_nested_bazel_workspace_root "${BASH_SOURCE[0]}")"
workdir="$(mktemp -d)"
cleanup() {
local status="$1"
trap - EXIT
shutdown_nested_bazel_workspace "${fixture_dir:-}"
rm -rf "${workdir}"
exit "${status}"
}
trap 'cleanup $?' EXIT
fixture_dir="${workdir}/fixture"
mkdir -p "${fixture_dir}"
cat >"${fixture_dir}/package.json" <<'JSON'
{
"name": "lifecycle-script-test",
"version": "1.0.0",
"dependencies": {
"is-number": "7.0.0"
},
"scripts": {
"postinstall": "bun -e \"require('node:fs').writeFileSync('postinstall.txt', 'ran')\""
}
}
JSON
"${bun_path}" install --cwd "${fixture_dir}" >/dev/null
rm -rf "${fixture_dir}/node_modules" "${fixture_dir}/postinstall.txt"
cat >"${fixture_dir}/MODULE.bazel" <<EOF
module(
name = "bun_install_lifecycle_scripts_test",
)
bazel_dep(name = "rules_bun", version = "0.2.2")
local_path_override(
module_name = "rules_bun",
path = "${rules_bun_root}",
)
bun_ext = use_extension("@rules_bun//bun:extensions.bzl", "bun")
use_repo(
bun_ext,
"bun_darwin_aarch64",
"bun_darwin_x64",
"bun_linux_aarch64",
"bun_linux_x64",
"bun_windows_x64",
)
bun_install_ext = use_extension("@rules_bun//bun:extensions.bzl", "bun_install")
bun_install_ext.install(
name = "scripts_blocked",
package_json = "//:package.json",
bun_lockfile = "//:bun.lock",
)
bun_install_ext.install(
name = "scripts_allowed",
package_json = "//:package.json",
bun_lockfile = "//:bun.lock",
ignore_scripts = False,
)
use_repo(
bun_install_ext,
"scripts_allowed",
"scripts_blocked",
)
register_toolchains(
"@rules_bun//bun:darwin_aarch64_toolchain",
"@rules_bun//bun:darwin_x64_toolchain",
"@rules_bun//bun:linux_aarch64_toolchain",
"@rules_bun//bun:linux_x64_toolchain",
"@rules_bun//bun:windows_x64_toolchain",
)
EOF
cat >"${fixture_dir}/BUILD.bazel" <<'EOF'
exports_files([
"package.json",
"bun.lock",
])
EOF
(
cd "${fixture_dir}"
"${bazel_cmd[@]}" build @scripts_blocked//:node_modules @scripts_allowed//:node_modules >/dev/null
)
output_base="$(cd "${fixture_dir}" && "${bazel_cmd[@]}" info output_base)"
blocked_repo="$(find "${output_base}/external" -maxdepth 1 -type d -name '*+scripts_blocked' | head -n 1)"
allowed_repo="$(find "${output_base}/external" -maxdepth 1 -type d -name '*+scripts_allowed' | head -n 1)"
if [[ -z ${blocked_repo} || -z ${allowed_repo} ]]; then
echo "Unable to locate generated lifecycle test repositories" >&2
exit 1
fi
if [[ -e "${blocked_repo}/postinstall.txt" ]]; then
echo "Lifecycle scripts should be disabled by default" >&2
exit 1
fi
if [[ ! -f "${allowed_repo}/postinstall.txt" ]]; then
echo "Lifecycle scripts should run when ignore_scripts = False" >&2
exit 1
fi

View File

@@ -0,0 +1,130 @@
#!/usr/bin/env bash
set -euo pipefail
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
# shellcheck source=../nested_bazel_test.sh
source "${script_dir}/../nested_bazel_test.sh"
setup_nested_bazel_cmd
bun_path="$1"
rules_bun_root="$(find_nested_bazel_workspace_root "${BASH_SOURCE[0]}")"
workdir="$(mktemp -d)"
cleanup() {
local status="$1"
trap - EXIT
shutdown_nested_bazel_workspace "${fixture_dir:-}"
rm -rf "${workdir}"
exit "${status}"
}
trap 'cleanup $?' EXIT
fixture_dir="${workdir}/fixture"
mkdir -p "${fixture_dir}"
cat >"${fixture_dir}/package.json" <<'JSON'
{
"name": "repeatability-test",
"version": "1.0.0",
"dependencies": {
"is-number": "7.0.0"
}
}
JSON
"${bun_path}" install --cwd "${fixture_dir}" >/dev/null
rm -rf "${fixture_dir}/node_modules"
cat >"${fixture_dir}/MODULE.bazel" <<EOF
module(
name = "bun_install_repeatability_test",
)
bazel_dep(name = "rules_bun", version = "0.2.2")
local_path_override(
module_name = "rules_bun",
path = "${rules_bun_root}",
)
bun_ext = use_extension("@rules_bun//bun:extensions.bzl", "bun")
use_repo(
bun_ext,
"bun_darwin_aarch64",
"bun_darwin_x64",
"bun_linux_aarch64",
"bun_linux_x64",
"bun_windows_x64",
)
bun_install_ext = use_extension("@rules_bun//bun:extensions.bzl", "bun_install")
bun_install_ext.install(
name = "node_modules_a",
package_json = "//:package.json",
bun_lockfile = "//:bun.lock",
)
bun_install_ext.install(
name = "node_modules_b",
package_json = "//:package.json",
bun_lockfile = "//:bun.lock",
)
use_repo(
bun_install_ext,
"node_modules_a",
"node_modules_b",
)
register_toolchains(
"@rules_bun//bun:darwin_aarch64_toolchain",
"@rules_bun//bun:darwin_x64_toolchain",
"@rules_bun//bun:linux_aarch64_toolchain",
"@rules_bun//bun:linux_x64_toolchain",
"@rules_bun//bun:windows_x64_toolchain",
)
EOF
cat >"${fixture_dir}/BUILD.bazel" <<'EOF'
exports_files([
"package.json",
"bun.lock",
])
EOF
(
cd "${fixture_dir}"
"${bazel_cmd[@]}" build @node_modules_a//:node_modules @node_modules_b//:node_modules >/dev/null
)
output_base="$(cd "${fixture_dir}" && "${bazel_cmd[@]}" info output_base)"
repo_a="$(find "${output_base}/external" -maxdepth 1 -type d -name '*+node_modules_a' | head -n 1)"
repo_b="$(find "${output_base}/external" -maxdepth 1 -type d -name '*+node_modules_b' | head -n 1)"
if [[ -z ${repo_a} || -z ${repo_b} ]]; then
echo "Unable to locate generated node_modules repositories" >&2
exit 1
fi
snapshot_tree() {
local root="$1"
(
cd "${root}"
while IFS= read -r -d '' path; do
local rel="${path#./}"
if [[ -L ${path} ]]; then
local target
target="$(readlink "${path}")"
target="${target//node_modules_a/node_modules}"
target="${target//node_modules_b/node_modules}"
printf 'L %s %s\n' "${rel}" "${target}"
else
printf 'F %s %s\n' "${rel}" "$(shasum -a 256 "${path}" | awk '{print $1}')"
fi
done < <(find . \( -type f -o -type l \) -print0 | sort -z)
)
}
snapshot_tree "${repo_a}/node_modules" >"${workdir}/repo_a.snapshot"
snapshot_tree "${repo_b}/node_modules" >"${workdir}/repo_b.snapshot"
diff -u "${workdir}/repo_a.snapshot" "${workdir}/repo_b.snapshot"

View File

@@ -1,22 +1,24 @@
#!/usr/bin/env bash
set -euo pipefail
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
# shellcheck source=../nested_bazel_test.sh
source "${script_dir}/../nested_bazel_test.sh"
setup_nested_bazel_cmd
bun_path="${1:-bun}"
if command -v bazel >/dev/null 2>&1; then
bazel_cmd=(bazel)
elif command -v bazelisk >/dev/null 2>&1; then
bazel_cmd=(bazelisk)
else
echo "bazel or bazelisk is required on PATH" >&2
exit 1
fi
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
rules_bun_root="$(cd "${script_dir}/../.." && pwd -P)"
rules_bun_root="$(find_nested_bazel_workspace_root "${BASH_SOURCE[0]}")"
workdir="$(mktemp -d)"
trap 'rm -rf "${workdir}"' EXIT
cleanup() {
local status="$1"
trap - EXIT
shutdown_nested_bazel_workspace "${bazel_dir:-}"
rm -rf "${workdir}"
exit "${status}"
}
trap 'cleanup $?' EXIT
fixture_dir="${workdir}/fixture"
plain_dir="${workdir}/plain"

View File

@@ -2,6 +2,9 @@
set -euo pipefail
bun_path="$1"
if [[ ${bun_path} != /* ]]; then
bun_path="$(cd "$(dirname "${bun_path}")" && pwd -P)/$(basename "${bun_path}")"
fi
workdir="$(mktemp -d)"
trap 'rm -rf "${workdir}"' EXIT

View File

@@ -2,6 +2,9 @@
set -euo pipefail
bun_path="$1"
if [[ ${bun_path} != /* ]]; then
bun_path="$(cd "$(dirname "${bun_path}")" && pwd -P)/$(basename "${bun_path}")"
fi
workdir="$(mktemp -d)"
trap 'rm -rf "${workdir}"' EXIT

View File

@@ -3,17 +3,18 @@ load("@rules_shell//shell:sh_test.bzl", "sh_test")
test_suite(
name = "examples_test",
tests = [
":examples_basic_run_e2e_test",
":examples_basic_hot_restart_shape_test",
":examples_workspace_bundle_e2e_test",
":examples_workspace_catalog_shape_test",
":examples_basic_run_e2e_test",
":examples_vite_monorepo_catalog_shape_test",
":examples_vite_monorepo_e2e_test",
":examples_workspace_bundle_e2e_test",
":examples_workspace_catalog_shape_test",
],
)
sh_test(
name = "examples_basic_e2e_build_test",
size = "small",
srcs = ["examples_basic_e2e_build_test.sh"],
args = [
"$(location //examples/basic:BUILD.bazel)",
@@ -27,6 +28,7 @@ sh_test(
sh_test(
name = "examples_basic_run_e2e_test",
size = "small",
srcs = ["examples_basic_run_e2e_test.sh"],
args = ["$(location //examples/basic:web_dev)"],
data = ["//examples/basic:web_dev"],
@@ -34,6 +36,7 @@ sh_test(
sh_test(
name = "examples_basic_hot_restart_shape_test",
size = "small",
srcs = ["examples_basic_hot_restart_shape_test.sh"],
args = ["$(location //examples/basic:web_dev_hot_restart)"],
data = ["//examples/basic:web_dev_hot_restart"],
@@ -41,6 +44,7 @@ sh_test(
sh_test(
name = "examples_workspace_bundle_e2e_test",
size = "small",
srcs = ["examples_workspace_bundle_e2e_test.sh"],
args = ["$(location //examples/workspace:pkg_b_bundle)"],
data = ["//examples/workspace:pkg_b_bundle"],
@@ -48,6 +52,7 @@ sh_test(
sh_test(
name = "examples_workspace_catalog_shape_test",
size = "small",
srcs = ["examples_workspace_catalog_shape_test.sh"],
args = [
"$(location //examples/workspace:package.json)",
@@ -63,6 +68,7 @@ sh_test(
sh_test(
name = "examples_vite_monorepo_catalog_shape_test",
size = "small",
srcs = ["examples_vite_monorepo_catalog_shape_test.sh"],
args = [
"$(location //examples/vite_monorepo:package.json)",
@@ -70,14 +76,15 @@ sh_test(
"$(location //examples/vite_monorepo:apps/app-b/package.json)",
],
data = [
"//examples/vite_monorepo:package.json",
"//examples/vite_monorepo:apps/app-a/package.json",
"//examples/vite_monorepo:apps/app-b/package.json",
"//examples/vite_monorepo:package.json",
],
)
sh_test(
name = "examples_vite_monorepo_e2e_test",
size = "small",
srcs = ["examples_vite_monorepo_e2e_test.sh"],
args = [
"$(location //examples/vite_monorepo:app_a_dev)",
@@ -91,6 +98,7 @@ sh_test(
sh_test(
name = "repo_all_targets_test",
size = "small",
srcs = ["repo_all_targets_test.sh"],
args = ["$(location //.github/workflows:ci.yml)"],
data = ["//.github/workflows:ci.yml"],

View File

@@ -1,11 +1,21 @@
#!/usr/bin/env bash
set -euo pipefail
binary="$1"
launcher="$1"
grep -Fq -- 'watch_mode="hot"' "${binary}"
grep -Fq -- 'bun_args+=("--hot")' "${binary}"
grep -Fq -- '--no-clear-screen' "${binary}"
grep -Fq -- 'if [[ 1 -eq 0 ]]; then' "${binary}"
grep -Fq -- 'readarray -t restart_paths' "${binary}"
grep -Fq -- 'examples/basic/README.md' "${binary}"
python3 - "${launcher}" <<'PY'
import json
import pathlib
import sys
path = pathlib.Path(sys.argv[1])
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
spec = json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
assert spec["kind"] == "bun_run", spec
assert spec["watch_mode"] == "hot", spec
assert "--no-clear-screen" in spec["argv"], spec
assert spec["restart_on"], spec
assert spec["restart_on"][0].endswith("examples/basic/README.md"), spec
PY

View File

@@ -14,8 +14,19 @@ cleanup() {
}
trap cleanup EXIT
"${binary}" >"${log_file}" 2>&1 &
# Launch a generated launcher in the background with stdout/stderr sent to
# a log file. Windows launchers are .cmd wrappers and must be invoked
# through cmd.exe's `call`.
# Globals:   server_pid (written) - PID of the background job.
# Arguments: $1 - launcher path; $2 - log file; remaining args forwarded.
start_launcher() {
  local target="$1" sink="$2"
  shift 2
  case "${target}" in
    *.cmd)
      cmd.exe /c call "${target}" "$@" >"${sink}" 2>&1 &
      ;;
    *)
      "${target}" "$@" >"${sink}" 2>&1 &
      ;;
  esac
  server_pid=$!
}
start_launcher "${binary}" "${log_file}"
for _ in {1..20}; do
if grep -Fq "rules_bun bun_dev example" "${log_file}"; then

View File

@@ -17,6 +17,18 @@ cleanup() {
}
trap cleanup EXIT
# Start a launcher in the background with output redirected to a log file.
# Windows .cmd launchers must be invoked through cmd.exe's `call`.
# Globals:   server_pid (written) - PID of the background job.
# Arguments: $1 - launcher path; $2 - log file; remaining args forwarded.
start_launcher() {
local launcher="$1"
local log_target="$2"
shift 2
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" >"${log_target}" 2>&1 &
else
"${launcher}" "$@" >"${log_target}" 2>&1 &
fi
# NOTE(review): in the cmd.exe branch this is the PID of cmd.exe itself,
# not the server it spawns — killing it may not stop the child. TODO confirm.
server_pid=$!
}
pick_port() {
python3 - <<'PY'
import socket
@@ -37,8 +49,7 @@ verify_vite_app() {
port="$(pick_port)"
log_file="${workdir}/${log_name}.log"
"${binary}" --host 127.0.0.1 --port "${port}" --strictPort >"${log_file}" 2>&1 &
server_pid=$!
start_launcher "${binary}" "${log_file}" --host 127.0.0.1 --port "${port}" --strictPort
for _ in {1..60}; do
if ! kill -0 "${server_pid}" 2>/dev/null; then

View File

@@ -3,4 +3,17 @@ set -euo pipefail
workflow_file="$1"
grep -Eq 'bazel test //(tests/)?\.\.\.' "${workflow_file}"
# Fail the test unless the workflow file contains the literal snippet $1.
# Globals:   workflow_file (read) - path to the workflow YAML under test.
# Arguments: $1 - exact text expected somewhere in the workflow file.
# Outputs:   diagnostic on stderr when the snippet is missing.
# Returns:   0 on success; exits 1 on failure.
assert_contains() {
  local expected="$1"
  # -F: fixed-string match; -- guards against snippets beginning with '-'
  # (several expected snippets here contain leading-dash flag text).
  if ! grep -Fq -- "${expected}" "${workflow_file}"; then
    echo "Error: expected workflow snippet not found:" >&2
    printf ' %s\n' "${expected}" >&2
    exit 1
  fi
}
assert_contains './tests/ci_test/phase8_ci_targets.sh "${{ matrix.phase8_target }}"'
assert_contains 'mapfile -t targets < <(./tests/ci_test/phase8_ci_targets.sh "${{ matrix.phase8_target }}")'
assert_contains 'nix develop --accept-flake-config -c bazel test --test_output=errors "${targets[@]}"'
assert_contains 'bazel test --test_output=errors "${targets[@]}"'

View File

@@ -1,5 +1,5 @@
load("//js:defs.bzl", "js_binary", "js_run_devserver", "js_test", "ts_library")
load("@rules_shell//shell:sh_test.bzl", "sh_test")
load("//js:defs.bzl", "js_binary", "js_run_devserver", "js_test", "ts_library")
ts_library(
name = "helper_lib",
@@ -9,13 +9,14 @@ ts_library(
js_binary(
name = "compat_bin",
args = ["compat-mode"],
entry_point = "main.ts",
deps = [":helper_lib"],
args = ["compat-mode"],
)
sh_test(
name = "js_binary_compat_test",
size = "small",
srcs = ["run_binary.sh"],
args = ["$(location :compat_bin)"],
data = [":compat_bin"],
@@ -23,18 +24,21 @@ sh_test(
js_test(
name = "compat_suite",
size = "small",
entry_point = "app.test.ts",
deps = [":helper_lib"],
)
js_run_devserver(
name = "compat_devserver",
tool = ":compat_bin",
args = ["devserver-mode"],
tool = ":compat_bin",
visibility = ["//tests/ci_test:__pkg__"],
)
sh_test(
name = "js_run_devserver_compat_test",
size = "small",
srcs = ["run_devserver.sh"],
args = ["$(location :compat_devserver)"],
data = [":compat_devserver"],
@@ -42,27 +46,28 @@ sh_test(
js_run_devserver(
name = "compat_devserver_with_package_json",
tool = ":compat_bin",
package_json = "app/package.json",
tool = ":compat_bin",
working_dir = "package",
)
js_run_devserver(
name = "compat_devserver_with_package_dir_hint",
tool = ":compat_bin",
package_dir_hint = "app",
tool = ":compat_bin",
working_dir = "package",
)
sh_test(
name = "js_run_devserver_workspace_shape_test",
size = "small",
srcs = ["verify_workspace_shape.sh"],
args = [
"$(location :compat_devserver_with_package_json)",
"$(location :compat_devserver_with_package_dir_hint)",
],
data = [
":compat_devserver_with_package_json",
":compat_devserver_with_package_dir_hint",
":compat_devserver_with_package_json",
],
)

View File

@@ -2,7 +2,18 @@
set -euo pipefail
binary="$1"
output="$("${binary}")"
# Execute a launcher and print its output. Windows .cmd wrappers run via
# cmd.exe and have carriage returns stripped from their output so callers'
# string comparisons behave the same on every platform.
# Arguments: $1 - launcher path; remaining args forwarded.
run_launcher() {
  local exe="$1"
  shift
  case "${exe}" in
    *.cmd)
      cmd.exe /c call "${exe}" "$@" | tr -d '\r'
      return 0
      ;;
  esac
  "${exe}" "$@"
}
output="$(run_launcher "${binary}")"
if [[ ${output} != "helper:payload-from-lib compat-mode" ]]; then
echo "unexpected output: ${output}" >&2

View File

@@ -2,7 +2,18 @@
set -euo pipefail
binary="$1"
output="$("${binary}")"
# Run a launcher binary, forwarding any extra arguments.
# Windows .cmd launchers go through cmd.exe; their output is piped through
# tr to drop carriage returns so comparisons against expected strings pass.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
# Reached only when the pipeline succeeds; under `set -euo pipefail`
# a failing pipeline aborts before this return.
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${binary}")"
if [[ ${output} != "helper:payload-from-lib compat-mode devserver-mode" ]]; then
echo "unexpected output: ${output}" >&2

View File

@@ -4,10 +4,25 @@ set -euo pipefail
package_json_launcher="$1"
package_dir_hint_launcher="$2"
grep -Fq -- 'package_json="${runfiles_dir}/_main/tests/js_compat_test/app/package.json"' "${package_json_launcher}"
grep -Fq -- 'package_rel_dir_hint="."' "${package_json_launcher}"
grep -Fq -- 'working_dir_mode="package"' "${package_json_launcher}"
python3 - "${package_json_launcher}" "${package_dir_hint_launcher}" <<'PY'
import json
import pathlib
import sys
grep -Fq -- 'package_json=""' "${package_dir_hint_launcher}"
grep -Fq -- 'package_rel_dir_hint="app"' "${package_dir_hint_launcher}"
grep -Fq -- 'working_dir_mode="package"' "${package_dir_hint_launcher}"
def read_spec(launcher: str):
path = pathlib.Path(launcher)
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
return json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
package_json_spec = read_spec(sys.argv[1])
package_dir_hint_spec = read_spec(sys.argv[2])
assert package_json_spec["package_json_short_path"].endswith("tests/js_compat_test/app/package.json"), package_json_spec
assert package_json_spec["package_dir_hint"] == ".", package_json_spec
assert package_json_spec["working_dir_mode"] == "package", package_json_spec
assert package_dir_hint_spec["package_json_short_path"] == "", package_dir_hint_spec
assert package_dir_hint_spec["package_dir_hint"] == "app", package_dir_hint_spec
assert package_dir_hint_spec["working_dir_mode"] == "package", package_dir_hint_spec
PY

View File

@@ -1,5 +1,5 @@
load("//bun:defs.bzl", "bun_bundle", "bun_test", "ts_library")
load("@rules_shell//shell:sh_test.bzl", "sh_test")
load("//bun:defs.bzl", "bun_bundle", "bun_test", "ts_library")
ts_library(
name = "helper_lib",
@@ -14,6 +14,7 @@ bun_bundle(
sh_test(
name = "bundle_dep_propagation_test",
size = "small",
srcs = ["verify_bundle.sh"],
args = ["$(location :bundle_with_deps)"],
data = [":bundle_with_deps"],
@@ -21,6 +22,7 @@ sh_test(
bun_test(
name = "test_with_deps",
size = "small",
srcs = ["app.test.ts"],
deps = [":helper_lib"],
)

108
tests/nested_bazel_test.sh Normal file
View File

@@ -0,0 +1,108 @@
#!/usr/bin/env bash
# Prepare globals for running a nested Bazel invocation from inside a test.
# Locates bazel (or bazelisk) on PATH, creates an isolated session root, and
# builds a command prefix that scrubs the outer test's Bazel/test environment
# so the nested server does not inherit it.
# Globals (written): nested_bazel_root - per-session scratch directory;
#                    nested_bazel_env  - `env -u VAR ...` prefix array;
#                    bazel_cmd         - full nested-bazel command array.
# Returns:   0 on success; exits 1 when neither bazel nor bazelisk exists.
setup_nested_bazel_cmd() {
  local bazel_bin
  local nested_bazel_base
  local env_var
  local -a scrubbed_env_vars
  if bazel_bin="$(command -v bazel 2>/dev/null)"; then
    :
  elif bazel_bin="$(command -v bazelisk 2>/dev/null)"; then
    :
  else
    echo "bazel or bazelisk is required on PATH" >&2
    exit 1
  fi
  nested_bazel_base="${TEST_TMPDIR:-${TMPDIR:-/tmp}}/rules_bun_nested_bazel"
  mkdir -p "${nested_bazel_base}"
  nested_bazel_root="$(mktemp -d "${nested_bazel_base}/session.XXXXXX")"
  mkdir -p "${nested_bazel_root}/tmp"
  # Outer-test variables that would confuse the nested Bazel or leak the
  # outer runfiles/sharding state into it.
  scrubbed_env_vars=(
    BAZEL_TEST
    BUILD_EXECROOT
    COVERAGE_DIR
    GTEST_OUTPUT
    GTEST_SHARD_INDEX
    GTEST_SHARD_STATUS_FILE
    GTEST_TMP_DIR
    GTEST_TOTAL_SHARDS
    JAVA_RUNFILES
    PYTHON_RUNFILES
    RUNFILES_DIR
    RUNFILES_MANIFEST_FILE
    RUNFILES_MANIFEST_ONLY
    TEST_BINARY
    TEST_INFRASTRUCTURE_FAILURE_FILE
    TEST_LOGSPLITTER_OUTPUT_FILE
    TEST_PREMATURE_EXIT_FILE
    TEST_SHARD_INDEX
    TEST_SHARD_STATUS_FILE
    TEST_SRCDIR
    TEST_TARGET
    TEST_TMPDIR
    TEST_TOTAL_SHARDS
    TEST_UNDECLARED_OUTPUTS_ANNOTATIONS
    TEST_UNDECLARED_OUTPUTS_ANNOTATIONS_DIR
    TEST_UNDECLARED_OUTPUTS_DIR
    TEST_UNDECLARED_OUTPUTS_MANIFEST
    TEST_UNDECLARED_OUTPUTS_ZIP
    TEST_UNUSED_RUNFILES_LOG_FILE
    TEST_WARNINGS_OUTPUT_FILE
    XML_OUTPUT_FILE
  )
  nested_bazel_env=(env)
  # `env_var` is local (bug fix: previously it leaked into the caller's scope).
  for env_var in "${scrubbed_env_vars[@]}"; do
    nested_bazel_env+=("-u" "${env_var}")
  done
  # Point the nested TMPDIR inside the session root so removing
  # nested_bazel_root cleans up everything the nested Bazel wrote.
  nested_bazel_env+=("TMPDIR=${nested_bazel_root}/tmp")
  bazel_cmd=(
    "${nested_bazel_env[@]}"
    "${bazel_bin}"
    "--batch"
    "--ignore_all_rc_files"
    "--output_user_root=${nested_bazel_root}/output_user_root"
  )
}
# Resolve the rules_bun workspace root (the directory holding MODULE.bazel).
# Probes the runfiles locations first (workspace-named, then bzlmod "_main"),
# then falls back to walking two levels up from the calling script.
# Arguments: $1 (optional) - script path for the fallback walk-up;
#            defaults to this file's own location.
# Outputs:   workspace root on stdout.
# Returns:   0 on success; exits 1 when no root is found.
find_nested_bazel_workspace_root() {
  local origin="${1:-${BASH_SOURCE[0]}}"
  local probe
  local base_dir
  local -a runfile_roots=(
    "${TEST_SRCDIR:-}/${TEST_WORKSPACE:-}"
    "${TEST_SRCDIR:-}/_main"
  )
  for probe in "${runfile_roots[@]}"; do
    [[ -n ${probe} && -f "${probe}/MODULE.bazel" ]] || continue
    printf '%s\n' "${probe}"
    return 0
  done
  base_dir="$(cd "$(dirname "${origin}")" && pwd -P)"
  probe="$(cd "${base_dir}/../.." && pwd -P)"
  if [[ -f "${probe}/MODULE.bazel" ]]; then
    printf '%s\n' "${probe}"
    return 0
  fi
  echo "Unable to locate rules_bun workspace root" >&2
  exit 1
}
# Best-effort teardown of a nested Bazel session: ask the server in the given
# workspace to shut down, then delete the session root created by
# setup_nested_bazel_cmd. Never propagates a failure.
# Globals:   bazel_cmd (read); nested_bazel_root (read).
# Arguments: $1 (optional) - nested workspace directory.
shutdown_nested_bazel_workspace() {
  local ws="${1:-}"
  # [[ -d "" ]] is false, so the empty-string case is covered by -d alone.
  if [[ -d ${ws} ]]; then
    # Subshell keeps the caller's cwd; shutdown failures are ignored.
    (
      cd "${ws}"
      "${bazel_cmd[@]}" shutdown >/dev/null 2>&1
    ) || true
  fi
  if [[ -n ${nested_bazel_root:-} && -d ${nested_bazel_root} ]]; then
    rm -rf "${nested_bazel_root}"
  fi
}

View File

@@ -32,15 +32,28 @@ config_setting(
],
)
config_setting(
name = "windows_x86_64",
constraint_values = [
"@platforms//os:windows",
"@platforms//cpu:x86_64",
],
)
sh_test(
name = "npm_translate_lock_workspace_test",
size = "small",
srcs = ["npm_translate_lock_workspace_test.sh"],
env_inherit = ["PATH"],
tags = [
"exclusive",
"no-sandbox",
],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(location @bun_windows_x64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
}),
data = select({
@@ -48,6 +61,7 @@ sh_test(
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}) + [
"//:repo_runtime_files",
@@ -55,5 +69,7 @@ sh_test(
"//internal:repo_runtime_files",
"//js:repo_runtime_files",
"//npm:repo_runtime_files",
"//tests:nested_bazel_test.sh",
],
env_inherit = ["PATH"],
)

View File

@@ -1,22 +1,24 @@
#!/usr/bin/env bash
set -euo pipefail
if command -v bazel >/dev/null 2>&1; then
bazel_bin="$(command -v bazel)"
elif command -v bazelisk >/dev/null 2>&1; then
bazel_bin="$(command -v bazelisk)"
else
echo "bazel or bazelisk is required on PATH" >&2
exit 1
fi
bun_path="${1:-bun}"
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
rules_bun_root="$(cd "${script_dir}/../.." && pwd -P)"
# shellcheck source=../nested_bazel_test.sh
source "${script_dir}/../nested_bazel_test.sh"
setup_nested_bazel_cmd
rules_bun_root="$(find_nested_bazel_workspace_root "${BASH_SOURCE[0]}")"
workdir="$(mktemp -d)"
trap 'rm -rf "${workdir}"' EXIT
cleanup() {
local status="$1"
trap - EXIT
shutdown_nested_bazel_workspace "${fixture_dir:-}"
rm -rf "${workdir}"
exit "${status}"
}
trap 'cleanup $?' EXIT
fixture_dir="${workdir}/fixture"
mkdir -p "${fixture_dir}"
@@ -100,7 +102,7 @@ EOF
output="$(
cd "${fixture_dir}" &&
"${bazel_bin}" run //:app
"${bazel_cmd[@]}" run //:app
)"
if [[ ${output} != *"compat:true"* ]]; then
@@ -110,7 +112,7 @@ fi
query_output="$(
cd "${fixture_dir}" &&
"${bazel_bin}" query //:npm__is_number
"${bazel_cmd[@]}" query //:npm__is_number
)"
if ! grep -Fxq "//:npm__is_number" <<<"${query_output}"; then
echo "expected npm_link_all_packages to create //:npm__is_number" >&2

1
tests/script_test/.env Normal file
View File

@@ -0,0 +1 @@
BUN_SCRIPT_ENV_TEST=from-dotenv

View File

@@ -1,32 +1,37 @@
load("//bun:defs.bzl", "bun_script")
load("@rules_shell//shell:sh_test.bzl", "sh_test")
load("//bun:defs.bzl", "bun_script")
bun_script(
name = "hello_script",
script = "hello",
package_json = "package.json",
data = ["hello.ts"],
package_json = "package.json",
script = "hello",
)
sh_test(
name = "bun_script_ts_test",
size = "small",
srcs = ["run_script.sh"],
args = ["$(location :hello_script)", "hello-script"],
args = [
"$(location :hello_script)",
"hello-script",
],
data = [":hello_script"],
)
bun_script(
name = "env_script",
script = "print-env",
package_json = "package.json",
data = [
".env",
"env.ts",
],
package_json = "package.json",
script = "print-env",
)
sh_test(
name = "bun_script_package_cwd_test",
size = "small",
srcs = ["run_env_script.sh"],
args = ["$(location :env_script)"],
data = [":env_script"],
@@ -34,17 +39,18 @@ sh_test(
bun_script(
name = "vite_dev_server",
script = "dev",
package_json = "vite_app/package.json",
node_modules = "@script_test_vite_node_modules//:node_modules",
data = [
"vite_app/index.html",
"vite_app/main.js",
],
node_modules = "@script_test_vite_node_modules//:node_modules",
package_json = "vite_app/package.json",
script = "dev",
)
sh_test(
name = "bun_script_vite_app_test",
size = "small",
srcs = ["run_vite_app.sh"],
args = ["$(location :vite_dev_server)"],
data = [":vite_dev_server"],
@@ -52,28 +58,29 @@ sh_test(
bun_script(
name = "vite_monorepo_app_a_dev_server",
script = "dev",
package_json = "vite_monorepo/apps/app-a/package.json",
node_modules = "@script_test_vite_monorepo_node_modules//:node_modules",
data = [
"vite_monorepo/apps/app-a/index.html",
"vite_monorepo/apps/app-a/main.js",
],
node_modules = "@script_test_vite_monorepo_node_modules//:node_modules",
package_json = "vite_monorepo/apps/app-a/package.json",
script = "dev",
)
bun_script(
name = "vite_monorepo_app_b_dev_server",
script = "dev",
package_json = "vite_monorepo/apps/app-b/package.json",
node_modules = "@script_test_vite_monorepo_node_modules//:node_modules",
data = [
"vite_monorepo/apps/app-b/index.html",
"vite_monorepo/apps/app-b/main.js",
],
node_modules = "@script_test_vite_monorepo_node_modules//:node_modules",
package_json = "vite_monorepo/apps/app-b/package.json",
script = "dev",
)
sh_test(
name = "bun_script_vite_monorepo_apps_test",
size = "small",
srcs = ["run_vite_monorepo_apps.sh"],
args = [
"$(location :vite_monorepo_app_a_dev_server)",
@@ -85,54 +92,69 @@ sh_test(
],
)
sh_test(
name = "bun_script_monorepo_launcher_shape_test",
size = "small",
srcs = ["verify_monorepo_launcher_shape.sh"],
args = [
"$(location :vite_monorepo_app_a_dev_server)",
"$(location :paraglide_monorepo_app_a_build)",
],
data = [
":paraglide_monorepo_app_a_build",
":vite_monorepo_app_a_dev_server",
],
)
bun_script(
name = "paraglide_monorepo_app_a_build",
script = "build:app-a",
package_json = "paraglide_monorepo/package.json",
node_modules = "@script_test_paraglide_monorepo_node_modules//:node_modules",
data = [
"paraglide_monorepo/scripts/build-app-a.mjs",
"paraglide_monorepo/scripts/build-app-b.mjs",
"paraglide_monorepo/packages/i18n/package.json",
"paraglide_monorepo/packages/i18n/project.inlang/settings.json",
"paraglide_monorepo/packages/i18n/messages/en.json",
"paraglide_monorepo/packages/i18n/messages/sv.json",
"paraglide_monorepo/packages/app-a/package.json",
"paraglide_monorepo/packages/app-a/index.html",
"paraglide_monorepo/packages/app-a/main.js",
"paraglide_monorepo/packages/app-a/package.json",
"paraglide_monorepo/packages/app-a/vite.config.js",
"paraglide_monorepo/packages/app-b/package.json",
"paraglide_monorepo/packages/app-b/index.html",
"paraglide_monorepo/packages/app-b/main.js",
"paraglide_monorepo/packages/app-b/package.json",
"paraglide_monorepo/packages/app-b/vite.config.js",
"paraglide_monorepo/packages/i18n/messages/en.json",
"paraglide_monorepo/packages/i18n/messages/sv.json",
"paraglide_monorepo/packages/i18n/package.json",
"paraglide_monorepo/packages/i18n/project.inlang/settings.json",
"paraglide_monorepo/scripts/build-app-a.mjs",
"paraglide_monorepo/scripts/build-app-b.mjs",
],
node_modules = "@script_test_paraglide_monorepo_node_modules//:node_modules",
package_json = "paraglide_monorepo/package.json",
script = "build:app-a",
)
bun_script(
name = "paraglide_monorepo_app_b_build",
script = "build:app-b",
package_json = "paraglide_monorepo/package.json",
node_modules = "@script_test_paraglide_monorepo_node_modules//:node_modules",
data = [
"paraglide_monorepo/scripts/build-app-a.mjs",
"paraglide_monorepo/scripts/build-app-b.mjs",
"paraglide_monorepo/packages/i18n/package.json",
"paraglide_monorepo/packages/i18n/project.inlang/settings.json",
"paraglide_monorepo/packages/i18n/messages/en.json",
"paraglide_monorepo/packages/i18n/messages/sv.json",
"paraglide_monorepo/packages/app-a/package.json",
"paraglide_monorepo/packages/app-a/index.html",
"paraglide_monorepo/packages/app-a/main.js",
"paraglide_monorepo/packages/app-a/package.json",
"paraglide_monorepo/packages/app-a/vite.config.js",
"paraglide_monorepo/packages/app-b/package.json",
"paraglide_monorepo/packages/app-b/index.html",
"paraglide_monorepo/packages/app-b/main.js",
"paraglide_monorepo/packages/app-b/package.json",
"paraglide_monorepo/packages/app-b/vite.config.js",
"paraglide_monorepo/packages/i18n/messages/en.json",
"paraglide_monorepo/packages/i18n/messages/sv.json",
"paraglide_monorepo/packages/i18n/package.json",
"paraglide_monorepo/packages/i18n/project.inlang/settings.json",
"paraglide_monorepo/scripts/build-app-a.mjs",
"paraglide_monorepo/scripts/build-app-b.mjs",
],
node_modules = "@script_test_paraglide_monorepo_node_modules//:node_modules",
package_json = "paraglide_monorepo/package.json",
script = "build:app-b",
)
sh_test(
name = "bun_script_paraglide_monorepo_build_test",
size = "small",
srcs = ["run_paraglide_monorepo_builds.sh"],
args = [
"$(location :paraglide_monorepo_app_a_build)",
@@ -146,21 +168,22 @@ sh_test(
bun_script(
name = "workspace_filtered_script",
script = "say",
package_json = "workspace_run/package.json",
data = [
"workspace_run/packages/pkg-a/package.json",
"workspace_run/packages/pkg-a/say.ts",
"workspace_run/packages/pkg-b/package.json",
"workspace_run/packages/pkg-b/say.ts",
],
filters = ["./packages/pkg-a"],
execution_mode = "sequential",
filters = ["./packages/pkg-a"],
package_json = "workspace_run/package.json",
script = "say",
silent = True,
)
sh_test(
name = "bun_script_workspace_filter_test",
size = "small",
srcs = ["run_workspace_script.sh"],
args = ["$(location :workspace_filtered_script)"],
data = [":workspace_filtered_script"],
@@ -168,20 +191,21 @@ sh_test(
bun_script(
name = "workspace_parallel_script",
script = "say",
package_json = "workspace_run/package.json",
data = [
"workspace_run/packages/pkg-a/package.json",
"workspace_run/packages/pkg-a/say.ts",
"workspace_run/packages/pkg-b/package.json",
"workspace_run/packages/pkg-b/say.ts",
],
workspaces = True,
execution_mode = "parallel",
package_json = "workspace_run/package.json",
script = "say",
workspaces = True,
)
sh_test(
name = "bun_script_workspace_parallel_test",
size = "small",
srcs = ["run_workspace_parallel.sh"],
args = ["$(location :workspace_parallel_script)"],
data = [":workspace_parallel_script"],
@@ -189,22 +213,24 @@ sh_test(
bun_script(
name = "workspace_flagged_script",
script = "say",
package_json = "workspace_run/package.json",
data = [
"workspace_run/packages/pkg-a/package.json",
"workspace_run/packages/pkg-a/say.ts",
"workspace_run/packages/pkg-b/package.json",
"workspace_run/packages/pkg-b/say.ts",
],
workspaces = True,
execution_mode = "parallel",
no_exit_on_error = True,
package_json = "workspace_run/package.json",
script = "say",
shell = "system",
visibility = ["//tests/ci_test:__pkg__"],
workspaces = True,
)
sh_test(
name = "bun_script_workspace_flag_shape_test",
size = "small",
srcs = ["verify_launcher_flags.sh"],
args = [
"$(location :workspace_flagged_script)",

View File

@@ -2,7 +2,18 @@
set -euo pipefail
binary="$1"
output="$(${binary})"
# Run a launcher binary, forwarding any extra arguments.
# Windows .cmd launchers go through cmd.exe; their output is piped through
# tr to drop carriage returns so comparisons against expected strings pass.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
# Reached only when the pipeline succeeds; under `set -euo pipefail`
# a failing pipeline aborts before this return.
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${binary}")"
if [[ ${output} != "from-dotenv" ]]; then
echo "Expected .env value from package directory, got: ${output}" >&2

View File

@@ -10,13 +10,23 @@ cleanup() {
}
trap cleanup EXIT
# Run a launcher binary, forwarding any extra arguments.
# Windows .cmd launchers go through cmd.exe; their output is piped through
# tr to drop carriage returns so comparisons against expected strings pass.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
# Reached only when the pipeline succeeds; under `set -euo pipefail`
# a failing pipeline aborts before this return.
return 0
fi
"${launcher}" "$@"
}
verify_build() {
local binary="$1"
local out_dir="$2"
local expected_title="$3"
local expected_text="$4"
"${binary}" --outDir "${out_dir}" >/dev/null
run_launcher "${binary}" --outDir "${out_dir}" >/dev/null
if [[ ! -f "${out_dir}/index.html" ]]; then
echo "missing build output index.html for ${binary}" >&2

View File

@@ -3,7 +3,18 @@ set -euo pipefail
binary="$1"
expected="$2"
output="$(${binary})"
# Run a launcher binary, forwarding any extra arguments.
# Windows .cmd launchers go through cmd.exe; their output is piped through
# tr to drop carriage returns so comparisons against expected strings pass.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
# Reached only when the pipeline succeeds; under `set -euo pipefail`
# a failing pipeline aborts before this return.
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${binary}")"
if [[ ${output} != "${expected}" ]]; then
echo "Unexpected output from ${binary}: ${output}" >&2

View File

@@ -14,6 +14,18 @@ cleanup() {
}
trap cleanup EXIT
# Start a launcher in the background with output redirected to a log file.
# Windows .cmd launchers must be invoked through cmd.exe's `call`.
# Globals:   server_pid (written) - PID of the background job.
# Arguments: $1 - launcher path; $2 - log file; remaining args forwarded.
start_launcher() {
local launcher="$1"
local log_target="$2"
shift 2
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" >"${log_target}" 2>&1 &
else
"${launcher}" "$@" >"${log_target}" 2>&1 &
fi
# NOTE(review): in the cmd.exe branch this is cmd.exe's PID, not the
# server it spawns — killing it may not stop the child. TODO confirm.
server_pid=$!
}
port="$(
python3 - <<'PY'
import socket
@@ -24,8 +36,7 @@ sock.close()
PY
)"
"${binary}" --host 127.0.0.1 --port "${port}" --strictPort >"${log_file}" 2>&1 &
server_pid=$!
start_launcher "${binary}" "${log_file}" --host 127.0.0.1 --port "${port}" --strictPort
for _ in {1..60}; do
if ! kill -0 "${server_pid}" 2>/dev/null; then

View File

@@ -17,6 +17,18 @@ cleanup() {
}
trap cleanup EXIT
# Start a launcher in the background with output redirected to a log file.
# Windows .cmd launchers must be invoked through cmd.exe's `call`.
# Globals:   server_pid (written) - PID of the background job.
# Arguments: $1 - launcher path; $2 - log file; remaining args forwarded.
start_launcher() {
local launcher="$1"
local log_target="$2"
shift 2
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" >"${log_target}" 2>&1 &
else
"${launcher}" "$@" >"${log_target}" 2>&1 &
fi
# NOTE(review): in the cmd.exe branch this is cmd.exe's PID, not the
# server it spawns — killing it may not stop the child. TODO confirm.
server_pid=$!
}
pick_port() {
python3 - <<'PY'
import socket
@@ -37,8 +49,7 @@ verify_vite_app() {
port="$(pick_port)"
log_file="${workdir}/${log_name}.log"
"${binary}" --host 127.0.0.1 --port "${port}" --strictPort >"${log_file}" 2>&1 &
server_pid=$!
start_launcher "${binary}" "${log_file}" --host 127.0.0.1 --port "${port}" --strictPort
for _ in {1..60}; do
if ! kill -0 "${server_pid}" 2>/dev/null; then

View File

@@ -2,7 +2,18 @@
set -euo pipefail
script_bin="$1"
output="$(${script_bin})"
# Run a launcher binary, forwarding any extra arguments.
# Windows .cmd launchers go through cmd.exe; their output is piped through
# tr to drop carriage returns so comparisons against expected strings pass.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
# Reached only when the pipeline succeeds; under `set -euo pipefail`
# a failing pipeline aborts before this return.
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${script_bin}")"
if [[ ${output} != *"pkg-a"* ]]; then
echo "Expected workspace parallel run output to include pkg-a: ${output}" >&2

View File

@@ -2,7 +2,18 @@
set -euo pipefail
script_bin="$1"
output="$(${script_bin})"
# Run a launcher binary, forwarding any extra arguments.
# Windows .cmd launchers go through cmd.exe; their output is piped through
# tr to drop carriage returns so comparisons against expected strings pass.
run_launcher() {
local launcher="$1"
shift
if [[ ${launcher} == *.cmd ]]; then
cmd.exe /c call "${launcher}" "$@" | tr -d '\r'
# Reached only when the pipeline succeeds; under `set -euo pipefail`
# a failing pipeline aborts before this return.
return 0
fi
"${launcher}" "$@"
}
output="$(run_launcher "${script_bin}")"
if [[ ${output} != *"pkg-a"* ]]; then
echo "Expected workspace run output to include pkg-a: ${output}" >&2

View File

@@ -1,12 +1,21 @@
#!/usr/bin/env bash
set -euo pipefail
binary="$1"
launcher="$1"
shift
for expected in "$@"; do
if ! grep -Fq -- "${expected}" "${binary}"; then
echo "Expected ${binary} to contain ${expected}" >&2
exit 1
fi
done
python3 - "${launcher}" "$@" <<'PY'
import json
import pathlib
import sys
path = pathlib.Path(sys.argv[1])
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
spec = json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
argv = spec["argv"]
for value in sys.argv[2:]:
if value not in argv:
raise SystemExit(f"missing {value!r} in argv {argv!r}")
PY

View File

@@ -0,0 +1,27 @@
#!/usr/bin/env bash
set -euo pipefail
vite_launcher="$1"
paraglide_launcher="$2"
python3 - "${vite_launcher}" "${paraglide_launcher}" <<'PY'
import json
import pathlib
import sys
def read_spec(launcher: str):
path = pathlib.Path(launcher)
if path.suffix.lower() == ".cmd":
path = pathlib.Path(str(path)[:-4])
return json.loads(pathlib.Path(f"{path}.launcher.json").read_text())
vite_spec = read_spec(sys.argv[1])
paraglide_spec = read_spec(sys.argv[2])
assert all(not root.startswith("../") for root in vite_spec["node_modules_roots"]), vite_spec
assert "node_modules" in vite_spec["node_modules_roots"], vite_spec
assert all(not root.startswith("../") for root in paraglide_spec["node_modules_roots"]), paraglide_spec
assert "node_modules" in paraglide_spec["node_modules_roots"], paraglide_spec
assert "packages/i18n/node_modules" in paraglide_spec["node_modules_roots"], paraglide_spec
PY

View File

@@ -32,28 +32,40 @@ config_setting(
],
)
config_setting(
name = "windows_x86_64",
constraint_values = [
"@platforms//os:windows",
"@platforms//cpu:x86_64",
],
)
sh_test(
name = "bun_version_test",
size = "small",
srcs = ["toolchain_version.sh"],
args = select({
":linux_x86_64": ["$(location @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(location @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(location @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(location @bun_darwin_aarch64//:bun)"],
"//conditions:default": ["$(location @bun_linux_x64//:bun)"],
":linux_x86_64": ["$(rlocationpath @bun_linux_x64//:bun)"],
":linux_aarch64": ["$(rlocationpath @bun_linux_aarch64//:bun)"],
":darwin_x86_64": ["$(rlocationpath @bun_darwin_x64//:bun)"],
":darwin_aarch64": ["$(rlocationpath @bun_darwin_aarch64//:bun)"],
":windows_x86_64": ["$(rlocationpath @bun_windows_x64//:bun)"],
"//conditions:default": ["$(rlocationpath @bun_linux_x64//:bun)"],
}),
data = select({
":linux_x86_64": ["@bun_linux_x64//:bun"],
":linux_aarch64": ["@bun_linux_aarch64//:bun"],
":darwin_x86_64": ["@bun_darwin_x64//:bun"],
":darwin_aarch64": ["@bun_darwin_aarch64//:bun"],
":windows_x86_64": ["@bun_windows_x64//:bun"],
"//conditions:default": ["@bun_linux_x64//:bun"],
}),
)
sh_test(
name = "toolchain_resolution_matrix_test",
size = "small",
srcs = ["toolchain_resolution_matrix.sh"],
args = ["$(location //tests/toolchain_test:BUILD.bazel)"],
args = ["$(rlocationpath //tests/toolchain_test:BUILD.bazel)"],
data = ["//tests/toolchain_test:BUILD.bazel"],
)

View File

@@ -1,7 +1,63 @@
#!/usr/bin/env bash
set -euo pipefail
build_file="$1"
# Bootstrap the runfiles environment: prefer the test runner's TEST_SRCDIR,
# then fall back to the sibling .runfiles directory / manifest next to $0
# (the manifest variants cover manifest-only platforms such as Windows).
if [[ -z ${RUNFILES_DIR:-} && -n ${TEST_SRCDIR:-} && -d ${TEST_SRCDIR} ]]; then
RUNFILES_DIR="${TEST_SRCDIR}"
fi
if [[ -z ${RUNFILES_DIR:-} && -z ${RUNFILES_MANIFEST_FILE:-} ]]; then
if [[ -d "$0.runfiles" ]]; then
RUNFILES_DIR="$0.runfiles"
elif [[ -f "$0.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.exe.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.exe.runfiles_manifest"
fi
fi
# Resolve a Bazel runfile reference to a real filesystem path.
# Resolution order: absolute paths (POSIX or Windows drive-letter) pass
# through untouched; a path that exists relative to the cwd is used as-is;
# otherwise the path is probed under the runfiles tree — bare, prefixed
# with the test workspace name, then with the bzlmod "_main" repo name —
# first against RUNFILES_DIR, then via the runfiles manifest.
# Arguments: $1 - runfile path to resolve.
# Outputs:   resolved path on stdout.
# Returns:   0 on success; exits 1 when missing or unresolvable.
resolve_runfile() {
local path="${1:-}"
local candidate
local resolved
if [[ -z ${path} ]]; then
echo "Error: missing runfile path" >&2
exit 1
fi
# Already absolute: '/...' or a Windows drive path like 'C:\...' / 'C:/...'.
if [[ ${path} == /* || ${path} =~ ^[A-Za-z]:[\\/] ]]; then
printf '%s\n' "${path}"
return 0
fi
if [[ -e ${path} ]]; then
printf '%s\n' "${path}"
return 0
fi
for candidate in \
"${path}" \
"${TEST_WORKSPACE:-}/${path}" \
"_main/${path}"; do
[[ -z ${candidate} ]] && continue
if [[ -n ${RUNFILES_DIR:-} && -e "${RUNFILES_DIR}/${candidate}" ]]; then
printf '%s\n' "${RUNFILES_DIR}/${candidate}"
return 0
fi
if [[ -n ${RUNFILES_MANIFEST_FILE:-} ]]; then
# Manifest rows look like "<runfile path> <real path>"; match the key
# at column one and print everything after the separating space.
resolved="$(
awk -v key="${candidate}" 'index($0, key " ") == 1 { print substr($0, length(key) + 2); exit }' \
"${RUNFILES_MANIFEST_FILE}"
)"
if [[ -n ${resolved} ]]; then
printf '%s\n' "${resolved}"
return 0
fi
fi
done
echo "Error: unable to resolve runfile: ${path}" >&2
exit 1
}
build_file="$(resolve_runfile "${1:-}")"
grep -Eq 'name = "linux_x86_64"' "${build_file}"
grep -Eq 'name = "linux_aarch64"' "${build_file}"

View File

@@ -1,8 +1,64 @@
#!/usr/bin/env bash
set -euo pipefail
bun_path="$1"
version="$(${bun_path} --version)"
# Bootstrap the runfiles environment: prefer the test runner's TEST_SRCDIR,
# then fall back to the sibling .runfiles directory / manifest next to $0
# (the manifest variants cover manifest-only platforms such as Windows).
if [[ -z ${RUNFILES_DIR:-} && -n ${TEST_SRCDIR:-} && -d ${TEST_SRCDIR} ]]; then
RUNFILES_DIR="${TEST_SRCDIR}"
fi
if [[ -z ${RUNFILES_DIR:-} && -z ${RUNFILES_MANIFEST_FILE:-} ]]; then
if [[ -d "$0.runfiles" ]]; then
RUNFILES_DIR="$0.runfiles"
elif [[ -f "$0.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.exe.runfiles_manifest" ]]; then
RUNFILES_MANIFEST_FILE="$0.exe.runfiles_manifest"
fi
fi
# Map a runfile reference to a concrete path on disk.
# Absolute paths (POSIX or Windows drive-letter) are returned verbatim, a
# path that already exists relative to the cwd is returned as-is, and
# anything else is looked up in the runfiles tree under three candidate
# keys (bare, "${TEST_WORKSPACE}/", "_main/") — via RUNFILES_DIR first,
# then the runfiles manifest.
# Arguments: $1 - runfile path to resolve.
# Outputs:   resolved path on stdout.
# Returns:   0 on success; exits 1 when missing or unresolvable.
resolve_runfile() {
  local wanted="${1:-}"
  local key
  local mapped
  if [[ -z ${wanted} ]]; then
    echo "Error: missing runfile path" >&2
    exit 1
  fi
  # Already absolute — nothing to resolve.
  if [[ ${wanted} == /* || ${wanted} =~ ^[A-Za-z]:[\\/] ]]; then
    printf '%s\n' "${wanted}"
    return 0
  fi
  # Relative and present in the cwd — use it directly.
  if [[ -e ${wanted} ]]; then
    printf '%s\n' "${wanted}"
    return 0
  fi
  for key in "${wanted}" "${TEST_WORKSPACE:-}/${wanted}" "_main/${wanted}"; do
    if [[ -z ${key} ]]; then
      continue
    fi
    if [[ -n ${RUNFILES_DIR:-} && -e "${RUNFILES_DIR}/${key}" ]]; then
      printf '%s\n' "${RUNFILES_DIR}/${key}"
      return 0
    fi
    if [[ -n ${RUNFILES_MANIFEST_FILE:-} ]]; then
      # Manifest rows are "<key> <real path>"; emit the remainder of the
      # first row whose key matches exactly at column one.
      mapped="$(
        awk -v key="${key}" 'index($0, key " ") == 1 { print substr($0, length(key) + 2); exit }' \
          "${RUNFILES_MANIFEST_FILE}"
      )"
      if [[ -n ${mapped} ]]; then
        printf '%s\n' "${mapped}"
        return 0
      fi
    fi
  done
  echo "Error: unable to resolve runfile: ${wanted}" >&2
  exit 1
}
bun_path="$(resolve_runfile "${1:-}")"
version="$("${bun_path}" --version)"
if [[ ! ${version} =~ ^[0-9]+\.[0-9]+\.[0-9]+ ]]; then
echo "Unexpected bun version output: ${version}" >&2