feat: 0.1.0
Some checks failed
distribution-gate / distribution-gate (push) Failing after 1m56s

This commit is contained in:
eric
2026-04-04 18:41:34 +02:00
parent 32147d4552
commit ebb6b488fe
48 changed files with 2541 additions and 139 deletions

View File

@@ -1,9 +1,9 @@
engine: "data-driven-v1"
goal_file: ".agent/controllers/longview-planner/goal.md"
plan_file: ".agent/controllers/longview-planner/plan.toon"
state_file: ".agent/controllers/longview-planner/state.toon"
standards_file: ".agent/controllers/longview-planner/standards.md"
branch: "codex/longview-planner"
goal_file: ".agent/controllers/portable-tool-route/goal.md"
plan_file: ".agent/controllers/portable-tool-route/plan.toon"
state_file: ".agent/controllers/portable-tool-route/state.toon"
standards_file: ".agent/controllers/portable-tool-route/standards.md"
branch: "codex/portable-tool-route"
continue_until: "fixed-point"
max_runs: 12
max_wall_clock: 4h

View File

@@ -0,0 +1,3 @@
# Goal
Describe the goal for this controller.

View File

@@ -0,0 +1,3 @@
version: 1
goal_summary: No plan yet
steps[0]:

View File

@@ -0,0 +1,5 @@
# Standards
- Keep code maintainable.
- Avoid one-off hacks.
- Leave tests green.

View File

@@ -0,0 +1,30 @@
version: 1
phase: planning
stop_reason: null
goal_status: unknown
goal_revision: 0
current_step_id: null
iteration: 0
replan_required: false
completed_steps[0]:
blocked_steps[0]:
last_verification: null
last_cleanup_summary: null
last_full_test_summary: null
history[0]:
notes[0]:
planning_rejection_counters:
planning_annotation_counters:
planning_session:
pending_input: null
pending_question: null
transcript[0]:
started_at: "1775319370"
last_usage_refresh_at: "1775319514"
last_usage_input_tokens: null
last_usage_output_tokens: null
last_usage_primary_window: null
last_usage_secondary_window: null
run_model: "gpt-5.3-codex-spark"
fast_mode: false
allow_branching: false

View File

@@ -0,0 +1,3 @@
# Goal
Describe the goal for this controller.

View File

@@ -0,0 +1,3 @@
version: 1
goal_summary: No plan yet
steps[0]:

View File

@@ -0,0 +1,5 @@
# Standards
- Keep code maintainable.
- Avoid one-off hacks.
- Leave tests green.

View File

@@ -0,0 +1,31 @@
version: 1
phase: planning
stop_reason: null
goal_status: unknown
goal_revision: 0
current_step_id: null
iteration: 0
replan_required: false
completed_steps[0]:
blocked_steps[0]:
last_verification: null
last_cleanup_summary: null
last_full_test_summary: null
history[0]:
notes[0]:
planning_rejection_counters:
planning_annotation_counters:
planning_session:
pending_input: null
pending_question: null
transcript[1]{role,content}:
user,Can you setup a distribution path so i can use this tool in other projects?
started_at: "1775301190"
last_usage_refresh_at: "1775318984"
last_usage_input_tokens: null
last_usage_output_tokens: null
last_usage_primary_window: null
last_usage_secondary_window: null
run_model: "gpt-5.3-codex-spark"
fast_mode: false
allow_branching: false

View File

@@ -0,0 +1,3 @@
# Goal
Describe the goal for this controller.

View File

@@ -0,0 +1,3 @@
version: 1
goal_summary: No plan yet
steps[0]:

View File

@@ -0,0 +1,5 @@
# Standards
- Keep code maintainable.
- Avoid one-off hacks.
- Leave tests green.

View File

@@ -0,0 +1,31 @@
version: 1
phase: planning
stop_reason: null
goal_status: unknown
goal_revision: 0
current_step_id: null
iteration: 0
replan_required: false
completed_steps[0]:
blocked_steps[0]:
last_verification: null
last_cleanup_summary: null
last_full_test_summary: null
history[0]:
notes[0]:
planning_rejection_counters:
planning_annotation_counters:
planning_session:
pending_input: null
pending_question: null
transcript[1]{role,content}:
user,Can you setup a distribution path so i can use this tool in other projects?
started_at: "1775319077"
last_usage_refresh_at: "1775319155"
last_usage_input_tokens: null
last_usage_output_tokens: null
last_usage_primary_window: null
last_usage_secondary_window: null
run_model: "gpt-5.3-codex-spark"
fast_mode: false
allow_branching: false

View File

@@ -0,0 +1,2 @@
### Goal
Define and lock a deterministic, versioned distribution contract for this Rust autonomous controller so other projects can consume it safely via immutable artifacts, explicit metadata, and machine-readable instructions with minimal maintenance overhead.

View File

@@ -0,0 +1,73 @@
version: 3
goal_summary: "Publish a stable distribution contract and deterministic artifact pipeline with validated checksums, consumer-facing metadata, and CI enforcement to make reuse by external projects repeatable and low-risk."
steps[5]:
- id: "sm-1"
title: Finalize a canonical distribution contract and compatibility surface
purpose: "Create a versioned, explicit contract that external projects can trust for install, pinning, and upgrade behavior."
notes: Define what consumers can depend on before changing packaging code.
inputs[3]: "Existing ask to enable cross-project consumption",Current supported Rust targets and runtime assumptions,Current release/versioning model
outputs[3]: A new distribution ADR or docs/distribution.md,"Machine-readable contract file (e.g., JSON/YAML)","Compatibility matrix for Rust version, OS, arch, and binary/runtime expectations"
dependencies[0]:
verification[1]:
- label: Contract review
commands[2]: "Verify contract includes entrypoints, versioning, naming, retention, and checksum policy",Check contract is versioned and immutable for released artifacts
cleanup_requirements[1]{label,description}:
Contract drift prevention,Require contract updates to include a changelog entry when compatibility assumptions change.
status: done
attempts: 1
- id: "sm-2"
title: Implement one deterministic release layout and builder orchestrator
purpose: Generate all distributable outputs from a single script into a fixed path and filename schema.
notes: "No redesign needed; execute this immediately as the first active workstream and keep outputs constrained to a single canonical contract-first generator. Controller recovered this step from stale active state and returned it to todo."
inputs[4]: Distribution contract,Existing build profile configuration,Release target matrix,Release/version manifest schema
outputs[4]: Single deterministic `dist/` generator entrypoint,Canonical artifact path format and index manifest (version/target/platform keyed),Stable directory naming policy and versioned path template,Deterministic provenance snapshot included in generated index
dependencies[1]: "sm-1"
verification[1]:
- label: Path determinism check
commands[2]: Run generator twice from a clean tree and diff outputs,Assert artifact paths and filenames are pure functions of version + target + toolchain + source inputs
cleanup_requirements[1]{label,description}:
Legacy cleanup,"Document/remove old ad-hoc release paths and disable ambiguous aliases."
status: done
attempts: 1
- id: "sm-3"
title: Add reproducible packaging plus integrity metadata
purpose: "Bundle binaries/assets consistently and attach machine-verifiable metadata for downstream integrity checks."
notes: Focus execution on deterministic archive layout + checksum/provenance emission so downstream consumers can reproduce exact bytes. Controller recovered this step from stale active state and returned it to todo.
inputs[3]: Canonical output layout,Cargo/binary build inputs,Target/toolchain metadata
outputs[3]: Versioned package archive (tar/zip),SHA256 manifest and optional signature metadata,"Build provenance fields (version,target,profile,build timestamp,toolchain,git rev)"
dependencies[1]: "sm-2"
verification[1]:
- label: Artifact integrity check
commands[2]: Verify archive manifest matches emitted tree,Validate checksum file against generated artifact using standard tooling
cleanup_requirements[1]{label,description}:
Reproducibility hardening,"Strip non-deterministic fields from archives (timestamps, local paths, unordered metadata order) and document required reproducible tooling constraints. "
status: done
attempts: 1
- id: "sm-4"
title: Expose stable consumer integration interface and examples
purpose: Make consumption path predictable with one recommended model and explicit alternatives.
notes: "Define a single canonical, immutable distribution entrypoint (`dist/index.json`), version+target+profile resolution examples, and explicit migration/deprecation guidance; avoid additional active aliases. Controller recovered this step from stale active state and returned it to todo."
inputs[3]: Generated artifacts and checksums,"Machine-readable distribution contract/schema",Generated manifests and metadata
outputs[3]: Consumer integration guide for external projects,"Machine-readable release index for discovery and download","Compatibility-safe fallback/legacy path note, limited duration and explicit deprecation timeline"
dependencies[1]: "sm-3"
verification[1]:
- label: Consumption validation
commands[3]: Resolve immutable artifact by version+target from `dist/index.json` and assert only one canonical artifact path is documented,"Verify README/docs examples use index-based resolution and hash verification before execution",Confirm checksum verification command is included in quickstart
cleanup_requirements[1]{label,description}:
Alias minimization,Ensure only one active install/path alias remains for canonical artifacts and document sunset date for any temporary compatibility path.
status: done
attempts: 1
- id: "sm-5"
title: Enforce distribution contract in CI and maintenance workflow
purpose: Make distribution drift and nondeterminism observable so releases remain safely consumable by external projects.
notes: "Activate this final hardening step: wire release generation, reproducibility, manifest/schema validation, docs sync, and compatibility/retention/deprecation checks into existing CI quality gates. Controller recovered this step from stale active state and returned it to todo."
inputs[3]: Distribution scripts,Current CI workflow,Release contract docs
outputs[3]: CI distribution gate job with deterministic artifact and checksum enforcement,Automated contract/manifest/compatibility validation in pipeline,"Operational release guardrails for ownership handoff, deprecation, and retention policy"
dependencies[1]: "sm-4"
verification[1]:
- label: Release gate check
commands[4]: "Run tests, lint, and build before dist steps","Execute deterministic build + checksum validation and fail on byte-drift","Validate machine-readable distribution contract (compatibility matrix, migration/deprecation, checksum policy)",Ensure docs sync is validated in the same release gate
cleanup_requirements[1]{label,description}:
Operational hygiene,"Document and gate explicit version ownership transfer, deprecation workflow, and minimum compatibility retention window in CI/docs."
status: done
attempts: 2

View File

@@ -0,0 +1,12 @@
### Standards
1. Same git ref + source + toolchain + build inputs must yield identical artifact bytes and metadata.
2. Use one canonical `dist/` layout and semantic naming convention that never changes once released.
3. Keep a machine-readable distribution contract (format, compatibility matrix, assumptions, checksum policy, deprecation policy) as a first-class interface.
4. Always emit reproducibility metadata in every release artifact: version, target, profile, build time inputs, git rev, and content checksums.
5. Centralize release behavior in one versioned orchestration script/config; avoid scattered shell snippets and hidden paths.
6. Gate releases through CI checks (tests, lint/build, artifact determinism, manifest validity, docs sync) before publishing.
7. Treat compatibility, retention, and migration steps as part of the contract to reduce future operational risk.
## Quality Gate Annotations
- Iteration-aware review note: confirm long-term ownership and cleanup path.

View File

@@ -0,0 +1,49 @@
version: 1
phase: done
stop_reason: null
goal_status: done
goal_revision: 1
current_step_id: null
iteration: 5
replan_required: true
completed_steps[5]: "sm-1","sm-2","sm-3","sm-4","sm-5"
blocked_steps[0]:
last_verification:
passed: true
summary: No commands requested
commands[0]:
output[0]:
last_cleanup_summary:
passed: true
summary: "Cleanup accepted for sm-5"
commands[0]:
output[0]:
last_full_test_summary:
passed: true
summary: No commands requested
commands[0]:
output[0]:
history[5]{timestamp,kind,detail}:
"1775319621","step-complete","Completed sm-1"
"1775319737","step-complete","Completed sm-2"
"1775319845","step-complete","Completed sm-3"
"1775319900","step-complete","Completed sm-4"
"1775319970","step-complete","Completed sm-5"
notes[4]: "Recovered stale active step state for portable-tool-route. Reset sm-2 to todo.","Recovered stale active step state for portable-tool-route. Reset sm-3 to todo.","Recovered stale active step state for portable-tool-route. Reset sm-4 to todo.","Recovered stale active step state for portable-tool-route. Reset sm-5 to todo."
planning_rejection_counters:
planning_annotation_counters:
planning_session:
pending_input: null
pending_question: null
transcript[2]{role,content}:
user,Can you setup a distribution path so i can use this tool in other projects?
assistant,Planning completed
started_at: "1775319559"
last_usage_refresh_at: "1775320044"
last_usage_input_tokens: null
last_usage_output_tokens: null
last_usage_primary_window: null
last_usage_secondary_window: null
run_model: "gpt-5.3-codex-spark"
fast_mode: false
allow_branching: false

View File

@@ -1 +0,0 @@
/nix/store/j8wb3r6xmck1kwx5yfhgl0dlg8y2qa1b-source

View File

@@ -0,0 +1 @@
/nix/store/zhfr3hg0ix1pjrv54b0i1pv90n5mhymm-source

37
.github/workflows/distribution-gate.yml vendored Normal file
View File

@@ -0,0 +1,37 @@
name: distribution-gate
on:
pull_request:
push:
branches:
- main
permissions:
contents: read
jobs:
distribution-gate:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt, clippy
- name: Install Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Quality checks
run: |
cargo test --locked
cargo fmt --all -- --check
cargo clippy --all-targets --all-features --locked -- -D warnings
cargo build --locked --release
- name: Distribution gate
run: python3 scripts/release-gate.py

1
.gitignore vendored
View File

@@ -1,2 +1,3 @@
.DS_Store
target
dist

View File

@@ -55,3 +55,59 @@ Use Nix or Cargo:
nix develop -c cargo test
nix develop -c cargo run
```
## Nix package consumption
This repo is a flake and exposes `codex-controller-loop` under `packages` plus an exported overlay.
From another flake:
```nix
{
inputs.codex-controller-loop.url = "github:your-org/codex-controller-loop";
outputs = { self, nixpkgs, codex-controller-loop, ... }:
let
system = "x86_64-linux";
in
{
packages.${system}.default = codex-controller-loop.packages.${system}.default;
};
}
```
Through the overlay:
```nix
{
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
inputs.codex-controller-loop.url = "github:your-org/codex-controller-loop";
outputs = { self, nixpkgs, codex-controller-loop, ... }:
let
system = "x86_64-linux";
pkgs = import nixpkgs {
inherit system;
overlays = [ codex-controller-loop.overlays.default ];
};
in
{
packages.${system}.default = pkgs.codex-controller-loop;
};
}
```
Non-flake usage also works:
```bash
nix-build
nix-env -f . -iA codex-controller-loop
```
## Release generation
```bash
python scripts/release-orchestrator.py --version 0.1.0 --target x86_64-unknown-linux-gnu
```
Generated artifacts are written to a canonical `dist/` layout and tracked by `dist/index.json` (legacy alias: `dist/{distribution_contract_version}/index.json` until 2026-07-01).

2
default.nix Normal file
View File

@@ -0,0 +1,2 @@
{ pkgs ? import <nixpkgs> {} }:
pkgs.callPackage ./nix/packages/codex-controller-loop.nix {}

View File

@@ -0,0 +1,24 @@
# Distribution Contract Changelog
## 1.0.3
- 2026-04-04
- Added CI distribution gate with reproducibility and manifest validation (`scripts/release-gate.py` + `.github/workflows/distribution-gate.yml`).
- Added maintenance governance fields in `docs/distribution-contract.json` and operational release ownership/retention guidance in `docs/distribution.md`.
## 1.0.2
- 2026-04-04
- Added canonical consumer resolution entrypoint at `dist/index.json`.
- Kept a single deprecated compatibility path `dist/{distribution_contract_version}/index.json` with documented sunset date (`2026-07-01`).
- Documented immutable tuple-based resolution and checksum-first consumption examples for external projects.
## 1.0.1
- 2026-04-04
- Added canonical release orchestrator and versioned dist layout index manifest generation (`scripts/release-orchestrator.py` + `scripts/release-orchestrator.config.json`).
- Extended contract metadata with generator location, path template, and artifact index path (`docs/distribution-contract.json`).
## 1.0.0
- 2026-04-04
- Introduced versioned distribution contract (`distribution-contract@1.0`) for deterministic, cross-project-safe consumption.
- Defined canonical `dist/` namespace and immutable artifact naming pattern.
- Added machine-readable compatibility and checksum policy in `docs/distribution-contract.json`.
- Established compatibility governance and changelog requirement for contract assumption changes.

View File

@@ -0,0 +1,201 @@
{
"schema_version": "distribution-contract-v1",
"contract_version": "1.0.0",
"artifact": {
"name": "codex-controller-loop",
"entrypoints": [
"codex-controller-loop"
],
"distribution_index": {
"canonical": "dist/index.json",
"legacy": [
{
"path": "dist/{contract_version}/index.json",
"status": "deprecated",
"deprecation_sunset": "2026-07-01T00:00:00Z",
"migration_deadline": "2026-07-01T00:00:00Z",
"notes": "Temporary compatibility path while downstream scripts migrate to dist/index.json."
}
],
"resolution_precedence": [
"dist/index.json",
"dist/{contract_version}/index.json"
]
},
"versioning": {
"model": "semver",
"release_channels": [
"stable"
],
"pre_release": "supported",
"pinning_guidance": "Pin by exact tuple: version, target, profile, git_rev, toolchain, dist_revision"
},
"naming": {
"namespace": "dist",
"canonical_pattern": "codex-controller-loop-v{version}-{target}-{profile}-{rust}-{gitsha}-{dist_revision}.{ext}",
"immutable_fields": [
"version",
"target",
"profile",
"rust",
"gitsha",
"dist_revision"
]
},
"release_artifacts": {
"formats": [
"tar.gz"
],
"required_manifests": [
"checksums.json",
"manifest.json",
"provenance.json"
],
"index_path_template": "dist/index.json",
"legacy_index_path_template": "dist/{contract_version}/index.json",
"artifact_directory_template": "dist/{contract_version}/{version}/{target}/{profile}/{toolchain}/{gitsha}/{dist_revision}",
"orchestrator": {
"name": "scripts/release-orchestrator.py",
"config": "scripts/release-orchestrator.config.json"
}
}
},
"compatibility_matrix": [
{
"platform_family": "linux",
"os": "Linux",
"arch": [
"x86_64",
"aarch64"
],
"compatibility": "recommended",
"runtime_expectations": [
"posix-like terminal with ANSI support",
"UTF-8 locale",
"interactive TTY for full features"
]
},
{
"platform_family": "darwin",
"os": "Darwin",
"arch": [
"x86_64",
"aarch64"
],
"compatibility": "recommended",
"runtime_expectations": [
"POSIX terminal",
"UTF-8 locale",
"interactive TTY for full features"
]
},
{
"platform_family": "windows",
"os": "Windows",
"arch": [
"x86_64"
],
"compatibility": "planned",
"runtime_expectations": [
"UTF-8 locale",
"terminal support in Windows console"
]
}
],
"assumptions": {
"rust": {
"edition": "2021",
"toolchain": "release-time explicit rustc version",
"minimum_guaranteed": null,
"minimum_for_release": "recorded per artifact in manifest"
},
"terminal": "ANSI-capable terminal and TTY are required for interactive TUI mode",
"non_tty_mode": "operable only for restricted command-and-control paths"
},
"reproducibility_metadata": {
"required": true,
"fields": [
"version",
"target",
"profile",
"build_time_inputs",
"git_rev",
"toolchain",
"checksums"
]
},
"provenance_metadata": {
"schema_version": "distribution-provenance-v1",
"required_fields": [
"artifact",
"build_inputs",
"build_artifact",
"generated_at"
]
},
"checksum_policy": {
"algorithm": "sha256",
"required": true,
"file": "checksums.json",
"consumer_check_required": true
},
"maintenance_governance": {
"release_ownership": {
"primary_owner": "Repository maintainers and release steward on duty",
"ownership_handoff_required": [
"Update docs/distribution.md, docs/distribution-changelog.md, and docs/distribution-contract.json in lockstep for any compatibility-relevant changes",
"Attach a passing scripts/release-gate.py run log to the release PR or merge checklist",
"Verify the release steward handoff note in the PR description before publish"
],
"handoff_minimum_gap": "at least one full release cycle"
},
"deprecation_governance": {
"required_notice_days": 60,
"retention_for_retirement": "one release cycle",
"retirement_announcement_channels": [
"docs/distribution.md",
"docs/distribution-changelog.md"
]
},
"retention_minimum_generations": 6
},
"compatibility_policy": {
"contract_versioning": "distribution-contract-v1",
"breaking_change_requires_bump": true,
"deprecation_notice_cycles": 1,
"assumption_changes_requires_changelog": true
},
"deprecation_policy": {
"legacy_index_path": {
"supported_until": "2026-07-01T00:00:00Z",
"sunset_reason": "Canonical index migration to dist/index.json.",
"required_action": "All consumers must resolve artifacts from the canonical index path and remove legacy hard-coding before sunset.",
"notice_channels": [
"docs/distribution-changelog.md",
"docs/distribution.md"
]
},
"migration_window": {
"minimum_notice_days": 60,
"default_alias_removal_after": "2026-07-01T00:00:00Z"
}
},
"retention_policy": {
"kept_release_generations": 6,
"retention_rationale": "Keep at least six release generations to support rollback and reproducible debugging.",
"migration_required_on_removal": true,
"migration_minimum_notice_window": "one release cycle"
},
"migration_steps": {
"breaking_change_notice": [
"Add an entry to docs/distribution-changelog.md with impact summary and effective release",
"Provide compatibility matrix and assumption deltas in docs/distribution.md",
"Publish manifest/metadata updates alongside artifacts before deprecation cutoff"
],
"rollback_path": "Consumers can repin immutable artifact tuple in their integration config."
},
"changelog": {
"file": "docs/distribution-changelog.md",
"required_on_contract_changes": true
}
}

145
docs/distribution.md Normal file
View File

@@ -0,0 +1,145 @@
# Distribution Contract for `codex-controller-loop`
## v1 Contract
This document defines the first stable, versioned distribution contract for the Rust controller binary. It is the canonical compatibility and consumption reference for external projects.
## 1) Contract version and release identity
- Contract version: `distribution-contract@1.0`
- Release artifact identity is immutable for a given tuple:
- `artifact_version` (semver)
- `git_rev` (full SHA, immutable reference)
- `toolchain` and `build_profile`
- `target`
- `dist_revision` (incrementing revision when rebuilds occur for the same release tuple)
- Consumers must pin by immutable tuple, never by moving tags.
## 2) Canonical artifact entrypoint
- Primary entrypoint: `codex-controller-loop` CLI.
- Canonical binary names:
- `codex-controller-loop` (single binary)
- Canonical distribution entrypoint index: `dist/index.json`.
- Deprecated compatibility entrypoint (removed after 2026-07-01): `dist/{distribution_contract_version}/index.json`.
## 3) Canonical dist layout and naming
- `dist/` is the only published artifact namespace.
- Directory template (contract version stable):
- `dist/{distribution_contract_version}/{artifact_version}/{target}/{profile}/{toolchain}/{gitsha}/{dist_revision}/`
- Example: `dist/1.0.0/0.1.0/x86_64-unknown-linux-gnu/release/1.84.0/ab12cd34/r1/`
- Canonical artifact filename:
- `codex-controller-loop-v{version}-{target}-{profile}-{rust}-{gitsha}-{dist_revision}.{ext}`
- `version` = semver release (e.g. `1.4.2`)
- `target` = Rust target triple
- `profile` = `release` or `debug`
- `rust` = rustc version string used in build
- `gitsha` = short git commit hash of source revision
- `dist_revision` = `r1`, `r2`, ... for immutable re-build iterations
- `ext` = container format used by release pipeline (e.g. `tar.gz`)
- Canonical generator entrypoint:
- `scripts/release-orchestrator.py` (single orchestrator)
- Controlled by `scripts/release-orchestrator.config.json`
- Index manifest output: `dist/index.json`
- Deterministic provenance snapshot in generated index:
- Each index artifact row is keyed by `version + target + profile + toolchain + gitsha + dist_revision`
- `artifact_file`, `manifest_file`, `checksums_file`, `artifact_sha256`, and `source_date_epoch` are emitted
## 3.1) Consumer integration examples
Use the canonical index first, then fail fast if no rows match the requested immutable tuple. Optional legacy fallback is accepted only during migration.
```bash
VERSION=0.1.0
TARGET=x86_64-unknown-linux-gnu
PROFILE=release
TOOLCHAIN=1.84.0
GITSHA=ab12cd34
DIST_REVISION=r1
INDEX=dist/index.json
if [ ! -f "$INDEX" ]; then
INDEX=dist/1.0.0/index.json
echo "warning: using deprecated index path, remove by 2026-07-01"
fi
ARTIFACTS=$(jq -r --arg version "$VERSION" --arg target "$TARGET" --arg profile "$PROFILE" \
--arg toolchain "$TOOLCHAIN" --arg git "$GITSHA" --arg dist "$DIST_REVISION" \
'.releases[] | select(.version==$version) | .targets[] | select(.target==$target) | .profiles[] | select(.profile==$profile) | .artifacts[] | select(.toolchain|startswith($toolchain)) | select(.git_rev|startswith($git)) | select(.dist_revision==$dist) | .artifact_file' "$INDEX")
COUNT=$(printf "%s\n" "$ARTIFACTS" | awk 'NF {count += 1} END {print count + 0}')
if [ "$COUNT" -ne 1 ]; then
echo "expected exactly one artifact for immutable tuple" >&2
exit 1
fi
ARTIFACT_FILE=$(printf "%s" "$ARTIFACTS")
echo "resolved artifact: $ARTIFACT_FILE"
PACKAGE_DIR="${ARTIFACT_FILE%/*}/package"
CHECKSUMS="$PACKAGE_DIR/checksums.json"
python - <<PY
import hashlib, json, pathlib
artifact = pathlib.Path("${ARTIFACT_FILE}")
checksums = json.loads(pathlib.Path("${CHECKSUMS}").read_text())
actual = hashlib.sha256(artifact.read_bytes()).hexdigest()
if actual != checksums["artifact_sha256"]:
raise SystemExit("artifact checksum mismatch")
print(f"artifact sha256: {actual}")
PY
```
## 4) Release compatibility matrix
| Platform | OS | Arch | Binary compatibility | Runtime assumptions | Notes |
| --- | --- | --- | --- | --- | --- |
| Linux | Linux | `x86_64` / `aarch64` | Recommended | UTF-8 locale and terminal (TTY) | Required for TUI rendering |
| macOS | Darwin | `aarch64` / `x86_64` | Recommended | UTF-8 locale and terminal (TTY) | Build validation expected |
| Windows | Windows | `x86_64` | Planned / not guaranteed | UTF-8 locale and terminal (TTY) | Future support candidate |
- Rust compatibility: Release pipelines are required to document exact `rust` toolchain versions per artifact.
- Source compatibility is guaranteed only for the same `distribution_contract_version` and `artifact_version` tuple.
## 5) Checksums and integrity
- Every release artifact must include `checksums.json`, `manifest.json`, and `provenance.json`.
- All checksums use SHA-256.
- Consumers treat a release as valid only if:
1. Artifact checksum matches manifest entry.
2. Manifest has reproducibility metadata matching expected tuple (version, target, profile, toolchain, git rev).
## 6) Retention and migration
- The canonical contract file is immutable once published for a release version.
- Backward compatibility matrix changes require a migration note in the contract changelog.
- Deprecated/removed platform support is announced via the changelog and removed only after a deprecation cycle.
- Contract index migration note: `dist/{distribution_contract_version}/index.json` is a temporary compatibility alias and is retired on **2026-07-01**.
## 7) Changelog and compatibility governance
- Compatibility assumption changes require a changelog entry in `docs/distribution-changelog.md`.
- Contract fields are first-class API: any consumer-facing contract change must update:
- `docs/distribution.md`
- `docs/distribution-contract.json`
- `docs/distribution-changelog.md`
## 8) Source-of-truth documents
Machine-readable contract file: `docs/distribution-contract.json`
The JSON contract is the machine-readable interface; this ADR should be treated as the human-readable interpretation.
## 9) Operational governance and release hygiene
- Release ownership handoff is explicit and bounded:
- The active release steward must own the distribution contract updates for the PR.
- Ownership is transferred only after `docs/distribution.md`, `docs/distribution-changelog.md`, and `docs/distribution-contract.json` are updated together.
- Handoff requires at least one release cycle of overlap so downstream consumers have a recovery window.
- Deprecation workflow is fixed:
- Legacy index path support remains for the documented minimum notice period.
- Any compatibility assumption change requires a changelog entry and a migration step before sunset.
- Minimum retention window:
- Keep at least six release generations in the contract retention policy.
- Do not remove deprecated aliases before the contract's documented notice date and retention cleanup policy are both met.

View File

@@ -13,11 +13,11 @@
flake-utils,
...
}:
flake-utils.lib.eachDefaultSystem (
(flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = import nixpkgs { inherit system; };
codex-controller-loop = import ./nix/packages/codex-controller-loop.nix { inherit pkgs; };
codex-controller-loop = pkgs.callPackage ./nix/packages/codex-controller-loop.nix { };
in
{
packages = {
@@ -43,6 +43,10 @@
pkgs.rust-analyzer
];
};
}
);
}))
// {
overlays.default = final: prev: {
codex-controller-loop = final.callPackage ./nix/packages/codex-controller-loop.nix { };
};
};
}

View File

@@ -4,6 +4,7 @@ pkgs.rustPlatform.buildRustPackage {
version = "0.1.0";
src = ../..;
cargoLock.lockFile = ../../Cargo.lock;
doCheck = false;
meta = {
description = "Standalone Codex controller loop framework";

404
scripts/release-gate.py Executable file
View File

@@ -0,0 +1,404 @@
#!/usr/bin/env python3
"""CI gate for deterministic distribution contract enforcement."""
from __future__ import annotations

import argparse
import hashlib
import json
import os
import re
import subprocess
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, NoReturn
# All paths are anchored on the repository root (the parent of scripts/),
# so the gate behaves the same regardless of the current working directory.
ROOT = Path(__file__).resolve().parents[1]
# Machine-readable distribution contract (validated by validate_contract).
CONTRACT_PATH = ROOT / "docs" / "distribution-contract.json"
# Human-readable contract documentation.
DOC_PATH = ROOT / "docs" / "distribution.md"
# Contract changelog; required to be updated on compatibility changes.
CHANGELOG_PATH = ROOT / "docs" / "distribution-changelog.md"
# Configuration consumed by the release orchestrator.
CONFIG_PATH = ROOT / "scripts" / "release-orchestrator.config.json"
# Canonical single release generator entrypoint.
ORCHESTRATOR_PATH = ROOT / "scripts" / "release-orchestrator.py"
def fail(message: str) -> NoReturn:
    """Abort the gate run with a fatal error.

    Raises SystemExit carrying ``message``, which the interpreter prints to
    stderr and converts into a non-zero exit status — exactly what a CI gate
    needs.  Annotated ``NoReturn`` (the original said ``-> None``) because
    this function never returns; type checkers can now see that code after a
    ``fail(...)`` call is unreachable.
    """
    raise SystemExit(message)
def run(cmd: List[str], *, env: Dict[str, str] | None = None, cwd: Path | None = None) -> str:
proc = subprocess.run(
cmd,
check=True,
cwd=cwd,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
return proc.stdout.strip()
def sha256_file(path: Path) -> str:
    """Return the hex SHA-256 digest of ``path``.

    The file is streamed in 1 MiB chunks so arbitrarily large artifacts can
    be hashed without loading them fully into memory.
    """
    hasher = hashlib.sha256()
    with path.open("rb") as handle:
        while True:
            chunk = handle.read(1 << 20)
            if not chunk:
                break
            hasher.update(chunk)
    return hasher.hexdigest()
def parse_datetime_utc(value: str) -> datetime:
    """Parse an ISO-8601 timestamp into a timezone-aware UTC ``datetime``.

    A trailing ``Z`` (e.g. ``2026-07-01T00:00:00Z``) is normalized to
    ``+00:00`` for ``datetime.fromisoformat``; naive timestamps are assumed
    to already be UTC; aware timestamps are converted to UTC.
    """
    normalized = value[:-1] + "+00:00" if value.endswith("Z") else value
    parsed = datetime.fromisoformat(normalized)
    if parsed.tzinfo is None:
        parsed = parsed.replace(tzinfo=timezone.utc)
    return parsed.astimezone(timezone.utc)
def load_json(path: Path) -> Dict[str, Any]:
    """Read and decode a UTF-8 JSON document from ``path``."""
    with path.open(encoding="utf-8") as handle:
        return json.load(handle)
def write_json(path: Path, payload: Dict[str, Any]) -> None:
    """Write ``payload`` to ``path`` as deterministic, pretty-printed JSON.

    Output uses 2-space indentation, sorted keys, and a trailing newline so
    repeated serializations of equal payloads are byte-identical.  Missing
    parent directories are created.
    """
    serialized = json.dumps(payload, indent=2, sort_keys=True) + "\n"
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(serialized, encoding="utf-8")
def read_version_from_cargo() -> str:
    """Extract the crate version from the repository's Cargo.toml."""
    cargo_text = (ROOT / "Cargo.toml").read_text(encoding="utf-8")
    for raw_line in cargo_text.splitlines():
        match = re.match(r'^version\s*=\s*"([^"]+)"$', raw_line.strip())
        if match:
            return match.group(1)
    fail("Unable to read package version from Cargo.toml")
def validate_contract(contract: Dict[str, Any]) -> None:
    """Enforce structural invariants of docs/distribution-contract.json.

    Calls fail() (which raises SystemExit) on the first violation; returns
    None when every check passes.
    """
    # Top-level keys the contract document must define.
    required_root = {
        "schema_version",
        "contract_version",
        "artifact",
        "compatibility_matrix",
        "assumptions",
        "reproducibility_metadata",
        "provenance_metadata",
        "checksum_policy",
        "compatibility_policy",
        "deprecation_policy",
        "retention_policy",
        "migration_steps",
        "changelog",
        "maintenance_governance",
    }
    missing = sorted(required_root - contract.keys())
    if missing:
        fail(f"distribution contract missing required top-level keys: {', '.join(missing)}")
    artifact = contract["artifact"]
    required_artifact = {
        "name",
        "entrypoints",
        "distribution_index",
        "versioning",
        "naming",
        "release_artifacts",
    }
    missing_artifact = sorted(required_artifact - artifact.keys())
    if missing_artifact:
        fail(f"distribution contract artifact section missing keys: {', '.join(missing_artifact)}")
    release_artifacts = artifact["release_artifacts"]
    required_release_artifacts = {
        "formats",
        "required_manifests",
        "index_path_template",
        "artifact_directory_template",
        "orchestrator",
    }
    missing_release_artifacts = sorted(required_release_artifacts - release_artifacts.keys())
    if missing_release_artifacts:
        fail(
            "distribution contract missing required release_artifact keys: "
            f"{', '.join(missing_release_artifacts)}"
        )
    # Fixed invariants mirrored by scripts/release-orchestrator.config.json.
    if contract["artifact"]["distribution_index"]["canonical"] != "dist/index.json":
        fail("canonical distribution index must be dist/index.json")
    if contract["checksum_policy"].get("algorithm") != "sha256":
        fail("checksum policy must require sha256")
    # The legacy index path must still be within its support window.
    deprecation = contract["deprecation_policy"]["legacy_index_path"]
    sunset = parse_datetime_utc(deprecation["supported_until"])
    now = datetime.now(timezone.utc)
    if sunset <= now:
        fail("deprecation supported-until date is in the past")
    if contract["retention_policy"].get("kept_release_generations", 0) < 6:
        fail("retention policy must keep at least 6 release generations")
    mg = contract["maintenance_governance"]
    if mg.get("release_ownership", {}).get("ownership_handoff_required") is None:
        fail("maintenance_governance.release_ownership.ownership_handoff_required is required")
    if mg.get("deprecation_governance", {}).get("required_notice_days", 0) < 30:
        fail("maintenance_governance.deprecation_governance.required_notice_days must be at least 30")
    # Basic machine-readable compatibility guardrails.
    comp_matrix = contract["compatibility_matrix"]
    if not isinstance(comp_matrix, list) or not comp_matrix:
        fail("compatibility_matrix must be a non-empty array")
def validate_docs_and_changelog() -> None:
    """Check docs/distribution.md and the distribution changelog for required references."""
    text = DOC_PATH.read_text(encoding="utf-8")
    low = text.lower()
    # Markers are matched case-insensitively against the lowered document.
    required_markers = [
        "dist/index.json",
        "dist/{distribution_contract_version}/index.json",
        "release compatibility matrix",
        "release ownership handoff",
        "deprecation workflow",
        "minimum retention window",
    ]
    for marker in required_markers:
        if marker not in low:
            fail(f"docs/distribution.md is missing marker: {marker}")
    contract = load_json(CONTRACT_PATH)
    major_minor = ".".join(contract["contract_version"].split(".")[:2])
    # The version tag check is case-sensitive against the original text.
    if f"distribution-contract@{major_minor}" not in text:
        fail(f"docs/distribution.md does not reference contract version distribution-contract@{major_minor}")
    if "dist" not in CHANGELOG_PATH.read_text(encoding="utf-8").lower():
        fail("docs/distribution-changelog.md appears invalid for distribution contract tracking")
def verify_checksum_manifest(package_root: Path, checksum_payload: Dict[str, Any], artifact_path: Path) -> None:
    """Validate a package's checksums payload against on-disk contents.

    Checks (1) every manifested file exists with the recorded hash,
    (2) every file under *package_root* is manifested, and (3) the archive at
    *artifact_path* matches ``artifact_sha256``. Calls fail() on the first
    violation.

    Bug fix: the checksums manifest cannot contain its own hash, so the
    orchestrator never lists it — the coverage pass must exempt it.
    Previously it was required, which made the gate fail on every package.
    """
    if not isinstance(checksum_payload, dict):
        fail("checksums payload is not a JSON object")
    files = checksum_payload.get("files")
    if not isinstance(files, list) or not files:
        fail("checksums manifest must include a non-empty files array")
    manifest_map = {
        item.get("path"): item.get("sha256") for item in files
    }
    for item in files:
        rel = item.get("path")
        expected = item.get("sha256")
        if not rel or not expected:
            fail("invalid checksums file entry")
        computed = sha256_file(package_root / rel)
        if computed != expected:
            fail(f"checksum mismatch for package file {rel}")
    # Name of the checksums manifest itself (as declared by the orchestrator's
    # artifact_entrypoints), which is exempt from the coverage check below.
    checksum_manifest_name = (
        checksum_payload.get("artifact_entrypoints", {}).get("checksums", "checksums.json")
    )
    for file_path in sorted(package_root.rglob("*")):
        if not file_path.is_file():
            continue
        rel = file_path.relative_to(package_root).as_posix()
        if rel == checksum_manifest_name:
            continue
        if rel not in manifest_map:
            fail(f"checksums manifest missing entry for package file {rel}")
    expected_artifact = checksum_payload.get("artifact_sha256")
    if expected_artifact != sha256_file(artifact_path):
        fail("artifact sha256 does not match checksums.json payload")
def validate_artifact_entry(
    entry: Dict[str, Any],
    contract: Dict[str, Any],
    source_date_epoch: str,
) -> Dict[str, str]:
    """Cross-check one index entry against its on-disk manifest, checksums and provenance.

    Returns the entry's artifact/manifest/checksums hashes so the caller can
    compare repeated runs; calls fail() (SystemExit) on the first mismatch.
    """
    root = ROOT
    # All index paths are repo-root relative.
    artifact_path = root / entry["artifact_file"]
    manifest_path = root / entry["manifest_file"]
    checksums_path = root / entry["checksums_file"]
    if not artifact_path.exists():
        fail(f"artifact path missing: {artifact_path}")
    if not manifest_path.exists():
        fail(f"manifest path missing: {manifest_path}")
    if not checksums_path.exists():
        fail(f"checksums path missing: {checksums_path}")
    manifest = load_json(manifest_path)
    checksums = load_json(checksums_path)
    # Keys every package manifest must carry (distribution-manifest-v1).
    required_manifest_keys = {
        "schema_version",
        "contract_version",
        "artifact",
        "artifact_version",
        "target",
        "profile",
        "toolchain",
        "dist_revision",
        "git",
        "build_time_inputs",
        "content",
        "generated_at",
    }
    if not required_manifest_keys <= manifest.keys():
        missing = ", ".join(sorted(required_manifest_keys - manifest.keys()))
        fail(f"manifest missing keys: {missing}")
    # The manifest must agree with the index entry field-for-field.
    if manifest["artifact_version"] != entry["version"]:
        fail("manifest artifact_version mismatch")
    if manifest["toolchain"] != entry["toolchain"]:
        fail("manifest toolchain mismatch")
    if manifest["git"]["revision"] != entry["git_rev"]:
        fail("manifest git revision mismatch")
    build_inputs = manifest["build_time_inputs"]
    if build_inputs.get("source_date_epoch") != source_date_epoch:
        fail("manifest source_date_epoch mismatch")
    if build_inputs.get("target") != entry["target"]:
        fail("manifest target mismatch")
    if build_inputs.get("profile") != entry["profile"]:
        fail("manifest profile mismatch")
    if manifest.get("artifact", {}).get("sha256") != checksums.get("artifact_sha256"):
        fail("manifest artifact sha256 must match checksums.json artifact_sha256")
    # Provenance lives next to the manifest, under the name the manifest declares.
    provenance_file = manifest["content"].get("provenance_file")
    if not provenance_file:
        fail("manifest content.provenance_file missing")
    provenance_path = manifest_path.parent / provenance_file
    if not provenance_path.exists():
        fail(f"provenance file missing: {provenance_file}")
    provenance = load_json(provenance_path)
    # The contract dictates which provenance fields are mandatory.
    required_prov_fields = set(contract["provenance_metadata"]["required_fields"])
    if not required_prov_fields <= provenance.keys():
        missing = ", ".join(sorted(required_prov_fields - provenance.keys()))
        fail(f"provenance missing fields: {missing}")
    package_root = manifest_path.parent
    verify_checksum_manifest(package_root, checksums, artifact_path)
    return {
        "artifact_file": entry["artifact_file"],
        "artifact_sha": checksums["artifact_sha256"],
        "manifest_sha": sha256_file(manifest_path),
        "checksums_sha": sha256_file(checksums_path),
    }
def collect_release_entries(index_payload: Dict[str, Any], version: str, dist_revision: str, toolchain: str) -> List[Dict[str, Any]]:
    """Return every index artifact entry matching version, dist_revision and toolchain."""
    releases = index_payload.get("releases")
    if not isinstance(releases, list):
        fail("distribution index must contain releases as an array")
    matches: List[Dict[str, Any]] = []
    wanted = (release for release in releases if release.get("version") == version)
    for release in wanted:
        for target in release.get("targets", []):
            for profile in target.get("profiles", []):
                for artifact in profile.get("artifacts", []):
                    same_revision = artifact.get("dist_revision") == dist_revision
                    same_toolchain = artifact.get("toolchain") == toolchain
                    if same_revision and same_toolchain:
                        matches.append(artifact)
    return matches
def run_release_cycle(
    version: str,
    profile: str,
    target: str | None,
    dist_revision: str,
    source_date_epoch: str,
    toolchain: str,
    contract: Dict[str, Any],
    config: Dict[str, Any],
) -> Dict[str, str]:
    """Run the release orchestrator once and validate everything it produced.

    Returns a sorted mapping of artifact_file -> artifact sha256 so the caller
    can compare two runs for determinism. Calls fail() on any inconsistency.

    Bug fix: ``config["index_path_template"]`` contains ``{dist_root}``
    placeholders and was previously joined onto ROOT unformatted, so the gate
    looked for a literal ``{dist_root}/index.json`` path that never exists.
    """
    index_rel = config["index_path_template"].format(
        dist_root=config["dist_root"],
        contract_version=contract["contract_version"],
    )
    index_path = (ROOT / index_rel).resolve()
    env = os.environ.copy()
    # Pin the epoch so both gate runs build byte-identical artifacts.
    env["SOURCE_DATE_EPOCH"] = source_date_epoch
    cmd = [
        str(Path(sys.executable)),
        str(ORCHESTRATOR_PATH),
        "--version",
        version,
        "--profile",
        profile,
        "--dist-revision",
        dist_revision,
        "--toolchain",
        toolchain,
    ]
    if target:
        cmd.extend(["--target", target])
    run(cmd, env=env)
    index_payload = load_json(index_path)
    if index_payload.get("schema_version") != "distribution-index-v1":
        fail("distribution index schema_version mismatch")
    if index_payload.get("contract_version") != contract["contract_version"]:
        fail("distribution index contract_version mismatch")
    entries = collect_release_entries(index_payload, version, dist_revision, toolchain)
    if not entries:
        fail("no release entries produced for deterministic gate run")
    state: Dict[str, Dict[str, str]] = {}
    for entry in entries:
        artifact_file = entry.get("artifact_file")
        if not artifact_file:
            fail("index entry missing artifact_file")
        state[artifact_file] = validate_artifact_entry(entry, contract, source_date_epoch)
    return {k: v["artifact_sha"] for k, v in sorted(state.items())}
def parse_args() -> argparse.Namespace:
    """CLI options for the gate; defaults pin a fixed SOURCE_DATE_EPOCH for determinism."""
    parser = argparse.ArgumentParser(description="Distribution contract CI gate")
    parser.add_argument("--version", default=None, help="artifact version override")
    parser.add_argument("--profile", default="release", help="cargo profile")
    parser.add_argument("--target", default=None, help="target triple (optional)")
    parser.add_argument("--dist-revision", default="r1-ci", help="distribution revision")
    parser.add_argument("--source-date-epoch", default="1700000000", help="SOURCE_DATE_EPOCH")
    return parser.parse_args()
def main() -> int:
    """Entry point: validate contract and docs, then prove build determinism.

    Runs the release orchestrator twice with identical inputs and fails if
    any artifact checksum differs between the two runs.
    """
    args = parse_args()
    contract = load_json(CONTRACT_PATH)
    config = load_json(CONFIG_PATH)
    validate_contract(contract)
    validate_docs_and_changelog()
    version = args.version or read_version_from_cargo()
    profile = args.profile
    target = args.target
    dist_revision = args.dist_revision
    source_date_epoch = args.source_date_epoch
    # Both cycles must use the same toolchain label for entries to match.
    toolchain = run(["rustc", "--version"], cwd=ROOT)
    print("distribution contract gate: running first deterministic build")
    first = run_release_cycle(version, profile, target, dist_revision, source_date_epoch, toolchain, contract, config)
    print("distribution contract gate: running second deterministic build")
    second = run_release_cycle(version, profile, target, dist_revision, source_date_epoch, toolchain, contract, config)
    if first != second:
        fail("artifact checksum drift detected between repeated release generations")
    print("distribution contract gate: deterministic artifact checksums match")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,16 @@
{
"schema_version": "release-orchestrator-config-v1",
"contract_version": "1.0.0",
"artifact_name": "codex-controller-loop",
"default_profile": "release",
"default_dist_revision": "r1",
"artifact_ext": "tar.gz",
"dist_root": "dist",
"artifact_dir_template": "{dist_root}/{contract_version}/{version}/{target}/{profile}/{toolchain}/{gitsha}/{dist_revision}",
"artifact_filename_template": "{artifact_name}-v{version}-{target}-{profile}-{toolchain}-{gitsha}-{dist_revision}.{ext}",
"index_path_template": "{dist_root}/index.json",
"legacy_index_path_template": "{dist_root}/{contract_version}/index.json",
"manifest_filename": "manifest.json",
"provenance_filename": "provenance.json",
"checksums_filename": "checksums.json"
}

501
scripts/release-orchestrator.py Executable file
View File

@@ -0,0 +1,501 @@
#!/usr/bin/env python3
"""Deterministic release artifact orchestration for codex-controller-loop."""
from __future__ import annotations
import argparse
import hashlib
import json
import os
import re
import subprocess
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, Iterable, List
ROOT = Path(__file__).resolve().parents[1]
CONFIG_PATH = ROOT / "scripts" / "release-orchestrator.config.json"
CONTRACT_PATH = ROOT / "docs" / "distribution-contract.json"
def run(cmd: List[str], cwd: Path | None = None, env: Dict[str, str] | None = None) -> str:
    """Run *cmd*, raise CalledProcessError on failure, and return stripped stdout."""
    result = subprocess.run(
        cmd,
        check=True,
        cwd=cwd,
        env=env,
        capture_output=True,
        text=True,
    )
    return result.stdout.strip()
def sha256_file(path: Path) -> str:
    """Hex SHA-256 digest of a file, streamed in 1 MiB chunks."""
    hasher = hashlib.sha256()
    with path.open("rb") as handle:
        while True:
            chunk = handle.read(1 << 20)
            if not chunk:
                break
            hasher.update(chunk)
    return hasher.hexdigest()
def posix(path: Path) -> str:
    """Render *path* with forward-slash separators (stable across operating systems)."""
    return path.as_posix()
def collect_directory_checksums(package_dir: Path) -> List[Dict[str, str]]:
    """Checksum every regular file under *package_dir*, sorted by path for determinism."""
    entries: List[Dict[str, str]] = []
    for candidate in sorted(package_dir.rglob("*")):
        if candidate.is_file():
            relative = candidate.relative_to(package_dir)
            entries.append({"path": posix(relative), "sha256": sha256_file(candidate)})
    return entries
def verify_checksums(
    package_dir: Path,
    checksum_path: Path,
    artifact_path: Path,
    checksum_payload: Dict[str, Any],
) -> None:
    """Self-verify a freshly written package against its checksum manifest.

    Raises RuntimeError on the first missing file, hash mismatch, or package
    file absent from the manifest. The checksum manifest itself is exempt
    from the coverage pass, since it cannot contain its own hash.
    """
    checksum_file = posix(checksum_path.relative_to(package_dir))
    manifested_files = {entry["path"]: entry["sha256"] for entry in checksum_payload["files"]}
    # Pass 1: every manifested file must exist and hash as recorded.
    for relative_path, expected_sha in list(manifested_files.items()):
        file_path = package_dir / relative_path
        if not file_path.exists():
            raise RuntimeError(f"checksum manifest referenced missing file: {relative_path}")
        if sha256_file(file_path) != expected_sha:
            raise RuntimeError(f"checksum mismatch for file: {relative_path}")
    # Pass 2: every real package file (except the manifest itself) must be covered.
    for file_path in sorted(package_dir.rglob("*")):
        if not file_path.is_file():
            continue
        relative_path = posix(file_path.relative_to(package_dir))
        if relative_path == checksum_file:
            continue
        if relative_path not in manifested_files:
            raise RuntimeError(f"manifest missing checksum entry for file: {relative_path}")
    # Pass 3: the packaged archive must match the recorded artifact hash.
    artifact_expected = checksum_payload["artifact_sha256"]
    if sha256_file(artifact_path) != artifact_expected:
        raise RuntimeError(f"artifact checksum mismatch: {artifact_path.name}")
def read_package_version() -> str:
    """Parse the crate version out of the repository's Cargo.toml."""
    for raw_line in (ROOT / "Cargo.toml").read_text(encoding="utf-8").splitlines():
        found = re.match(r'^version\s*=\s*"([^"]+)"', raw_line.strip())
        if found:
            return found.group(1)
    raise RuntimeError("version not found in Cargo.toml")
def load_json(path: Path) -> Dict[str, Any]:
    """Read and parse a UTF-8 JSON document."""
    contents = path.read_text(encoding="utf-8")
    return json.loads(contents)
def write_json(path: Path, payload: Dict[str, Any]) -> None:
    """Write *payload* deterministically: sorted keys, 2-space indent, trailing newline."""
    path.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(payload, indent=2, sort_keys=True)
    path.write_text(serialized + "\n", encoding="utf-8")
def slugify_token(value: str) -> str:
    """Collapse runs of characters outside [A-Za-z0-9._-] into single dashes."""
    collapsed = re.sub(r"[^A-Za-z0-9._-]+", "-", value.strip())
    trimmed = collapsed.strip("-.")
    if trimmed:
        return trimmed
    return "unknown"
def target_platform(target: str) -> str:
    """Map a target triple onto a coarse platform label (windows/darwin/linux)."""
    # Order matters: "windows" wins before the darwin substrings are checked.
    for needle, platform in (("windows", "windows"), ("apple", "darwin"), ("darwin", "darwin")):
        if needle in target:
            return platform
    return "linux"
def collect_targets(targets: Iterable[str] | None, profile: str) -> List[str]:
    """Resolve the list of build targets.

    Explicit targets win; otherwise the host triple reported by ``rustc -vV``
    is used, with a final x86_64 Linux fallback. *profile* is kept for
    interface compatibility but no longer affects the fallback.

    Fix: the previous fallback was a conditional whose two branches were the
    same string literal; collapsed to the single constant.
    """
    if targets:
        return list(targets)
    host_info = run(["rustc", "-vV"], cwd=ROOT)
    for line in host_info.splitlines():
        if line.startswith("host: "):
            return [line.split(":", 1)[1].strip()]
    return ["x86_64-unknown-linux-gnu"]
def build_profile_args(profile: str) -> List[str]:
    """Cargo CLI flags selecting *profile* (``release`` has a dedicated flag)."""
    return ["--release"] if profile == "release" else ["--profile", profile]
@dataclass(frozen=True)
class SourceInputs:
    """Immutable bundle of every input that determines a build's outputs."""

    source_dir: Path  # repository root
    contract_version: str  # from docs/distribution-contract.json
    contract: Dict[str, Any]  # full parsed contract document
    version: str  # artifact (crate) version string
    profile: str  # cargo profile, e.g. "release"
    targets: List[str]  # target triples to build
    dist_revision: str  # distribution revision label, e.g. "r1"
    toolchain: str  # full `rustc --version` string
    toolchain_slug: str  # filesystem-safe toolchain token
    git_sha_full: str  # HEAD commit hash, full
    git_sha_short: str  # HEAD commit hash, abbreviated
    source_date_epoch: str  # SOURCE_DATE_EPOCH driving reproducible timestamps
def build_entry_entries(
    inputs: SourceInputs, config: Dict[str, Any], args: argparse.Namespace
) -> List[Dict[str, Any]]:
    """Build, stage, package and checksum one release artifact per target.

    For each target this builds the binary (unless ``--no-build``), stages
    ``bin/<name>`` plus ``provenance.json`` into a package directory, produces
    a deterministic tarball, then writes ``manifest.json`` and
    ``checksums.json`` and self-verifies the package. Returns one index entry
    per produced artifact.

    Two fixes over the previous revision:
      * dist paths are anchored at ``inputs.source_dir`` (the repo root); they
        were previously CWD-relative, which made the later
        ``relative_to(ROOT)`` / ``relative_to(inputs.source_dir)`` calls raise
        ``ValueError``.
      * ``manifest.json`` is written exactly once, *before* checksum
        collection; it was previously rewritten afterwards (embedding the
        checksum payload), which invalidated the hash recorded for it in
        ``checksums.json`` and made ``verify_checksums`` fail.
    """
    index_entries: List[Dict[str, Any]] = []
    build_env = os.environ.copy()
    # Propagate the pinned epoch so cargo/tar outputs are reproducible.
    build_env["SOURCE_DATE_EPOCH"] = inputs.source_date_epoch
    generated_at = datetime.fromtimestamp(
        int(inputs.source_date_epoch), tz=timezone.utc
    ).isoformat()
    for target in inputs.targets:
        build_cmd = [
            "cargo",
            "build",
            "--locked",
            "--target",
            target,
            *build_profile_args(inputs.profile),
        ]
        if not args.no_build:
            run(build_cmd, cwd=inputs.source_dir, env=build_env)
        build_path = (
            inputs.source_dir
            / "target"
            / target
            / ("release" if inputs.profile == "release" else inputs.profile)
            / config["artifact_name"]
        )
        if not build_path.exists():
            raise FileNotFoundError(f"missing compiled artifact: {build_path}")
        # All dist output lives under the repository root, not the CWD.
        artifact_dir = inputs.source_dir / config["artifact_dir_template"].format(
            dist_root=config["dist_root"],
            contract_version=inputs.contract_version,
            version=inputs.version,
            target=target,
            profile=inputs.profile,
            toolchain=inputs.toolchain_slug,
            gitsha=inputs.git_sha_short,
            dist_revision=inputs.dist_revision,
        )
        package_dir = artifact_dir / "package"
        package_dir.mkdir(parents=True, exist_ok=True)
        bin_dir = package_dir / "bin"
        bin_dir.mkdir(parents=True, exist_ok=True)
        staged_binary = bin_dir / config["artifact_name"]
        staged_binary.write_bytes(build_path.read_bytes())
        try:
            staged_binary.chmod(0o755)
        except OSError:
            # Best effort: some filesystems do not support POSIX modes.
            pass
        artifact_name = config["artifact_filename_template"].format(
            artifact_name=config["artifact_name"],
            version=inputs.version,
            target=target,
            profile=inputs.profile,
            toolchain=inputs.toolchain_slug,
            gitsha=inputs.git_sha_short,
            dist_revision=inputs.dist_revision,
            ext=config["artifact_ext"],
        )
        artifact_file = artifact_dir / artifact_name
        manifest_path = package_dir / config["manifest_filename"]
        checksum_path = package_dir / config["checksums_filename"]
        provenance_path = package_dir / config["provenance_filename"]
        provenance = {
            "schema_version": "distribution-provenance-v1",
            "contract_version": inputs.contract_version,
            "artifact": {
                "name": config["artifact_name"],
                "target": target,
                "profile": inputs.profile,
                "dist_revision": inputs.dist_revision,
                "toolchain": inputs.toolchain,
                "git": {
                    "full": inputs.git_sha_full,
                    "short": inputs.git_sha_short,
                },
            },
            "build_inputs": {
                "source_date_epoch": inputs.source_date_epoch,
                "build_environment": {
                    "RUSTFLAGS": os.getenv("RUSTFLAGS", ""),
                    "CARGO_NET_OFFLINE": os.getenv("CARGO_NET_OFFLINE", ""),
                    "CARGO_TERM_COLOR": os.getenv("CARGO_TERM_COLOR", ""),
                },
                "build_command": build_cmd,
            },
            "build_artifact": {
                "binary_name": config["artifact_name"],
                "package_root": posix(package_dir.relative_to(inputs.source_dir)),
                "manifest_file": config["manifest_filename"],
                "checksums_file": config["checksums_filename"],
            },
            "generated_at": generated_at,
        }
        provenance_path.write_text(json.dumps(provenance, indent=2, sort_keys=True) + "\n", encoding="utf-8")
        # Deterministic tarball of the package as staged so far
        # (binary + provenance; manifest/checksums are written afterwards).
        run(
            [
                "tar",
                "--sort=name",
                "--owner=0",
                "--group=0",
                "--numeric-owner",
                f"--mtime=@{inputs.source_date_epoch}",
                "--format=ustar",
                "-czf",
                str(artifact_file),
                "-C",
                str(package_dir),
                ".",
            ],
            cwd=inputs.source_dir,
            # GZIP=-n omits the original name/timestamp from the gzip header.
            env={**build_env, "GZIP": "-n"},
        )
        artifact_sha256 = sha256_file(artifact_file)
        # Single, final manifest write: checksum collection below must see the
        # exact bytes that land on disk.
        manifest = {
            "schema_version": "distribution-manifest-v1",
            "contract_version": inputs.contract_version,
            "artifact": {
                "name": config["artifact_name"],
                "filename": artifact_name,
                "path": posix(artifact_file.relative_to(inputs.source_dir)),
                "size_bytes": artifact_file.stat().st_size,
                "sha256": artifact_sha256,
            },
            "artifact_version": inputs.version,
            "target": target,
            "platform": target_platform(target),
            "profile": inputs.profile,
            "toolchain": inputs.toolchain,
            "dist_revision": inputs.dist_revision,
            "git": {
                "revision": inputs.git_sha_full,
                "short": inputs.git_sha_short,
            },
            "build_time_inputs": {
                "source_date_epoch": inputs.source_date_epoch,
                "build_command": build_cmd,
                "target": target,
                "profile": inputs.profile,
                "artifact_name": config["artifact_name"],
                "rustc": inputs.toolchain,
            },
            "content": {
                "generated_by": "scripts/release-orchestrator.py",
                "checksum_file": checksum_path.name,
                "provenance_file": provenance_path.name,
            },
            "generated_at": generated_at,
        }
        manifest_path.write_text(json.dumps(manifest, indent=2, sort_keys=True) + "\n", encoding="utf-8")
        # Checksums cover every package file written above (manifest included);
        # checksums.json itself is written afterwards and cannot self-hash.
        checksums = collect_directory_checksums(package_dir)
        checksum_payload = {
            "schema_version": "distribution-checksums-v1",
            "generated_by": "scripts/release-orchestrator.py",
            "generated_at": generated_at,
            "artifact_file": artifact_name,
            "artifact_sha256": artifact_sha256,
            "files": checksums,
            "artifact_entrypoints": {
                "binary": posix(Path("bin") / config["artifact_name"]),
                "manifest": config["manifest_filename"],
                "checksums": config["checksums_filename"],
                "provenance": config["provenance_filename"],
            },
        }
        checksum_path.write_text(json.dumps(checksum_payload, indent=2, sort_keys=True) + "\n", encoding="utf-8")
        verify_checksums(package_dir, checksum_path, artifact_file, checksum_payload)
        index_entries.append(
            {
                "version": inputs.version,
                "target": target,
                "profile": inputs.profile,
                "platform": target_platform(target),
                "toolchain": inputs.toolchain,
                "toolchain_slug": inputs.toolchain_slug,
                "git_rev": inputs.git_sha_full,
                "dist_revision": inputs.dist_revision,
                "source_date_epoch": inputs.source_date_epoch,
                "generated_at": generated_at,
                "artifact_file": posix(artifact_file.relative_to(ROOT)),
                "artifact_sha256": artifact_sha256,
                "manifest_file": posix(manifest_path.relative_to(ROOT)),
                "checksums_file": posix(checksum_path.relative_to(ROOT)),
            }
        )
    return index_entries
def merge_index(
    contract_version: str,
    dist_root: str,
    index_template: str,
    legacy_index_template: str | None,
    entries: List[Dict[str, Any]],
) -> None:
    """Merge freshly built entries into the distribution index (and legacy copy).

    Fixes two defects in the previous revision:
      * the index path was resolved relative to the process CWD while the CI
        gate reads it relative to the repository root; it is now anchored at
        ``ROOT``.
      * re-reading an index previously written by this function crashed,
        because the serialized ``releases`` value is a *list* while the merge
        code assumed a nested dict; the loaded list form is now converted
        back into the mergeable nested-dict form.
    """
    if not entries:
        return
    index_path = ROOT / index_template.format(
        dist_root=dist_root, contract_version=contract_version
    )
    # version -> target -> profile -> [artifact entries tagged with _index_key]
    releases: Dict[str, Dict[str, Dict[str, List[Dict[str, Any]]]]] = {}
    if index_path.exists():
        existing = load_json(index_path)
        for release in existing.get("releases", []):
            version_bucket = releases.setdefault(release["version"], {})
            for target_item in release.get("targets", []):
                target_bucket = version_bucket.setdefault(target_item["target"], {})
                for profile_item in target_item.get("profiles", []):
                    artifacts = target_bucket.setdefault(profile_item["profile"], [])
                    for artifact in profile_item.get("artifacts", []):
                        key = (
                            f"{artifact['toolchain_slug']}|"
                            f"{artifact['git_rev'][:12]}|{artifact['dist_revision']}"
                        )
                        artifacts.append({**artifact, "_index_key": key})
    for entry in entries:
        version_bucket = releases.setdefault(entry["version"], {})
        target_bucket = version_bucket.setdefault(entry["target"], {})
        profile_bucket = target_bucket.setdefault(entry["profile"], [])
        index_key = f"{entry['toolchain_slug']}|{entry['git_rev'][:12]}|{entry['dist_revision']}"
        # Same toolchain/commit/revision replaces any previous entry.
        profile_bucket = [candidate for candidate in profile_bucket if candidate.get("_index_key") != index_key]
        profile_bucket.append({**entry, "_index_key": index_key})
        target_bucket[entry["profile"]] = sorted(
            profile_bucket,
            key=lambda candidate: candidate["_index_key"],
        )
    # Serialize the nested dict form into the stable, sorted list form.
    ordered_releases: List[Dict[str, Any]] = []
    for version in sorted(releases.keys(), key=str):
        target_map = releases[version]
        target_items = []
        for target in sorted(target_map.keys(), key=str):
            profile_map = target_map[target]
            profile_items = []
            for profile in sorted(profile_map.keys(), key=str):
                profile_items.append(
                    {
                        "profile": profile,
                        "artifacts": [
                            {k: v for k, v in candidate.items() if k != "_index_key"}
                            for candidate in profile_map[profile]
                        ],
                    }
                )
            target_items.append({"target": target, "profiles": profile_items})
        ordered_releases.append({"version": version, "targets": target_items})
    payload = {
        "schema_version": "distribution-index-v1",
        "contract_version": contract_version,
        "generated_at": datetime.fromtimestamp(
            int(entries[0]["source_date_epoch"]), tz=timezone.utc
        ).isoformat(),
        "releases": ordered_releases,
    }
    write_json(index_path, payload)
    if legacy_index_template:
        legacy_index_path = ROOT / legacy_index_template.format(
            dist_root=dist_root, contract_version=contract_version
        )
        write_json(legacy_index_path, payload)
def parse_args() -> argparse.Namespace:
    """CLI options for the orchestrator; None defaults fall back to config values."""
    parser = argparse.ArgumentParser(description="Generate deterministic dist artifacts.")
    parser.add_argument("--version", help="artifact version")
    parser.add_argument("--profile", default=None, help="cargo profile (release default)")
    parser.add_argument("--target", action="append", help="target triple (repeatable)")
    parser.add_argument("--dist-revision", default=None, help="distribution revision")
    parser.add_argument("--no-build", action="store_true", help="skip cargo build step")
    parser.add_argument("--toolchain", default=None, help="toolchain version label")
    return parser.parse_args()
def main() -> int:
    """Entry point: gather build inputs, build artifacts, and merge the index."""
    args = parse_args()
    config = load_json(CONFIG_PATH)
    contract = load_json(CONTRACT_PATH)
    version = args.version or read_package_version()
    profile = args.profile or config["default_profile"]
    dist_revision = args.dist_revision or config["default_dist_revision"]
    toolchain = args.toolchain or run(["rustc", "--version"], cwd=ROOT)
    # Slug uses the version token of "rustc X.Y.Z (...)" when a space is present.
    toolchain_slug = slugify_token(toolchain.split(" ")[1] if " " in toolchain else toolchain)
    git_sha_full = run(["git", "rev-parse", "HEAD"], cwd=ROOT)
    git_sha_short = run(["git", "rev-parse", "--short", "HEAD"], cwd=ROOT)
    # Default to the commit timestamp so rebuilds of the same commit match.
    source_date_epoch = os.getenv(
        "SOURCE_DATE_EPOCH",
        run(["git", "show", "-s", "--format=%ct", "HEAD"], cwd=ROOT),
    )
    targets = collect_targets(args.target, profile)
    contract_version = contract.get("contract_version", "1.0.0")
    input_data = SourceInputs(
        source_dir=ROOT,
        contract_version=contract_version,
        contract=contract,
        version=version,
        profile=profile,
        targets=targets,
        dist_revision=dist_revision,
        toolchain=toolchain,
        toolchain_slug=toolchain_slug,
        git_sha_full=git_sha_full,
        git_sha_short=git_sha_short,
        source_date_epoch=source_date_epoch,
    )
    entries = build_entry_entries(input_data, config, args)
    merge_index(
        contract_version,
        config["dist_root"],
        config["index_path_template"],
        config.get("legacy_index_path_template"),
        entries,
    )
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -34,6 +34,14 @@ pub(crate) const CREATE_MODELS: [&str; 4] = [
"gpt-5.3-codex",
"gpt-5.3-codex-spark",
];
pub(crate) const CREATE_MENU_ROWS: usize = 3;
pub(crate) const PICKER_MENU_ROWS: usize = 3;
/// Which pane of the controller picker currently receives key events.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum PickerFocus {
    /// The controller list itself is focused.
    List,
    /// The per-controller settings menu is focused.
    Settings,
}
#[derive(Debug, Clone)]
#[allow(clippy::large_enum_variant)]
@@ -91,8 +99,14 @@ pub struct App {
pub screen: Screen,
pub picker_items: Vec<crate::model::ControllerSummary>,
pub picker_selected: usize,
pub picker_focus: PickerFocus,
pub picker_menu_selected: usize,
pub picker_model_index: usize,
pub picker_fast_mode: bool,
pub picker_allow_branching: bool,
pub create_input: String,
pub create_model_index: usize,
pub create_menu_selected: usize,
pub create_fast_mode: bool,
pub create_allow_branching: bool,
pub create_error: Option<String>,
@@ -108,8 +122,14 @@ impl App {
screen: Screen::ControllerPicker,
picker_items: Vec::new(),
picker_selected: 0,
picker_focus: PickerFocus::List,
picker_menu_selected: 0,
picker_model_index: 0,
picker_fast_mode: false,
picker_allow_branching: false,
create_input: String::new(),
create_model_index: 0,
create_menu_selected: 0,
create_fast_mode: false,
create_allow_branching: false,
create_error: None,
@@ -177,18 +197,97 @@ impl App {
.unwrap_or(CREATE_MODELS[0])
}
pub(crate) fn cycle_create_model(&mut self) {
self.create_model_index = (self.create_model_index + 1) % CREATE_MODELS.len();
/// Model name currently selected in the picker settings menu,
/// falling back to the first known model on an out-of-range index.
pub(crate) fn picker_model(&self) -> &'static str {
    match CREATE_MODELS.get(self.picker_model_index) {
        Some(&model) => model,
        None => CREATE_MODELS[0],
    }
}
/// Id of the controller under the picker cursor, if the cursor is on a controller row.
pub(crate) fn selected_picker_controller_id(&self) -> Option<&str> {
    let controller = self.picker_items.get(self.picker_selected)?;
    Some(controller.id.as_str())
}
/// Cycle the create-form model selection by `delta`, wrapping at both ends.
pub(crate) fn shift_create_model(&mut self, delta: isize) {
    let count = CREATE_MODELS.len() as isize;
    let shifted = (self.create_model_index as isize + delta).rem_euclid(count);
    self.create_model_index = shifted as usize;
}
/// Flip the create-form fast-mode flag.
pub(crate) fn toggle_create_fast_mode(&mut self) {
    let flipped = !self.create_fast_mode;
    self.create_fast_mode = flipped;
}
/// Flip the create-form allow-branching flag.
pub(crate) fn toggle_create_allow_branching(&mut self) {
    let flipped = !self.create_allow_branching;
    self.create_allow_branching = flipped;
}
/// Move the create-form menu cursor by `delta`, clamped to the menu rows.
pub(crate) fn move_create_menu_selection(&mut self, delta: isize) {
    let last_row = (CREATE_MENU_ROWS.saturating_sub(1)) as isize;
    let moved = (self.create_menu_selected as isize + delta).clamp(0, last_row);
    self.create_menu_selected = moved as usize;
}
/// Cycle the picker settings model selection by `delta`, wrapping at both ends.
pub(crate) fn shift_picker_model(&mut self, delta: isize) {
    let count = CREATE_MODELS.len() as isize;
    let shifted = (self.picker_model_index as isize + delta).rem_euclid(count);
    self.picker_model_index = shifted as usize;
}
/// Flip the picker settings fast-mode flag.
pub(crate) fn toggle_picker_fast_mode(&mut self) {
    let flipped = !self.picker_fast_mode;
    self.picker_fast_mode = flipped;
}
/// Flip the picker settings allow-branching flag.
pub(crate) fn toggle_picker_allow_branching(&mut self) {
    let flipped = !self.picker_allow_branching;
    self.picker_allow_branching = flipped;
}
/// Move the picker settings menu cursor by `delta`, clamped to the menu rows.
pub(crate) fn move_picker_menu_selection(&mut self, delta: isize) {
    let last_row = (PICKER_MENU_ROWS.saturating_sub(1)) as isize;
    let moved = (self.picker_menu_selected as isize + delta).clamp(0, last_row);
    self.picker_menu_selected = moved as usize;
}
/// Return the picker menu to its initial state: list focused, cursor on row 0.
pub(crate) fn reset_picker_menu(&mut self) {
    self.picker_menu_selected = 0;
    self.picker_focus = PickerFocus::List;
}
/// Clear every create-form field back to its default.
pub(crate) fn reset_create_form(&mut self) {
    self.create_error = None;
    self.create_allow_branching = false;
    self.create_fast_mode = false;
    self.create_menu_selected = 0;
    self.create_model_index = 0;
    self.create_input.clear();
}
/// Clear every picker settings field back to its default.
pub(crate) fn reset_picker_form(&mut self) {
    self.picker_allow_branching = false;
    self.picker_fast_mode = false;
    self.picker_model_index = 0;
    self.picker_menu_selected = 0;
    self.picker_focus = PickerFocus::List;
}
/// Mirror the selected controller's saved settings into the picker menu;
/// resets the form when the cursor is not on a controller row.
pub(crate) fn sync_picker_settings_from_selected_controller(&mut self) {
    match self.picker_items.get(self.picker_selected) {
        None => self.reset_picker_form(),
        Some(controller) => {
            // Unknown model names fall back to the first entry.
            let model_position = CREATE_MODELS
                .iter()
                .position(|model| *model == controller.run_model);
            self.picker_model_index = model_position.unwrap_or(0);
            self.picker_fast_mode = controller.fast_mode;
            self.picker_allow_branching = controller.allow_branching;
            self.picker_menu_selected = 0;
            self.picker_focus = PickerFocus::List;
        }
    }
}
/// Borrow the active workspace runtime, if one is open.
pub(crate) fn workspace(&self) -> Option<&WorkspaceRuntime> {
    self.workspace.as_ref()
}

View File

@@ -1,7 +1,8 @@
use anyhow::Result;
use crossterm::event::{KeyCode, KeyEvent};
use crate::model::Screen;
use crate::model::{Screen, TaskConfig};
use crate::storage::toon;
use super::App;
@@ -9,16 +10,63 @@ impl App {
pub(super) fn handle_picker_key(&mut self, key: KeyEvent) -> Result<bool> {
let total_rows = self.picker_items.len() + 1;
match key.code {
KeyCode::Esc => Ok(true),
KeyCode::Esc => {
if matches!(self.picker_focus, crate::app::PickerFocus::Settings) {
self.picker_focus = crate::app::PickerFocus::List;
Ok(false)
} else {
Ok(true)
}
}
KeyCode::Tab => {
if self.picker_selected < self.picker_items.len() {
self.toggle_picker_focus();
}
Ok(false)
}
KeyCode::Down | KeyCode::Char('j') => {
if self.picker_selected + 1 < total_rows {
if matches!(self.picker_focus, crate::app::PickerFocus::Settings)
&& self.picker_selected < self.picker_items.len()
{
self.move_picker_menu_selection(1);
} else if self.picker_selected + 1 < total_rows {
self.picker_selected += 1;
if self.picker_selected < self.picker_items.len() {
self.sync_picker_settings_from_selected_controller();
} else {
self.reset_picker_menu();
}
}
Ok(false)
}
KeyCode::Up | KeyCode::Char('k') => {
if self.picker_selected > 0 {
if matches!(self.picker_focus, crate::app::PickerFocus::Settings)
&& self.picker_selected < self.picker_items.len()
{
self.move_picker_menu_selection(-1);
} else if self.picker_selected > 0 {
self.picker_selected -= 1;
if self.picker_selected < self.picker_items.len() {
self.sync_picker_settings_from_selected_controller();
} else {
self.reset_picker_menu();
}
}
Ok(false)
}
KeyCode::Left => {
if matches!(self.picker_focus, crate::app::PickerFocus::Settings)
&& self.picker_selected < self.picker_items.len()
{
self.adjust_picker_menu_selection(-1)?;
}
Ok(false)
}
KeyCode::Right => {
if matches!(self.picker_focus, crate::app::PickerFocus::Settings)
&& self.picker_selected < self.picker_items.len()
{
self.adjust_picker_menu_selection(1)?;
}
Ok(false)
}
@@ -34,11 +82,8 @@ impl App {
return Ok(false);
}
if let Some(controller_id) = self
.picker_items
.get(self.picker_selected)
.map(|controller| controller.id.clone())
{
if let Some(controller_id) = self.selected_picker_controller_id().map(str::to_string) {
self.persist_picker_settings_for_selected_controller()?;
let config = crate::model::TaskConfig::default_for(&controller_id);
self.open_workspace(config, Some(self.default_task_path.clone()))?;
}
@@ -55,18 +100,23 @@ impl App {
self.create_error = None;
Ok(false)
}
KeyCode::F(2) => {
self.cycle_create_model();
KeyCode::Up => {
self.move_create_menu_selection(-1);
self.create_error = None;
Ok(false)
}
KeyCode::F(3) => {
self.create_fast_mode = !self.create_fast_mode;
KeyCode::Down => {
self.move_create_menu_selection(1);
self.create_error = None;
Ok(false)
}
KeyCode::F(4) => {
self.create_allow_branching = !self.create_allow_branching;
KeyCode::Left => {
self.adjust_create_menu_selection(-1);
self.create_error = None;
Ok(false)
}
KeyCode::Right => {
self.adjust_create_menu_selection(1);
self.create_error = None;
Ok(false)
}
@@ -107,4 +157,58 @@ impl App {
_ => Ok(false),
}
}
/// Apply a Left/Right adjustment to whichever create-screen setting row is
/// currently highlighted (0 = model, 1 = fast mode, 2 = branching).
fn adjust_create_menu_selection(&mut self, direction: isize) {
    let row = self.create_menu_selected;
    if row == 0 {
        // Only the model row is directional; the others are simple toggles.
        self.shift_create_model(direction);
    } else if row == 1 {
        self.toggle_create_fast_mode();
    } else if row == 2 {
        self.toggle_create_allow_branching();
    }
}
/// Flip keyboard focus between the controller list and the settings menu,
/// resetting the settings cursor to its first row.
fn toggle_picker_focus(&mut self) {
    let next_focus = if matches!(self.picker_focus, crate::app::PickerFocus::List) {
        crate::app::PickerFocus::Settings
    } else {
        crate::app::PickerFocus::List
    };
    self.picker_focus = next_focus;
    self.picker_menu_selected = 0;
}
/// Apply a Left/Right adjustment to the highlighted picker setting, then
/// persist the updated settings for the currently selected controller.
fn adjust_picker_menu_selection(&mut self, direction: isize) -> Result<()> {
    let row = self.picker_menu_selected;
    if row == 0 {
        self.shift_picker_model(direction);
    } else if row == 1 {
        self.toggle_picker_fast_mode();
    } else if row == 2 {
        self.toggle_picker_allow_branching();
    }
    // Every adjustment is written through to disk immediately.
    self.persist_picker_settings_for_selected_controller()
}
/// Write the picker's current model/mode/branching choices into the selected
/// controller's persisted state file, then mirror them onto the cached
/// in-memory summary row so the UI stays consistent.
fn persist_picker_settings_for_selected_controller(&mut self) -> Result<()> {
    let controller_id = match self.selected_picker_controller_id() {
        Some(id) => id.to_string(),
        // Nothing selected (e.g. the "create" row): silently succeed.
        None => return Ok(()),
    };
    let config = TaskConfig::default_for(&controller_id);
    toon::ensure_controller_files(&config)?;
    // Load, mutate, and rewrite the controller's persisted state.
    let mut persisted = toon::read_state(&config.state_file)?;
    persisted.run_model = self.picker_model().to_string();
    persisted.fast_mode = self.picker_fast_mode;
    persisted.allow_branching = self.picker_allow_branching;
    toon::write_state(&config.state_file, &persisted)?;
    // Keep the cached summary row in sync with what was just written.
    let summary_row = self
        .picker_items
        .iter_mut()
        .find(|summary| summary.id == controller_id);
    if let Some(summary) = summary_row {
        summary.run_model = persisted.run_model().to_string();
        summary.fast_mode = persisted.fast_mode;
        summary.allow_branching = persisted.allow_branching;
        summary.branch = if persisted.allow_branching {
            config.branch.clone()
        } else {
            "current".to_string()
        };
    }
    Ok(())
}
}

View File

@@ -192,8 +192,14 @@ mod tests {
screen: Screen::Workspace,
picker_items: Vec::new(),
picker_selected: 0,
picker_focus: crate::app::PickerFocus::List,
picker_menu_selected: 0,
picker_model_index: 0,
picker_fast_mode: false,
picker_allow_branching: false,
create_input: String::new(),
create_model_index: 0,
create_menu_selected: 0,
create_fast_mode: false,
create_allow_branching: false,
create_error: None,

View File

@@ -77,8 +77,14 @@ mod tests {
screen: Screen::Workspace,
picker_items: Vec::new(),
picker_selected: 0,
picker_focus: crate::app::PickerFocus::List,
picker_menu_selected: 0,
picker_model_index: 0,
picker_fast_mode: false,
picker_allow_branching: false,
create_input: String::new(),
create_model_index: 0,
create_menu_selected: 0,
create_fast_mode: false,
create_allow_branching: false,
create_error: None,

View File

@@ -103,6 +103,11 @@ pub(super) fn refresh_picker(app: &mut App) -> Result<()> {
if app.picker_selected > app.picker_items.len() {
app.picker_selected = app.picker_items.len();
}
if app.picker_selected < app.picker_items.len() {
app.sync_picker_settings_from_selected_controller();
} else {
app.reset_picker_menu();
}
Ok(())
}
@@ -182,8 +187,14 @@ mod tests {
screen: Screen::ControllerPicker,
picker_items: Vec::new(),
picker_selected: 3,
picker_focus: crate::app::PickerFocus::List,
picker_menu_selected: 0,
picker_model_index: 0,
picker_fast_mode: false,
picker_allow_branching: false,
create_input: String::new(),
create_model_index: 0,
create_menu_selected: 0,
create_fast_mode: false,
create_allow_branching: false,
create_error: None,

View File

@@ -3,6 +3,7 @@ use std::sync::mpsc::{self, Receiver};
use std::time::Instant;
use crossterm::event::{KeyCode, KeyEvent};
use tempfile::tempdir;
use super::{App, ControlCommand, WorkspaceRuntime};
use crate::cli::DEFAULT_TASK_CONFIG_PATH;
@@ -10,6 +11,8 @@ use crate::model::{
group_session_entries, ControllerPhase, ControllerState, Plan, Screen, SessionCursor,
SessionEntry, SessionSelection, SessionSource, SessionStream, TaskConfig, UsageSnapshot,
};
use crate::storage::toon;
use crate::test_support::CurrentDirGuard;
use crate::ui::{self, scroll::VerticalScrollState};
fn sample_app() -> App {
@@ -32,10 +35,19 @@ fn sample_app_with_control_rx() -> (App, Receiver<ControlCommand>) {
total_steps: 2,
last_updated: Some("10".to_string()),
branch: "codex/alpha".to_string(),
run_model: "gpt-5.4".to_string(),
fast_mode: false,
allow_branching: false,
}],
picker_selected: 0,
picker_focus: crate::app::PickerFocus::List,
picker_menu_selected: 0,
picker_model_index: 0,
picker_fast_mode: false,
picker_allow_branching: false,
create_input: String::new(),
create_model_index: 0,
create_menu_selected: 0,
create_fast_mode: false,
create_allow_branching: false,
create_error: None,
@@ -100,16 +112,22 @@ fn planning_mode_blocks_slash_commands() {
}
#[test]
fn create_screen_shortcuts_update_run_options() {
fn create_screen_menu_updates_run_options() {
let mut app = sample_app();
app.workspace = None;
app.screen = Screen::CreateController;
app.handle_create_key(KeyEvent::from(KeyCode::F(2)))
app.handle_create_key(KeyEvent::from(KeyCode::Right))
.expect("cycle model");
app.handle_create_key(KeyEvent::from(KeyCode::F(3)))
app.handle_create_key(KeyEvent::from(KeyCode::Down))
.expect("move to fast mode");
assert_eq!(app.create_menu_selected, 1);
app.handle_create_key(KeyEvent::from(KeyCode::Right))
.expect("toggle fast");
app.handle_create_key(KeyEvent::from(KeyCode::F(4)))
app.handle_create_key(KeyEvent::from(KeyCode::Down))
.expect("move to branching");
assert_eq!(app.create_menu_selected, 2);
app.handle_create_key(KeyEvent::from(KeyCode::Right))
.expect("toggle branching");
assert_eq!(app.create_model(), "gpt-5.4-mini");
@@ -117,6 +135,47 @@ fn create_screen_shortcuts_update_run_options() {
assert!(app.create_allow_branching);
}
#[test]
fn picker_dashboard_menu_updates_existing_controller_settings() {
let temp = tempdir().expect("tempdir");
let _cwd = CurrentDirGuard::enter(temp.path());
let (mut app, control_rx) = sample_app_with_control_rx();
app.screen = Screen::ControllerPicker;
if let Some(workspace) = app.workspace.as_mut() {
toon::ensure_controller_files(&workspace.task_config).expect("ensure files");
}
app.handle_picker_key(KeyEvent::from(KeyCode::Tab))
.expect("focus settings");
assert!(matches!(
app.picker_focus,
crate::app::PickerFocus::Settings
));
app.handle_picker_key(KeyEvent::from(KeyCode::Right))
.expect("change model");
app.handle_picker_key(KeyEvent::from(KeyCode::Down))
.expect("move to fast mode");
app.handle_picker_key(KeyEvent::from(KeyCode::Right))
.expect("toggle fast");
app.handle_picker_key(KeyEvent::from(KeyCode::Down))
.expect("move to branching");
app.handle_picker_key(KeyEvent::from(KeyCode::Right))
.expect("toggle branching");
let workspace = app.workspace.as_ref().expect("workspace");
assert_eq!(app.picker_model(), "gpt-5.4-mini");
assert!(app.picker_fast_mode);
assert!(app.picker_allow_branching);
assert!(control_rx.try_recv().is_err());
let persisted = toon::read_state(&workspace.task_config.state_file).expect("read state");
assert_eq!(persisted.run_model(), "gpt-5.4-mini");
assert!(persisted.fast_mode);
assert!(persisted.allow_branching);
}
#[test]
fn status_command_reports_current_workspace_progress() {
let mut app = sample_app();
@@ -155,6 +214,7 @@ fn submission_clears_selection_and_sends_control_command() {
let (mut app, control_rx) = sample_app_with_control_rx();
app.screen = Screen::Workspace;
if let Some(workspace) = app.workspace.as_mut() {
workspace.state.phase = ControllerPhase::Executing;
workspace.session_follow_output = false;
workspace.session_drag_active = true;
workspace.session_selection = Some(SessionSelection {
@@ -179,6 +239,42 @@ fn submission_clears_selection_and_sends_control_command() {
));
}
#[test]
fn planning_submission_persists_pending_input_without_control_command() {
let temp = tempdir().expect("tempdir");
let _cwd = CurrentDirGuard::enter(temp.path());
let (mut app, control_rx) = sample_app_with_control_rx();
app.screen = Screen::Workspace;
if let Some(workspace) = app.workspace.as_mut() {
toon::ensure_controller_files(&workspace.task_config).expect("ensure files");
workspace.state.phase = ControllerPhase::Planning;
}
app.dispatch_workspace_input("Refine the plan".to_string())
.expect("dispatch");
let workspace = app.workspace.as_ref().expect("workspace");
assert_eq!(
workspace
.state
.planning_session
.pending_input
.as_deref(),
Some("Refine the plan")
);
assert!(control_rx.try_recv().is_err());
let persisted = toon::read_state(&workspace.task_config.state_file).expect("read state");
assert_eq!(
persisted
.planning_session
.pending_input
.as_deref(),
Some("Refine the plan")
);
}
#[test]
fn workspace_scroll_can_move_away_from_follow_mode() {
let mut app = sample_app();

View File

@@ -331,6 +331,14 @@ impl App {
return Err(anyhow!("workspace is not active"));
};
let is_planning = matches!(workspace.state.phase, ControllerPhase::Planning);
if is_planning {
let mut persisted_state = toon::read_state(&workspace.task_config.state_file)?;
persisted_state.planning_session.pending_input = Some(input.clone());
toon::write_state(&workspace.task_config.state_file, &persisted_state)?;
workspace.state.planning_session.pending_input = Some(input.clone());
}
workspace.session_follow_output = true;
workspace.session_selection = None;
workspace.session_drag_active = false;
@@ -342,7 +350,9 @@ impl App {
body: input.clone(),
run_id: repo::next_run_id(),
});
let _ = workspace.control_tx.send(ControlCommand::Submit(input));
if !is_planning {
let _ = workspace.control_tx.send(ControlCommand::Submit(input));
}
Ok(())
}
}

View File

@@ -6,8 +6,10 @@ use anyhow::Result;
use crate::app::{AppEvent, ControlCommand};
use crate::controller::{executor, goal_checker, planner, verifier};
use crate::model::{
ControllerPhase, GoalStatus, SessionEntry, SessionSource, SessionStream, StepStatus, TaskConfig,
ControllerPhase, GoalStatus, PlannerResponse, SessionEntry, SessionSource, SessionStream,
StepStatus, TaskConfig,
};
use crate::prompt;
use crate::repo;
use crate::storage::toon;
@@ -36,6 +38,18 @@ pub fn runtime_loop(
let goal_md = toon::read_markdown(&config.goal_file)?;
let standards_md = toon::read_markdown(&config.standards_file)?;
refresh_usage_state(&mut state);
if matches!(state.phase, ControllerPhase::Planning)
&& state.planning_session.pending_input.is_some()
{
let pending_input = state
.planning_session
.pending_input
.as_deref()
.unwrap_or_default()
.to_string();
process_planning_submission(&repo_root, &config, &pending_input, &event_tx)?;
continue;
}
emit_snapshot(&event_tx, &goal_md, &standards_md, &plan, &state);
match control_rx.try_recv() {
@@ -71,19 +85,18 @@ pub fn runtime_loop(
}));
continue;
}
Ok(ControlCommand::Submit(text)) => {
Ok(ControlCommand::Submit(_text)) => {
if matches!(state.phase, ControllerPhase::Planning) {
let response =
crate::planning::session::advance(&repo_root, &config, &text, &event_tx)?;
if let Some(question) = response.question {
let _ = event_tx.send(AppEvent::Session(SessionEntry {
source: SessionSource::Planner,
stream: SessionStream::Status,
title: "Question".to_string(),
tag: Some(config.controller_id()),
body: question,
run_id: repo::next_run_id(),
}));
let persisted_state = toon::read_state(&config.state_file)?;
if let Some(pending_input) =
persisted_state.planning_session.pending_input.as_deref()
{
process_planning_submission(
&repo_root,
&config,
pending_input,
&event_tx,
)?;
}
} else {
let _ = event_tx.send(AppEvent::Session(SessionEntry {
@@ -117,6 +130,13 @@ pub fn runtime_loop(
state.phase = ControllerPhase::Done;
state.clear_stop_reason();
state.goal_status = GoalStatus::Done;
let completion_summary = build_completion_summary(&plan);
state.set_completion_summary(completion_summary.clone());
state.history.push(crate::model::HistoryEvent {
timestamp: repo::now_timestamp(),
kind: "goal-complete".to_string(),
detail: completion_summary.clone(),
});
toon::write_state(&config.state_file, &state)?;
emit_snapshot(&event_tx, &goal_md, &standards_md, &plan, &state);
let _ = event_tx.send(AppEvent::Session(SessionEntry {
@@ -124,7 +144,7 @@ pub fn runtime_loop(
stream: SessionStream::Status,
title: "Goal".to_string(),
tag: Some(config.controller_id()),
body: "Goal complete".to_string(),
body: completion_summary,
run_id: repo::next_run_id(),
}));
continue;
@@ -180,6 +200,7 @@ pub fn runtime_loop(
run_id: repo::next_run_id(),
}));
state.clear_stop_reason();
state.clear_completion_summary();
state.replan_required = false;
state
.blocked_steps
@@ -245,6 +266,7 @@ pub fn runtime_loop(
continue;
}
plan.append_step_note(&step.id, completion_note(&exec));
plan.mark_done(&step.id);
state.complete_step(&step, verification, cleanup, tests);
toon::write_plan(&config.plan_file, &plan)?;
@@ -260,6 +282,32 @@ fn refresh_usage_state(state: &mut crate::model::ControllerState) {
crate::process::persist_usage_snapshot(state, &snapshot);
}
/// Forward the latest user input to the planning session and, when the
/// planner replies with a follow-up question, surface that question as a
/// status entry in the session log.
fn process_planning_submission(
    repo_root: &std::path::Path,
    config: &TaskConfig,
    latest_user_input: &str,
    event_tx: &Sender<AppEvent>,
) -> Result<PlannerResponse> {
    let response =
        crate::planning::session::advance(repo_root, config, latest_user_input, event_tx)?;
    if let Some(question) = response.question.as_deref() {
        let entry = SessionEntry {
            source: SessionSource::Planner,
            stream: SessionStream::Status,
            title: "Question".to_string(),
            tag: Some(config.controller_id()),
            body: question.to_string(),
            run_id: repo::next_run_id(),
        };
        // A send failure only means the UI side has gone away; ignore it.
        let _ = event_tx.send(AppEvent::Session(entry));
    }
    Ok(response)
}
fn emit_snapshot(
event_tx: &Sender<AppEvent>,
goal_md: &str,
@@ -292,6 +340,79 @@ fn resumable_step(
.cloned()
}
/// Build a short human-readable note for a completed step, combining the
/// execution summary with any non-empty agent notes, truncated for display.
fn completion_note(exec: &crate::model::ExecutionResponse) -> String {
    let mut sections: Vec<String> = Vec::new();
    let trimmed_summary = exec.summary.trim();
    if !trimmed_summary.is_empty() {
        sections.push(prompt::truncate_text(trimmed_summary, 180));
    }
    // Drop empty notes; truncate each surviving one individually.
    let agent_notes: Vec<String> = exec
        .notes
        .iter()
        .filter_map(|raw| {
            let note = raw.trim();
            if note.is_empty() {
                None
            } else {
                Some(prompt::truncate_text(note, 120))
            }
        })
        .collect();
    if !agent_notes.is_empty() {
        sections.push(format!("Agent notes: {}", agent_notes.join("; ")));
    }
    if sections.is_empty() {
        return "Completed the step.".to_string();
    }
    prompt::truncate_text(&sections.join(" "), 240)
}
/// Summarize a finished plan for the Done-phase notice: counts the completed
/// steps and lists up to four of them (id, truncated title, and truncated
/// notes when present), appending a "... and N more" marker for the rest.
fn build_completion_summary(plan: &crate::model::Plan) -> String {
    let done: Vec<_> = plan
        .steps
        .iter()
        .filter(|step| step.status.is_done())
        .collect();
    if done.is_empty() {
        return "Goal complete.".to_string();
    }
    // Render at most four detail entries to keep the notice compact.
    let mut listed: Vec<String> = Vec::new();
    for step in done.iter().take(4) {
        let mut entry = format!("{}: {}", step.id, prompt::truncate_text(&step.title, 80));
        if !step.notes.trim().is_empty() {
            entry.push_str(" - ");
            entry.push_str(&prompt::truncate_text(&step.notes, 120));
        }
        listed.push(entry);
    }
    let hidden = done.len().saturating_sub(listed.len());
    if hidden > 0 {
        let plural = if hidden == 1 { "" } else { "s" };
        listed.push(format!("... and {hidden} more completed step{plural}"));
    }
    let step_plural = if done.len() == 1 { "" } else { "s" };
    let full = format!(
        "Completed {} step{}: {}",
        done.len(),
        step_plural,
        listed.join("; ")
    );
    prompt::truncate_text(&full, 320)
}
fn recover_stale_execution_state(
config: &TaskConfig,
plan: &mut crate::model::Plan,
@@ -542,4 +663,49 @@ mod tests {
other => panic!("unexpected event: {other:?}"),
}
}
/// completion_note must include both the execution summary and the
/// "Agent notes:" section when notes are present.
#[test]
fn completion_note_uses_execution_summary_and_notes() {
    let note = completion_note(&crate::model::ExecutionResponse {
        summary: "Implemented the board note flow".to_string(),
        notes: vec![
            "Kept the change localized to completion handling".to_string(),
            "Verified the board still renders done steps".to_string(),
        ],
        ..crate::model::ExecutionResponse::default()
    });
    assert!(note.contains("Implemented the board note flow"));
    assert!(note.contains("Agent notes:"));
    assert!(note.contains("Kept the change localized"));
}
/// build_completion_summary must count the done steps and list each one by
/// id and title.
#[test]
fn build_completion_summary_lists_done_steps() {
    let plan = Plan {
        version: 1,
        goal_summary: "goal".to_string(),
        steps: vec![
            PlanStep {
                id: "s1".to_string(),
                title: "First".to_string(),
                notes: "Finished the first change.".to_string(),
                status: StepStatus::Done,
                ..PlanStep::default()
            },
            PlanStep {
                id: "s2".to_string(),
                title: "Second".to_string(),
                notes: "Finished the second change.".to_string(),
                status: StepStatus::Done,
                ..PlanStep::default()
            },
        ],
    };
    let summary = build_completion_summary(&plan);
    assert!(summary.contains("Completed 2 steps"));
    assert!(summary.contains("s1: First"));
    assert!(summary.contains("s2: Second"));
}
}

View File

@@ -7,4 +7,5 @@ pub fn pause(state: &mut ControllerState) {
/// Move the controller back into the Executing phase, clearing any stop
/// reason and any completion summary left over from a prior Done phase.
pub fn resume(state: &mut ControllerState) {
    state.phase = ControllerPhase::Executing;
    state.clear_stop_reason();
    state.clear_completion_summary();
}

View File

@@ -102,7 +102,11 @@ pub struct PlanningTurn {
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PlanningSessionMeta {
#[serde(default)]
pub pending_input: Option<String>,
#[serde(default)]
pub pending_question: Option<String>,
#[serde(default)]
pub transcript: Vec<PlanningTurn>,
}
@@ -124,10 +128,13 @@ pub struct ControllerState {
pub last_full_test_summary: Option<TestSummary>,
pub history: Vec<HistoryEvent>,
pub notes: Vec<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub completion_summary: Option<String>,
#[serde(default)]
pub planning_rejection_counters: BTreeMap<String, u32>,
#[serde(default)]
pub planning_annotation_counters: BTreeMap<String, u32>,
#[serde(default)]
pub planning_session: PlanningSessionMeta,
pub started_at: Option<String>,
pub last_usage_refresh_at: Option<String>,
@@ -165,6 +172,7 @@ impl Default for ControllerState {
last_full_test_summary: None,
history: Vec::new(),
notes: Vec::new(),
completion_summary: None,
planning_rejection_counters: BTreeMap::new(),
planning_annotation_counters: BTreeMap::new(),
planning_session: PlanningSessionMeta::default(),
@@ -194,6 +202,15 @@ impl ControllerState {
self.stop_reason = None;
}
/// Record the summary text surfaced by `phase_notice` when the controller
/// reaches the Done phase.
pub fn set_completion_summary(&mut self, summary: impl Into<String>) {
    // Convert once and store directly; the previous intermediate binding
    // cloned a String that was never used again (clippy: redundant_clone).
    self.completion_summary = Some(summary.into());
}
/// Drop any recorded completion summary (called when the controller leaves
/// the Done phase, e.g. on resume or when work starts again).
pub fn clear_completion_summary(&mut self) {
    self.completion_summary = None;
}
pub fn run_model(&self) -> &str {
let model = self.run_model.trim();
if model.is_empty() {
@@ -270,7 +287,9 @@ impl ControllerState {
.unwrap_or_else(|| "Controller is blocked.".to_string()),
),
ControllerPhase::Done => Some(
self.latest_notice()
self.completion_summary
.clone()
.or_else(|| self.latest_notice())
.unwrap_or_else(|| "Goal complete.".to_string()),
),
_ => None,
@@ -356,4 +375,33 @@ mod tests {
Some("Verification failed for s2.")
);
}
/// When the phase is Done, phase_notice should surface the stored completion
/// summary instead of a generic notice.
#[test]
fn phase_notice_uses_completion_summary_when_done() {
    let mut state = ControllerState {
        phase: ControllerPhase::Done,
        ..ControllerState::default()
    };
    state.set_completion_summary("Completed 3 steps and verified the final output.");
    assert_eq!(
        state.phase_notice().as_deref(),
        Some("Completed 3 steps and verified the final output.")
    );
}
/// pending_input must survive a JSON serialize/deserialize round trip of the
/// whole controller state.
#[test]
fn planning_session_pending_input_roundtrips() {
    let mut state = ControllerState::default();
    state.planning_session.pending_input = Some("refine the plan".to_string());
    let decoded: ControllerState =
        serde_json::from_str(&serde_json::to_string(&state).expect("encode state"))
            .expect("decode state");
    assert_eq!(
        decoded.planning_session.pending_input.as_deref(),
        Some("refine the plan")
    );
}
}

View File

@@ -1,4 +1,4 @@
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize};
use super::{ControllerPhase, Plan};
@@ -14,6 +14,15 @@ fn default_planning_quality_gate() -> PlanningQualityGate {
PlanningQualityGate::default()
}
/// Serde helper: accept an explicit JSON `null` for the quality gate and map
/// it to the default gate instead of failing deserialization.
fn deserialize_planning_quality_gate<'de, D>(
    deserializer: D,
) -> Result<PlanningQualityGate, D::Error>
where
    D: Deserializer<'de>,
{
    let maybe_gate = Option::<PlanningQualityGate>::deserialize(deserializer)?;
    Ok(maybe_gate.unwrap_or_default())
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub enum PlanningPersona {
@@ -146,7 +155,7 @@ pub struct PlannerResponse {
pub persona_passes: Vec<PlanningPersonaPass>,
#[serde(default)]
pub single_pass_projection: Option<LegacyOutputProjection>,
#[serde(default = "default_planning_quality_gate")]
#[serde(default = "default_planning_quality_gate", deserialize_with = "deserialize_planning_quality_gate")]
pub quality_gate: PlanningQualityGate,
}
@@ -170,4 +179,36 @@ pub struct ControllerSummary {
pub total_steps: usize,
pub last_updated: Option<String>,
pub branch: String,
pub run_model: String,
pub fast_mode: bool,
pub allow_branching: bool,
}
#[cfg(test)]
mod tests {
use super::*;
/// A payload with `"quality_gate": null` must deserialize to the default
/// gate (via the custom deserializer) rather than erroring.
#[test]
fn planner_response_treats_null_quality_gate_as_default() {
    let raw = r#"
    {
        "kind": "final",
        "question": null,
        "goal_md": null,
        "standards_md": null,
        "plan": null,
        "planning_contract_version": 1,
        "contract": null,
        "persona_passes": [],
        "quality_gate": null,
        "single_pass_projection": null
    }
    "#;
    let response: PlannerResponse = serde_json::from_str(raw).expect("deserialize response");
    assert_eq!(
        response.quality_gate,
        PlanningQualityGate::default()
    );
}
}

View File

@@ -145,6 +145,7 @@ pub fn planner_contract_schema() -> Value {
"planning_contract_version",
"contract",
"persona_passes",
"quality_gate",
"single_pass_projection"
],
"properties": {
@@ -329,7 +330,7 @@ mod tests {
let schema = planner_contract_schema();
assert_eq!(
schema["required"],
json!(["kind","question","goal_md","standards_md","plan","planning_contract_version","contract","persona_passes","single_pass_projection"])
json!(["kind","question","goal_md","standards_md","plan","planning_contract_version","contract","persona_passes","quality_gate","single_pass_projection"])
);
assert_eq!(
schema["properties"]["contract"]["required"],

View File

@@ -93,25 +93,7 @@ struct StageWorkingSet {
}
pub fn planning_schema() -> serde_json::Value {
let legacy = json!({
"type": "object",
"additionalProperties": false,
"required": ["kind", "question", "goal_md", "standards_md", "plan"],
"properties": {
"kind": { "type": "string", "enum": ["question", "final"] },
"question": { "type": ["string", "null"] },
"goal_md": { "type": ["string", "null"] },
"standards_md": { "type": ["string", "null"] },
"plan": {
"anyOf": [
model::plan_schema(),
{ "type": "null" }
]
}
}
});
json!({ "anyOf": [legacy, model::planner_contract_schema()] })
model::planner_contract_schema()
}
pub fn build_planning_prompt(
@@ -480,7 +462,7 @@ pub fn build_persona_planning_prompt(
"- Keep the output minimal and execution-safe.\n",
"- Do not invent repository details.\n",
"- Always include all response keys.\n",
"- Use null for any field that does not apply in this response.\n",
"- Use null for any field that does not apply in this response, except quality_gate which must always be a full object.\n",
"- Output goal_md, standards_md, and plan should be complete enough for autonomous execution.\n",
"- Return plan steps with one-sentence notes and stable field order.\n",
"- Prefer 3-6 steps unless the goal truly needs more.\n",
@@ -1044,13 +1026,6 @@ mod tests {
#[test]
fn planning_schema_requires_all_declared_keys() {
let schema = planning_schema();
let legacy_schema = json!([
"kind",
"question",
"goal_md",
"standards_md",
"plan"
]);
let contract_schema = json!([
"kind",
"question",
@@ -1062,9 +1037,8 @@ mod tests {
"persona_passes",
"single_pass_projection"
]);
assert!(schema["anyOf"].is_array());
assert_eq!(schema["anyOf"][0]["required"], legacy_schema);
assert_eq!(schema["anyOf"][1]["required"], contract_schema);
assert_eq!(schema["required"], contract_schema);
assert_eq!(schema["type"], "object");
}
#[test]

View File

@@ -19,6 +19,7 @@ pub fn advance(
state.phase = ControllerPhase::Planning;
state.clear_stop_reason();
state.planning_session.pending_input = None;
state.planning_session.transcript.push(PlanningTurn {
role: "user".to_string(),
content: latest_user_input.to_string(),

View File

@@ -81,10 +81,18 @@ fn read_rate_limits() -> Result<RateLimitResponse> {
.stdin
.take()
.context("codex app-server stdin unavailable")?;
for request in [
let stdout = child
.stdout
.take()
.context("codex app-server stdout unavailable")?;
let reader = BufReader::new(stdout);
let mut reader = reader;
write_json_line(
&mut stdin,
json!({
"jsonrpc": "2.0",
"id": 1,
"id": "usage-initialize",
"method": "initialize",
"params": {
"clientInfo": {
@@ -93,24 +101,30 @@ fn read_rate_limits() -> Result<RateLimitResponse> {
}
}
}),
)?;
stdin.flush()?;
wait_for_jsonrpc_result(&mut reader, "usage-initialize", "initialize")?;
write_json_line(
&mut stdin,
json!({
"jsonrpc": "2.0",
"id": 2,
"method": "initialized",
}),
)?;
write_json_line(
&mut stdin,
json!({
"jsonrpc": "2.0",
"id": "usage-rate-limits",
"method": "account/rateLimits/read",
"params": serde_json::Value::Null,
}),
] {
writeln!(stdin, "{request}")?;
}
)?;
stdin.flush()?;
drop(stdin);
let stdout = child
.stdout
.take()
.context("codex app-server stdout unavailable")?;
let reader = BufReader::new(stdout);
let mut rate_limits = None;
let mut rpc_error = None;
for line in reader.lines().map_while(std::result::Result::ok) {
@@ -118,8 +132,8 @@ fn read_rate_limits() -> Result<RateLimitResponse> {
continue;
};
match value.get("id").and_then(|id| id.as_u64()) {
Some(2) => {
match value.get("id").and_then(request_id_to_string) {
Some(id) if id == "usage-rate-limits" => {
if let Some(error) = value.get("error") {
rpc_error = Some(error.to_string());
} else if let Some(result) = value.get("result") {
@@ -145,6 +159,44 @@ fn read_rate_limits() -> Result<RateLimitResponse> {
Err(anyhow!("account/rateLimits/read returned no result"))
}
/// Serialize `value` as one newline-terminated JSON line and write it to the
/// child process's stdin.
fn write_json_line(stdin: &mut impl Write, value: serde_json::Value) -> Result<()> {
    writeln!(stdin, "{value}")?;
    Ok(())
}
/// Drain JSON-RPC responses from `reader` until the message whose id equals
/// `expected_id` arrives; `label` names the request in error messages.
///
/// Non-JSON lines and responses for other ids are skipped. Returns an error
/// when the matching response carries an `error` member, or when the stream
/// ends without a `result` for the expected id.
///
/// Generalized to accept any `BufRead` (the original hard-coded
/// `BufReader<ChildStdout>`); existing callers pass a `BufReader`, which
/// implements `BufRead`, so the change is backward compatible and makes the
/// helper testable with in-memory readers.
fn wait_for_jsonrpc_result(
    reader: &mut impl BufRead,
    expected_id: &str,
    label: &str,
) -> Result<()> {
    for line in reader.lines().map_while(std::result::Result::ok) {
        let Ok(value) = serde_json::from_str::<serde_json::Value>(&line) else {
            // Skip any non-JSON noise the child may emit on stdout.
            continue;
        };
        if value.get("id").and_then(request_id_to_string).as_deref() != Some(expected_id) {
            continue;
        }
        if let Some(error) = value.get("error") {
            return Err(anyhow!("{label} failed: {error}"));
        }
        if value.get("result").is_some() {
            return Ok(());
        }
    }
    Err(anyhow!("{label} returned no result"))
}
/// Render a JSON-RPC request id (string or number) as a `String`; any other
/// JSON shape yields `None`.
fn request_id_to_string(value: &serde_json::Value) -> Option<String> {
    if let Some(text) = value.as_str() {
        Some(text.to_owned())
    } else if let serde_json::Value::Number(number) = value {
        Some(number.to_string())
    } else {
        None
    }
}
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RateLimitResponse {

View File

@@ -63,6 +63,9 @@ pub(crate) fn list_controller_summaries_in(root: &Path) -> Result<Vec<Controller
} else {
"current".to_string()
},
run_model: state.run_model().to_string(),
fast_mode: state.fast_mode,
allow_branching: state.allow_branching,
});
}

View File

@@ -223,6 +223,11 @@ fn render_picker(frame: &mut Frame, app: &App) {
.constraints([Constraint::Min(1), Constraint::Length(3)])
.split(frame.area());
let top = Layout::default()
.direction(Direction::Horizontal)
.constraints([Constraint::Percentage(68), Constraint::Percentage(32)])
.split(outer[0]);
let mut lines = vec![
Line::from(Span::styled(
"Select a controller loop or create a new one.",
@@ -309,64 +314,141 @@ fn render_picker(frame: &mut Frame, app: &App) {
.block(shell_block(" Controller Picker ", true))
.style(Style::default().fg(TEXT))
.wrap(Wrap { trim: false });
let footer = Paragraph::new("Up/Down or j/k to move. Enter opens. n creates. Esc quits.")
.block(shell_block(" Controls ", false))
.style(Style::default().fg(TEXT_DIM));
let dashboard = render_picker_dashboard(app);
let footer = Paragraph::new(
"Up/Down move controllers. Tab edits settings. Left/Right change the selected setting. Enter opens. n creates. Esc quits.",
)
.block(shell_block(" Controls ", false))
.style(Style::default().fg(TEXT_DIM));
frame.render_widget(picker, outer[0]);
frame.render_widget(picker, top[0]);
frame.render_widget(dashboard, top[1]);
frame.render_widget(footer, outer[1]);
}
/// Build the right-hand dashboard panel for the controller picker screen.
///
/// When a controller row is selected, shows its id, phase, and current focus
/// plus the three editable settings rows (model, mode, branching); otherwise
/// shows a hint pointing at the create-controller flow.
fn render_picker_dashboard(app: &App) -> Paragraph<'static> {
    let mut lines = vec![
        Line::from(Span::styled(
            "Dashboard",
            Style::default().fg(CYAN).add_modifier(Modifier::BOLD),
        )),
        Line::from(""),
    ];
    if let Some(controller) = app.picker_items.get(app.picker_selected) {
        lines.push(Line::from(vec![
            Span::styled("Selected ", Style::default().fg(TEXT_DIM)),
            Span::styled(controller.id.clone(), Style::default().fg(GREEN)),
        ]));
        lines.push(Line::from(vec![
            Span::styled("Phase ", Style::default().fg(TEXT_DIM)),
            Span::styled(controller.phase.label(), Style::default().fg(TEXT)),
        ]));
        lines.push(Line::from(vec![
            Span::styled("Focus ", Style::default().fg(TEXT_DIM)),
            Span::styled(
                match app.picker_focus {
                    crate::app::PickerFocus::List => "controllers",
                    crate::app::PickerFocus::Settings => "settings",
                },
                Style::default().fg(GOLD),
            ),
        ]));
        lines.push(Line::from(""));
        // NOTE(review): this bound check looks always-true here since the
        // `get` above already succeeded; harmless but likely redundant.
        if app.picker_selected < app.picker_items.len() {
            // Highlight the active row only when the settings menu has focus.
            let settings_focus = matches!(app.picker_focus, crate::app::PickerFocus::Settings);
            lines.push(create_setting_row(
                "Model",
                app.picker_model(),
                settings_focus && app.picker_menu_selected == 0,
                GREEN,
            ));
            lines.push(Line::from(""));
            lines.push(create_setting_row(
                "Mode",
                if app.picker_fast_mode { "fast" } else { "normal" },
                settings_focus && app.picker_menu_selected == 1,
                GOLD,
            ));
            lines.push(Line::from(""));
            lines.push(create_setting_row(
                "Branching",
                if app.picker_allow_branching {
                    "allowed"
                } else {
                    "disabled"
                },
                settings_focus && app.picker_menu_selected == 2,
                CYAN,
            ));
            lines.push(Line::from(""));
            lines.push(Line::from(Span::styled(
                "Tab switches focus to the settings menu.",
                Style::default().fg(TEXT_DIM),
            )));
        }
    } else {
        // No controller selected: the cursor is on the "create" row.
        lines.push(Line::from(Span::styled(
            "Create a new controller from the left pane.",
            Style::default().fg(GREEN).add_modifier(Modifier::BOLD),
        )));
        lines.push(Line::from(""));
        lines.push(Line::from(Span::styled(
            "Enter opens the create flow, where you choose model, mode, and branching.",
            Style::default().fg(TEXT_DIM),
        )));
    }
    Paragraph::new(lines)
        .block(shell_block(" Selected Controller ", true))
        .style(Style::default().fg(TEXT))
        .wrap(Wrap { trim: false })
}
fn render_create_controller(frame: &mut Frame, app: &App) {
let outer = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Min(1), Constraint::Length(6)])
.constraints([Constraint::Min(1), Constraint::Length(10)])
.split(frame.area());
let mut lines = vec![
Line::from(Span::styled(
"Describe the work this controller should own. The first submission goes straight into the Codex planning helper.",
"Describe the work this controller should own. Use the menu to choose the runtime model, mode, and branching before starting.",
Style::default().fg(CYAN).add_modifier(Modifier::BOLD),
)),
Line::from(""),
Line::from(vec![
Span::styled("Controller id ", Style::default().fg(TEXT_DIM)),
Span::styled(
"generated by GPT-5.4 mini on submit",
Style::default().fg(GREEN),
),
]),
create_setting_row(
"Model",
app.create_model(),
app.create_menu_selected == 0,
GREEN,
),
Line::from(""),
Line::from(vec![
Span::styled("Model ", Style::default().fg(TEXT_DIM)),
Span::styled(app.create_model(), Style::default().fg(GREEN)),
]),
Line::from(vec![
Span::styled("Fast mode ", Style::default().fg(TEXT_DIM)),
Span::styled(
if app.create_fast_mode { "on" } else { "off" },
Style::default().fg(if app.create_fast_mode { GREEN } else { TEXT }),
),
]),
Line::from(vec![
Span::styled("Branching ", Style::default().fg(TEXT_DIM)),
Span::styled(
if app.create_allow_branching {
"allowed"
} else {
"disabled"
},
Style::default().fg(if app.create_allow_branching { GOLD } else { GREEN }),
),
]),
create_setting_row(
"Mode",
if app.create_fast_mode { "fast" } else { "normal" },
app.create_menu_selected == 1,
GOLD,
),
Line::from(""),
create_setting_row(
"Branching",
if app.create_allow_branching {
"allowed"
} else {
"disabled"
},
app.create_menu_selected == 2,
CYAN,
),
Line::from(""),
Line::from(Span::styled(
"Example: Build the intuitive controller-first TUI picker and workspace.",
"Controller id generation still uses GPT-5.4 mini; this menu controls the runtime model.",
Style::default().fg(TEXT_DIM),
)),
Line::from(""),
Line::from(Span::styled(
"Controls: F2 cycle model, F3 toggle fast mode, F4 toggle branching, Enter starts.",
"Controls: Up/Down move, Left/Right change the selected setting, Enter starts, Esc backs out.",
Style::default().fg(TEXT_DIM),
)),
];
@@ -394,6 +476,29 @@ fn render_create_controller(frame: &mut Frame, app: &App) {
frame.render_widget(composer, outer[1]);
}
/// Render one settings row (label + value) for the picker/create menus.
///
/// `selected` highlights the row: a border-accent marker replaces the
/// leading pad, the label is bolded, and the value is drawn reversed in the
/// row's `accent` color. The marker and the unselected pad are both one
/// display column wide so rows stay horizontally aligned as the selection
/// moves up and down the menu.
fn create_setting_row(label: &str, value: &str, selected: bool, accent: Color) -> Line<'static> {
    // NOTE(review): the selected prefix was `""` (zero columns) while the
    // unselected pad `" "` is one column, so selected rows shifted left by
    // one cell; the indicator glyph was likely lost in an encoding
    // round-trip. Restore a one-column marker to keep alignment.
    let prefix_style = Style::default().fg(if selected { BORDER_ACTIVE } else { BORDER });
    let label_style = if selected {
        Style::default().fg(TEXT).add_modifier(Modifier::BOLD)
    } else {
        Style::default().fg(TEXT_DIM)
    };
    let value_style = if selected {
        Style::default()
            .fg(accent)
            .add_modifier(Modifier::BOLD)
            .add_modifier(Modifier::REVERSED)
    } else {
        Style::default().fg(accent)
    };
    Line::from(vec![
        Span::styled(if selected { "▌" } else { " " }, prefix_style),
        Span::styled(format!("{label:<10} "), label_style),
        Span::styled(value.to_string(), value_style),
    ])
}
fn render_workspace(frame: &mut Frame, app: &App) {
let layout = workspace_layout(frame.area());
let visible_lines = app
@@ -949,8 +1054,18 @@ pub(crate) fn plan_board_lines(app: &App) -> Vec<Line<'static>> {
),
),
]));
let note_label = if matches!(step.status, crate::model::StepStatus::Done) {
" Completed: "
} else {
" Notes: "
};
let note_style = if matches!(step.status, crate::model::StepStatus::Done) {
Style::default().fg(GREEN).add_modifier(Modifier::BOLD)
} else {
Style::default().fg(TEXT_DIM).add_modifier(Modifier::BOLD)
};
lines.push(Line::from(vec![
Span::raw(" "),
Span::styled(note_label, note_style),
Span::styled(step_note(step), Style::default().fg(TEXT_DIM)),
]));
lines.push(Line::from(""));
@@ -1227,10 +1342,19 @@ mod tests {
total_steps: 3,
last_updated: Some("10".to_string()),
branch: "codex/alpha".to_string(),
run_model: "gpt-5.4".to_string(),
fast_mode: false,
allow_branching: false,
}],
picker_selected: 0,
picker_focus: crate::app::PickerFocus::List,
picker_menu_selected: 0,
picker_model_index: 0,
picker_fast_mode: false,
picker_allow_branching: false,
create_input: "Build the picker flow".to_string(),
create_model_index: 0,
create_menu_selected: 0,
create_fast_mode: false,
create_allow_branching: false,
create_error: None,
@@ -1327,6 +1451,8 @@ mod tests {
let rendered = render_to_text(app);
assert!(rendered.contains("Controller Picker"));
assert!(rendered.contains("Create new controller"));
assert!(rendered.contains("Dashboard"));
assert!(rendered.contains("Selected"));
}
#[test]
@@ -1336,10 +1462,13 @@ mod tests {
app.create_allow_branching = false;
let rendered = render_to_text(app);
assert!(rendered.contains("Create Controller"));
assert!(rendered.contains("generated by GPT-5.4 mini"));
assert!(rendered.contains("Model gpt-5.4"));
assert!(rendered.contains("Fast mode on"));
assert!(rendered.contains("Branching disabled"));
assert!(rendered.contains("Controller id generation still uses GPT-5.4 mini"));
assert!(rendered.contains("Model"));
assert!(rendered.contains("gpt-5.4"));
assert!(rendered.contains("Mode"));
assert!(rendered.contains("fast"));
assert!(rendered.contains("Branching"));
assert!(rendered.contains("disabled"));
}
#[test]
@@ -1363,8 +1492,8 @@ mod tests {
workspace.plan.steps.push(crate::model::PlanStep {
id: "s1".to_string(),
title: "Design picker".to_string(),
notes: "Confirm navigation model before implementation.".to_string(),
status: crate::model::StepStatus::Todo,
notes: "Implemented the picker flow and verified the navigation path.".to_string(),
status: crate::model::StepStatus::Done,
..crate::model::PlanStep::default()
});
}
@@ -1376,7 +1505,8 @@ mod tests {
assert!(rendered.contains("5h=96% left"));
assert!(rendered.contains("7d=27% left"));
assert!(rendered.contains("Verification failed"));
assert!(rendered.contains("Confirm navigation"));
assert!(rendered.contains("Completed"));
assert!(rendered.contains("Implemented the picker flow"));
}
#[test]