From 92d3e849675c89445312bf60f3e00213fe155587 Mon Sep 17 00:00:00 2001 From: Ben Lovell Date: Fri, 17 Apr 2026 13:58:38 +0200 Subject: [PATCH 1/7] feat(TOW-1415): Claude Code plugin with bundled MCP server and skill (#247) * feat: add Claude Code plugin with MCP server and skill * feat: improve skill setup flow with auth detection and install fallbacks * chore: align CLI and MCP tool descriptions * chore: make coderabbit review this PR * chore: sync SKILL.md generator with checked-in skill content * ci: open PR to regenerate SKILL.md when CLI changes land on develop --- .claude-plugin/plugin.json | 12 + .coderabbit.yaml | 4 + .github/workflows/regenerate-skill.yml | 47 ++++ .gitignore | 3 + .mcp.json | 8 + crates/tower-cmd/src/apps.rs | 4 +- crates/tower-cmd/src/lib.rs | 9 + crates/tower-cmd/src/mcp.rs | 6 +- crates/tower-cmd/src/secrets.rs | 2 +- crates/tower-cmd/src/skill.rs | 243 ++++++++++++++++ skills/tower/SKILL.md | 369 +++++++++++++++++++++++++ 11 files changed, 703 insertions(+), 4 deletions(-) create mode 100644 .claude-plugin/plugin.json create mode 100644 .coderabbit.yaml create mode 100644 .github/workflows/regenerate-skill.yml create mode 100644 .mcp.json create mode 100644 crates/tower-cmd/src/skill.rs create mode 100644 skills/tower/SKILL.md diff --git a/.claude-plugin/plugin.json b/.claude-plugin/plugin.json new file mode 100644 index 00000000..033af098 --- /dev/null +++ b/.claude-plugin/plugin.json @@ -0,0 +1,12 @@ +{ + "name": "tower", + "description": "Tower compute platform — run and deploy Python apps, pipelines, and AI agents", + "version": "1.0.0", + "author": { + "name": "Tower", + "url": "https://tower.dev" + }, + "homepage": "https://tower.dev/docs", + "repository": "https://github.com/tower/tower-cli", + "license": "MIT" +} diff --git a/.coderabbit.yaml b/.coderabbit.yaml new file mode 100644 index 00000000..26934f78 --- /dev/null +++ b/.coderabbit.yaml @@ -0,0 +1,4 @@ +language: en-US +reviews: + base_branches: + - develop diff --git 
a/.github/workflows/regenerate-skill.yml b/.github/workflows/regenerate-skill.yml new file mode 100644 index 00000000..ea16be2d --- /dev/null +++ b/.github/workflows/regenerate-skill.yml @@ -0,0 +1,47 @@ +name: Regenerate SKILL.md + +on: + push: + branches: [develop] + paths: + - 'crates/tower-cmd/**' + - 'crates/tower/**' + - 'skills/tower/SKILL.md' + - '.github/workflows/regenerate-skill.yml' + +concurrency: + group: regenerate-skill + cancel-in-progress: false + +jobs: + regenerate: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + + steps: + - uses: actions/checkout@v6 + + - name: Set up Rust + run: rustup show + + - name: Cache Rust + uses: Swatinem/rust-cache@v2 + + - name: Regenerate SKILL.md + run: cargo run --quiet --bin tower -- skill generate > skills/tower/SKILL.md + + - name: Open PR if SKILL.md changed + uses: peter-evans/create-pull-request@v7 + with: + commit-message: "chore: regenerate SKILL.md" + title: "chore: regenerate SKILL.md" + body: | + `tower skill generate` output has drifted from the checked-in `skills/tower/SKILL.md`. + + This PR was opened automatically by the `Regenerate SKILL.md` workflow after a merge to `develop`. + branch: chore/regenerate-skill + base: develop + delete-branch: true + add-paths: skills/tower/SKILL.md diff --git a/.gitignore b/.gitignore index 82e88e1c..01c5d225 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,6 @@ pytest.ini # wheel build artifacts *.data/ + +# local MCP overrides (e.g. 
internal tooling with secrets) +.mcp.json.local diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 00000000..32fb7a6c --- /dev/null +++ b/.mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "tower": { + "command": "uvx", + "args": ["tower", "mcp-server"] + } + } +} diff --git a/crates/tower-cmd/src/apps.rs b/crates/tower-cmd/src/apps.rs index 9b90ec35..ab632620 100644 --- a/crates/tower-cmd/src/apps.rs +++ b/crates/tower-cmd/src/apps.rs @@ -11,7 +11,7 @@ pub fn apps_cmd() -> Command { Command::new("apps") .about("Manage the apps in your current Tower account") .arg_required_else_help(true) - .subcommand(Command::new("list").about("List all of your apps")) + .subcommand(Command::new("list").about("List all apps in your Tower account")) .subcommand( Command::new("show") .arg( @@ -21,7 +21,7 @@ pub fn apps_cmd() -> Command { .required(true) .help("Name of the app"), ) - .about("Show the details about an app in Tower"), + .about("Show details for a Tower app and its recent runs"), ) .subcommand( Command::new("logs") diff --git a/crates/tower-cmd/src/lib.rs b/crates/tower-cmd/src/lib.rs index 2d668f5a..e42789b7 100644 --- a/crates/tower-cmd/src/lib.rs +++ b/crates/tower-cmd/src/lib.rs @@ -14,6 +14,7 @@ mod run; mod schedules; mod secrets; mod session; +mod skill; mod teams; mod towerfile_gen; mod util; @@ -201,6 +202,13 @@ impl App { } } } + Some(("skill", sub_matches)) => match sub_matches.subcommand() { + Some(("generate", _)) => skill::do_skill_generate(root_cmd()).await, + _ => { + skill::skill_cmd().print_help().unwrap(); + std::process::exit(2); + } + }, Some(("mcp-server", args)) => mcp::do_mcp_server(sessionized_config, args) .await .unwrap_or_else(|e| { @@ -255,4 +263,5 @@ fn root_cmd() -> Command { .subcommand(version::version_cmd()) .subcommand(teams::teams_cmd()) .subcommand(mcp::mcp_cmd()) + .subcommand(skill::skill_cmd()) } diff --git a/crates/tower-cmd/src/mcp.rs b/crates/tower-cmd/src/mcp.rs index 49dd7beb..1c140caf 100644 --- 
a/crates/tower-cmd/src/mcp.rs +++ b/crates/tower-cmd/src/mcp.rs @@ -460,7 +460,11 @@ impl TowerService { (result, output) } - #[tool(description = "List all Tower apps in your account")] + // Tool descriptions below should stay in sync with .about() in the corresponding command + // files (apps.rs, secrets.rs, etc.). Proc macros require string literals so they can't + // share constants directly. MCP-only descriptions (with Prerequisites/Optional) are + // intentionally more detailed and don't need a CLI counterpart. + #[tool(description = "List all apps in your Tower account")] async fn tower_apps_list(&self) -> Result { match api::list_apps(&self.config).await { Ok(response) => { diff --git a/crates/tower-cmd/src/secrets.rs b/crates/tower-cmd/src/secrets.rs index 53c63596..e6a95393 100644 --- a/crates/tower-cmd/src/secrets.rs +++ b/crates/tower-cmd/src/secrets.rs @@ -38,7 +38,7 @@ pub fn secrets_cmd() -> Command { .help("List secrets across all environments") .action(clap::ArgAction::SetTrue), ) - .about("List all of your secrets"), + .about("List secrets in your Tower account"), ) .subcommand( Command::new("create") diff --git a/crates/tower-cmd/src/skill.rs b/crates/tower-cmd/src/skill.rs new file mode 100644 index 00000000..13caca60 --- /dev/null +++ b/crates/tower-cmd/src/skill.rs @@ -0,0 +1,243 @@ +use clap::Command; + +pub fn skill_cmd() -> Command { + Command::new("skill") + .about("Generate Claude Code skill files for AI agent integration") + .arg_required_else_help(true) + .subcommand( + Command::new("generate") + .about("Generate a SKILL.md describing how to use Tower with AI agents"), + ) +} + +pub async fn do_skill_generate(root: Command) { + let content = generate_skill_md(root); + print!("{}", content); +} + +fn generate_skill_md(root: Command) -> String { + let mut out = String::new(); + + out.push_str(WORKFLOW_HEADER); + out.push_str("\n\n"); + out.push_str("## Command Reference\n\n"); + out.push_str("*This section is generated from the CLI's 
built-in help.*\n\n"); + + append_command(&mut out, &root, &[], 3); + + out +} + +fn append_command(out: &mut String, cmd: &Command, path: &[&str], depth: usize) { + let subcommands: Vec<_> = cmd + .get_subcommands() + .filter(|c| !c.is_hide_set()) + .collect(); + + for sub in &subcommands { + let name = sub.get_name(); + let mut full_path = path.to_vec(); + full_path.push(name); + + let heading = "#".repeat(depth); + let cmd_str = format!("tower {}", full_path.join(" ")); + out.push_str(&format!("{} `{}`\n\n", heading, cmd_str)); + + if let Some(about) = sub.get_about() { + out.push_str(&format!("{}\n\n", about)); + } + + // Positional args + let positional: Vec<_> = sub + .get_arguments() + .filter(|a| a.is_positional() && !a.is_hide_set()) + .collect(); + + // Named args / flags + let named: Vec<_> = sub + .get_arguments() + .filter(|a| !a.is_positional() && !a.is_hide_set() && a.get_long().is_some()) + .collect(); + + if !positional.is_empty() || !named.is_empty() { + out.push_str("**Arguments:**\n\n"); + for arg in &positional { + let req = if arg.is_required_set() { + " *(required)*" + } else { + "" + }; + let help = arg + .get_help() + .map(|h| format!(" — {}", h)) + .unwrap_or_default(); + out.push_str(&format!( + "- `<{}>` {}{}\n", + arg.get_id(), + req, + help + )); + } + for arg in &named { + let long = arg.get_long().unwrap(); + let req = if arg.is_required_set() { + " *(required)*" + } else { + "" + }; + let help = arg + .get_help() + .map(|h| format!(" — {}", h)) + .unwrap_or_default(); + if let Some(short) = arg.get_short() { + out.push_str(&format!("- `-{}`, `--{}`{}{}\n", short, long, req, help)); + } else { + out.push_str(&format!("- `--{}`{}{}\n", long, req, help)); + } + } + out.push('\n'); + } + + let child_subs: Vec<_> = sub + .get_subcommands() + .filter(|c| !c.is_hide_set()) + .collect(); + + if !child_subs.is_empty() { + append_command(out, sub, &full_path, depth + 1); + } + } +} + +const WORKFLOW_HEADER: &str = r#"--- +description: Use 
Tower to build, run, and deploy Python data apps, pipelines, and AI agents. Covers MCP tools, Towerfile setup, local development, cloud deployment, scheduling, and secrets management. +--- + +# Tower Skill + +Tower is a compute platform for Python data apps, pipelines, and AI agents. + +**The Tower CLI is not in AI training data — always use MCP tools when running inside an agent.** + +## Setup + +First, check if Tower is already installed and authenticated: + +```bash +tower teams list +``` + +If that works, skip to the workflow. Otherwise, install and log in. + +### Install + +Preferred — `uvx` runs Tower with no global install (requires `uv`): + +```bash +uvx tower login +``` + +If you don't have `uvx`, install with pip (Python ≥ 3.9): + +```bash +pip install tower +tower login +``` + +Or with nix: + +```bash +nix run nixpkgs#tower -- login +``` + +### MCP server + +The MCP server gives Claude structured access to Tower tools. If it's not already running (you'll see `tower_*` tools available), start it: + +```bash +uvx tower mcp-server # if using uvx +tower mcp-server # if installed via pip/nix +``` + +If you installed Tower via the Claude Code plugin, this is already configured. Otherwise, copy the `.mcp.json` from the [tower-cli repo](https://github.com/tower/tower-cli) into your project root. + +**If MCP tools are unavailable**, fall back to the CLI equivalents — every MCP tool has a direct CLI counterpart (e.g. `tower apps list`, `tower deploy`). + +## MCP-First, CLI as Fallback + +Use MCP tools when running inside an agent — they return structured data and are easier to compose. Fall back to the CLI for scripting or debugging outside an agent. + +MCP tool names mirror the CLI: `tower apps list` → `tower_apps_list`, `tower deploy` → `tower_deploy`. + +## WORKING_DIRECTORY Parameter + +All MCP tools accept an optional `working_directory` parameter. 
+ +- Default: current working directory +- Use it when managing multiple projects or when the project isn't in the current directory + +``` +tower_file_generate({}) # current directory +tower_file_generate({"working_directory": "/path/to/app"}) # explicit path +tower_run_local({"working_directory": "../other-app"}) +``` + +## Workflow + +### 0. Python project (if new) + +```bash +uv init +``` + +Creates `pyproject.toml`, `main.py`, `README.md`. Keep `pyproject.toml` minimal — `[project]` metadata and dependencies only. No `[build-system]`, `[tool.hatchling]`, or similar. Skip if a `pyproject.toml` already exists. + +### 1. Towerfile + +``` +tower_file_generate → tower_file_update → tower_file_add/edit/remove_parameter → tower_file_validate +``` + +Always use `tower_file_update` or `tower_file_add/edit/remove_parameter` to modify. Never edit the TOML directly. + +### 2. Local development (preferred) + +``` +tower_run_local +``` + +Runs the app locally with access to Tower secrets. Use this to test before deploying. + +### 3. Cloud deployment + +``` +tower_apps_create → tower_deploy → tower_run_remote +``` + +Deploy pushes source code to Tower cloud — no build step needed. + +### 4. Scheduling (recurring jobs) + +``` +tower_schedules_create # set up cron-based recurring runs +tower_schedules_list # view existing schedules +tower_schedules_update # modify timing or parameters +tower_schedules_delete # remove a schedule +``` + +### 5. 
Management & monitoring + +``` +tower_apps_list # list all apps +tower_apps_show # details and recent runs +tower_apps_logs # logs from a specific run +tower_teams_list, tower_teams_switch # manage team context +tower_secrets_create, tower_secrets_list # store credentials and API keys +``` + +## Reminders + +- Tower deploys source code directly — no build tools needed +- Use Tower secrets for sensitive data (database credentials, API keys) +- Prefer `tower_run_local` during development — faster, and has secret access +- Always use MCP tools to modify Towerfiles (never edit TOML files manually)"#; diff --git a/skills/tower/SKILL.md b/skills/tower/SKILL.md new file mode 100644 index 00000000..d232d8c8 --- /dev/null +++ b/skills/tower/SKILL.md @@ -0,0 +1,369 @@ +--- +description: Use Tower to build, run, and deploy Python data apps, pipelines, and AI agents. Covers MCP tools, Towerfile setup, local development, cloud deployment, scheduling, and secrets management. +--- + +# Tower Skill + +Tower is a compute platform for Python data apps, pipelines, and AI agents. + +**The Tower CLI is not in AI training data — always use MCP tools when running inside an agent.** + +## Setup + +First, check if Tower is already installed and authenticated: + +```bash +tower teams list +``` + +If that works, skip to the workflow. Otherwise, install and log in. + +### Install + +Preferred — `uvx` runs Tower with no global install (requires `uv`): + +```bash +uvx tower login +``` + +If you don't have `uvx`, install with pip (Python ≥ 3.9): + +```bash +pip install tower +tower login +``` + +Or with nix: + +```bash +nix run nixpkgs#tower -- login +``` + +### MCP server + +The MCP server gives Claude structured access to Tower tools. If it's not already running (you'll see `tower_*` tools available), start it: + +```bash +uvx tower mcp-server # if using uvx +tower mcp-server # if installed via pip/nix +``` + +If you installed Tower via the Claude Code plugin, this is already configured. 
Otherwise, copy the `.mcp.json` from the [tower-cli repo](https://github.com/tower/tower-cli) into your project root. + +**If MCP tools are unavailable**, fall back to the CLI equivalents — every MCP tool has a direct CLI counterpart (e.g. `tower apps list`, `tower deploy`). + +## MCP-First, CLI as Fallback + +Use MCP tools when running inside an agent — they return structured data and are easier to compose. Fall back to the CLI for scripting or debugging outside an agent. + +MCP tool names mirror the CLI: `tower apps list` → `tower_apps_list`, `tower deploy` → `tower_deploy`. + +## WORKING_DIRECTORY Parameter + +All MCP tools accept an optional `working_directory` parameter. + +- Default: current working directory +- Use it when managing multiple projects or when the project isn't in the current directory + +``` +tower_file_generate({}) # current directory +tower_file_generate({"working_directory": "/path/to/app"}) # explicit path +tower_run_local({"working_directory": "../other-app"}) +``` + +## Workflow + +### 0. Python project (if new) + +```bash +uv init +``` + +Creates `pyproject.toml`, `main.py`, `README.md`. Keep `pyproject.toml` minimal — `[project]` metadata and dependencies only. No `[build-system]`, `[tool.hatchling]`, or similar. Skip if a `pyproject.toml` already exists. + +### 1. Towerfile + +``` +tower_file_generate → tower_file_update → tower_file_add/edit/remove_parameter → tower_file_validate +``` + +Always use `tower_file_update` or `tower_file_add/edit/remove_parameter` to modify. Never edit the TOML directly. + +### 2. Local development (preferred) + +``` +tower_run_local +``` + +Runs the app locally with access to Tower secrets. Use this to test before deploying. + +### 3. Cloud deployment + +``` +tower_apps_create → tower_deploy → tower_run_remote +``` + +Deploy pushes source code to Tower cloud — no build step needed. + +### 4. 
Scheduling (recurring jobs) + +``` +tower_schedules_create # set up cron-based recurring runs +tower_schedules_list # view existing schedules +tower_schedules_update # modify timing or parameters +tower_schedules_delete # remove a schedule +``` + +### 5. Management & monitoring + +``` +tower_apps_list # list all apps +tower_apps_show # details and recent runs +tower_apps_logs # logs from a specific run +tower_teams_list, tower_teams_switch # manage team context +tower_secrets_create, tower_secrets_list # store credentials and API keys +``` + +## Reminders + +- Tower deploys source code directly — no build tools needed +- Use Tower secrets for sensitive data (database credentials, API keys) +- Prefer `tower_run_local` during development — faster, and has secret access +- Always use MCP tools to modify Towerfiles (never edit TOML files manually) + +## Command Reference + +*This section is generated from the CLI's built-in help.* + +### `tower login` + +Create a session with Tower + +**Arguments:** + +- `-n`, `--no-browser` — Do not attempt to open the browser automatically + +### `tower apps` + +Manage the apps in your current Tower account + +#### `tower apps list` + +List all apps in your Tower account + +#### `tower apps show` + +Show details for a Tower app and its recent runs + +**Arguments:** + +- `` *(required)* — Name of the app + +#### `tower apps logs` + +Get the logs from a previous Tower app run + +**Arguments:** + +- `` *(required)* — app_name#run_number +- `` +- `-f`, `--follow` — Follow logs in real time + +#### `tower apps create` + +Create a new app in Tower + +**Arguments:** + +- `-n`, `--name` *(required)* +- `--description` + +#### `tower apps delete` + +Delete an app in Tower + +**Arguments:** + +- `` *(required)* — Name of the app + +#### `tower apps cancel` + +Cancel a running app run + +**Arguments:** + +- `` *(required)* — Name of the app +- `` *(required)* — Run number to cancel + +### `tower catalogs` + +Interact with the catalogs in your 
Tower account + +#### `tower catalogs list` + +List all of your catalogs + +**Arguments:** + +- `-e`, `--environment` — List catalogs in this environment +- `-a`, `--all` — List catalogs across all environments + +#### `tower catalogs show` + +Show the details of a catalog, including its property names + +**Arguments:** + +- `` *(required)* — Name of the catalog +- `-e`, `--environment` — Environment the catalog belongs to + +### `tower schedules` + +Manage schedules for your Tower apps + +#### `tower schedules list` + +List all schedules + +**Arguments:** + +- `-a`, `--app` — Filter schedules by app name +- `-e`, `--environment` — Filter schedules by environment + +#### `tower schedules create` + +Create a new schedule for an app + +**Arguments:** + +- `-a`, `--app` *(required)* — The name of the app to schedule +- `-e`, `--environment` — The environment to run the app in +- `-c`, `--cron` *(required)* — The cron expression defining when the app should run +- `-p`, `--parameter` — Parameters (key=value) to pass to the app + +#### `tower schedules delete` + +Delete a schedule + +**Arguments:** + +- `` *(required)* — The schedule ID to delete + +#### `tower schedules update` + +Update an existing schedule + +**Arguments:** + +- `` *(required)* — ID or name of the schedule to update +- `-c`, `--cron` — The cron expression defining when the app should run +- `-p`, `--parameter` — Parameters (key=value) to pass to the app + +### `tower secrets` + +Interact with the secrets in your Tower account + +#### `tower secrets list` + +List secrets in your Tower account + +**Arguments:** + +- `-s`, `--show` — Show secrets in plain text +- `-e`, `--environment` — List secrets in this environment +- `-a`, `--all` — List secrets across all environments + +#### `tower secrets create` + +Create a new secret in your Tower account + +**Arguments:** + +- `-n`, `--name` *(required)* — Secret name to create +- `-e`, `--environment` — Environment to store the secret in +- `-v`, `--value` 
*(required)* — Secret value to store + +#### `tower secrets delete` + +Delete a secret in Tower + +**Arguments:** + +- `` *(required)* — secret name, or environment/secret_name +- `-e`, `--environment` — environment to delete the secret from + +### `tower environments` + +Manage the environments in your current Tower account + +#### `tower environments list` + +List all of your environments + +#### `tower environments create` + +Create a new environment in Tower + +**Arguments:** + +- `-n`, `--name` *(required)* + +### `tower deploy` + +Deploy your latest code to Tower + +**Arguments:** + +- `-d`, `--dir` — The directory containing the app to deploy +- `-f`, `--create` — Automatically force creation of the app if it doesn't already exist + +### `tower run` + +Run your code in Tower or locally + +**Arguments:** + +- `` — Name of a deployed app to run (uses ./Towerfile if omitted) +- `--dir` — The directory containing the Towerfile +- `--local` — Run this app locally +- `-e`, `--environment` — The environment to invoke the app in +- `-p`, `--parameter` — Parameters (key=value) to pass to the app +- `-d`, `--detached` — Don't follow the run output in your CLI + +### `tower version` + +Print the current version of Tower + +### `tower teams` + +View information about team membership and switch between teams + +#### `tower teams list` + +List teams you belong to + +#### `tower teams switch` + +Switch context to a different team + +**Arguments:** + +- `` *(required)* — Name of the team to switch to + +### `tower mcp-server` + +Runs an MCP server for LLM interaction + +**Arguments:** + +- `-t`, `--transport` — Transport mode +- `-p`, `--port` — Port for HTTP/SSE server (default: 34567) + +### `tower skill` + +Generate Claude Code skill files for AI agent integration + +#### `tower skill generate` + +Generate a SKILL.md describing how to use Tower with AI agents + From 49f0d52c4ee1d3d1be32e3b81c1c88d188505e35 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Mon, 20 Apr 2026 
23:28:37 +0100 Subject: [PATCH 2/7] feat: multi-environment support for tower deploy (#248) * feat: add --environment and --all flags to tower deploy command Support deploying to specific environments or all environments at once: - `tower deploy` (default, unchanged behavior) - `tower deploy --environment=production` / `tower deploy -e staging` - `tower deploy --all` The flags are mutually exclusive. The environment is passed as a query parameter on the deploy API endpoint. * Updates to support deployment across all deployments * chore: Remove the default deploy target * chore: Encode environment name before sending it --- crates/tower-cmd/src/deploy.rs | 165 +++++++++++++++++++++++++++- crates/tower-cmd/src/mcp.rs | 3 +- crates/tower-cmd/src/util/deploy.rs | 12 +- 3 files changed, 174 insertions(+), 6 deletions(-) diff --git a/crates/tower-cmd/src/deploy.rs b/crates/tower-cmd/src/deploy.rs index 15baadf7..433cfe30 100644 --- a/crates/tower-cmd/src/deploy.rs +++ b/crates/tower-cmd/src/deploy.rs @@ -24,6 +24,20 @@ pub fn deploy_cmd() -> Command { .help("Automatically force creation of the app if it doesn't already exist") .action(clap::ArgAction::SetTrue), ) + .arg( + Arg::new("environment") + .long("environment") + .short('e') + .help("The environment to deploy to") + .conflicts_with("all"), + ) + .arg( + Arg::new("all") + .long("all") + .help("Deploy to all environments") + .action(clap::ArgAction::SetTrue) + .conflicts_with("environment"), + ) .about("Deploy your latest code to Tower") } @@ -35,10 +49,30 @@ fn resolve_path(args: &ArgMatches) -> PathBuf { } } +/// Resolves the target environment from CLI args. 
+/// +/// - `--all` → `DeployTarget::All` +/// - `--environment <env>` → `DeployTarget::Environment(name)` +/// - neither → `DeployTarget::Default` +#[derive(Debug, Clone)] +pub enum DeployTarget { + Environment(String), + All, +} + pub async fn do_deploy(config: Config, args: &ArgMatches) { let dir = resolve_path(args); let create_app = args.get_flag("create"); - if let Err(err) = deploy_from_dir(config, dir, create_app).await { + + let target = if args.get_flag("all") { + DeployTarget::All + } else if let Some(env) = args.get_one::<String>("environment") { + DeployTarget::Environment(env.clone()) + } else { + DeployTarget::Environment("default".to_string()) + }; + + if let Err(err) = deploy_from_dir(config, dir, create_app, target).await { match err { crate::Error::ApiDeployError { source } => { output::tower_error_and_die(source, "Deploying app failed") @@ -66,6 +100,7 @@ pub async fn deploy_from_dir( config: Config, dir: PathBuf, create_app: bool, + target: DeployTarget, ) -> Result<(), crate::Error> { debug!("Building package from directory: {:?}", dir); @@ -96,23 +131,145 @@ pub async fn deploy_from_dir( }; spinner.success(); - do_deploy_package(api_config, package, &towerfile).await + do_deploy_package(api_config, package, &towerfile, target).await } async fn do_deploy_package( api_config: Configuration, package: Package, towerfile: &Towerfile, + target: DeployTarget, ) -> Result<(), crate::Error> { - let res = util::deploy::deploy_app_package(&api_config, &towerfile.app.name, package).await; + let (environment, all_environments) = match &target { + DeployTarget::All => (None, true), + DeployTarget::Environment(env) => (Some(env.as_str()), false), + }; + + let res = util::deploy::deploy_app_package( + &api_config, + &towerfile.app.name, + package, + environment, + all_environments, + ) + .await; match res { Ok(resp) => { let version = resp.version; - let line = format!("Version `{}` has been deployed to Tower!", version.version); + let line = match &target { + 
DeployTarget::All => format!( + "Version `{}` has been deployed to all environments!", + version.version + ), + DeployTarget::Environment(env) => format!( + "Version `{}` has been deployed to environment '{}'!", + version.version, env + ), + }; output::success(&line); Ok(()) } Err(err) => Err(crate::Error::ApiDeployError { source: err }), } } + +#[cfg(test)] +mod tests { + use super::deploy_cmd; + + fn parse(args: &[&str]) -> Result<clap::ArgMatches, clap::Error> { + let mut full = vec!["deploy"]; + full.extend_from_slice(args); + deploy_cmd().try_get_matches_from(full) + } + + #[test] + fn no_args_uses_defaults() { + let m = parse(&[]).unwrap(); + assert_eq!(m.get_one::<String>("environment"), None); + assert!(!m.get_flag("all")); + } + + #[test] + fn environment_flag_long() { + let m = parse(&["--environment", "production"]).unwrap(); + assert_eq!( + m.get_one::<String>("environment").map(|s| s.as_str()), + Some("production") + ); + } + + #[test] + fn environment_flag_short() { + let m = parse(&["-e", "staging"]).unwrap(); + assert_eq!( + m.get_one::<String>("environment").map(|s| s.as_str()), + Some("staging") + ); + } + + #[test] + fn environment_flag_equals_syntax() { + let m = parse(&["--environment=production"]).unwrap(); + assert_eq!( + m.get_one::<String>("environment").map(|s| s.as_str()), + Some("production") + ); + } + + #[test] + fn all_flag() { + let m = parse(&["--all"]).unwrap(); + assert!(m.get_flag("all")); + assert_eq!(m.get_one::<String>("environment"), None); + } + + #[test] + fn environment_and_all_conflict() { + let err = parse(&["--environment", "production", "--all"]).unwrap_err(); + assert_eq!(err.kind(), clap::error::ErrorKind::ArgumentConflict); + } + + #[test] + fn all_and_environment_conflict() { + let err = parse(&["--all", "--environment", "staging"]).unwrap_err(); + assert_eq!(err.kind(), clap::error::ErrorKind::ArgumentConflict); + } + + #[test] + fn create_flag_with_environment() { + let m = parse(&["--create", "--environment", "production"]).unwrap(); + assert!(m.get_flag("create")); + assert_eq!( + 
m.get_one::<String>("environment").map(|s| s.as_str()), + Some("production") + ); + } + + #[test] + fn create_flag_with_all() { + let m = parse(&["--create", "--all"]).unwrap(); + assert!(m.get_flag("create")); + assert!(m.get_flag("all")); + } + + #[test] + fn dir_with_environment() { + let m = parse(&["-d", "/tmp/myapp", "-e", "production"]).unwrap(); + assert_eq!( + m.get_one::<String>("dir").map(|s| s.as_str()), + Some("/tmp/myapp") + ); + assert_eq!( + m.get_one::<String>("environment").map(|s| s.as_str()), + Some("production") + ); + } + + #[test] + fn help_flag_shows_help() { + let err = parse(&["--help"]).unwrap_err(); + assert_eq!(err.kind(), clap::error::ErrorKind::DisplayHelp); + } +} diff --git a/crates/tower-cmd/src/mcp.rs b/crates/tower-cmd/src/mcp.rs index 1c140caf..d46ba454 100644 --- a/crates/tower-cmd/src/mcp.rs +++ b/crates/tower-cmd/src/mcp.rs @@ -712,8 +712,9 @@ impl TowerService { Parameters(request): Parameters, ) -> Result { let working_dir = Self::resolve_working_directory(&request.common); + let deploy_target = deploy::DeployTarget::Environment("default".to_string()); - match deploy::deploy_from_dir(self.config.clone(), working_dir, true).await { + match deploy::deploy_from_dir(self.config.clone(), working_dir, true, deploy_target).await { Ok(_) => Self::text_success("Deploy completed successfully".to_string()), Err(e) => Self::error_result("Deploy failed", e), } diff --git a/crates/tower-cmd/src/util/deploy.rs b/crates/tower-cmd/src/util/deploy.rs index ad995887..62e01184 100644 --- a/crates/tower-cmd/src/util/deploy.rs +++ b/crates/tower-cmd/src/util/deploy.rs @@ -8,6 +8,7 @@ use tower_package::{compute_sha256_file, Package}; use tower_telemetry::debug; use tower_api::apis::configuration::Configuration; +use tower_api::apis::urlencode; use tower_api::apis::default_api::DeployAppError; use tower_api::apis::Error; use tower_api::apis::ResponseContent; @@ -96,6 +97,8 @@ pub async fn deploy_app_package( api_config: &tower_api::apis::configuration::Configuration, 
app_name: &str, package: Package, + environment: Option<&str>, + all_environments: bool, ) -> Result> { let progress_bar = Arc::new(Mutex::new(output::progress_bar("Deploying to Tower..."))); @@ -116,7 +119,14 @@ pub async fn deploy_app_package( // Create the URL for the API endpoint let base_url = &api_config.base_path; - let url = format!("{}/apps/{}/deploy", base_url, app_name); + let url = if all_environments { + format!("{}/apps/{}/deploy?all_environments=true", base_url, app_name) + } else if let Some(env) = environment { + let encoded_environment = urlencode(env); + format!("{}/apps/{}/deploy?environment={}", base_url, app_name, encoded_environment) + } else { + format!("{}/apps/{}/deploy", base_url, app_name) + }; // Upload the package let response = upload_file_with_progress( From 46474f5e45dcd19b786166629b727b1f5a90a48b Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 21 Apr 2026 09:18:03 +0100 Subject: [PATCH 3/7] chore: Docs bug from CodeRabbit --- crates/tower-cmd/src/deploy.rs | 1 - crates/tower-cmd/src/util/deploy.rs | 8 +++++--- skills/tower/SKILL.md | 2 ++ 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/tower-cmd/src/deploy.rs b/crates/tower-cmd/src/deploy.rs index 433cfe30..d0eca3df 100644 --- a/crates/tower-cmd/src/deploy.rs +++ b/crates/tower-cmd/src/deploy.rs @@ -53,7 +53,6 @@ fn resolve_path(args: &ArgMatches) -> PathBuf { /// /// - `--all` → `DeployTarget::All` /// - `--environment ` → `DeployTarget::Environment(name)` -/// - neither → `DeployTarget::Default` #[derive(Debug, Clone)] pub enum DeployTarget { Environment(String), diff --git a/crates/tower-cmd/src/util/deploy.rs b/crates/tower-cmd/src/util/deploy.rs index 62e01184..d38f75d3 100644 --- a/crates/tower-cmd/src/util/deploy.rs +++ b/crates/tower-cmd/src/util/deploy.rs @@ -119,13 +119,15 @@ pub async fn deploy_app_package( // Create the URL for the API endpoint let base_url = &api_config.base_path; + let encoded_app_name = urlencode(app_name); + let url = if 
all_environments { - format!("{}/apps/{}/deploy?all_environments=true", base_url, app_name) + format!("{}/apps/{}/deploy?all_environments=true", base_url, encoded_app_name) } else if let Some(env) = environment { let encoded_environment = urlencode(env); - format!("{}/apps/{}/deploy?environment={}", base_url, app_name, encoded_environment) + format!("{}/apps/{}/deploy?environment={}", base_url, encoded_app_name, encoded_environment) } else { - format!("{}/apps/{}/deploy", base_url, app_name) + format!("{}/apps/{}/deploy", base_url, encoded_app_name) }; // Upload the package diff --git a/skills/tower/SKILL.md b/skills/tower/SKILL.md index d232d8c8..2db92717 100644 --- a/skills/tower/SKILL.md +++ b/skills/tower/SKILL.md @@ -315,6 +315,8 @@ Deploy your latest code to Tower **Arguments:** - `-d`, `--dir` — The directory containing the app to deploy +- `-a`, `--all` - Deploy this app to all environments. You can only specify `-a` or `-e`, not both. +- `-e`, `--environment` — The environment to deploy this app to. You can only specifiy `-a` or `-e`, not both. - `-f`, `--create` — Automatically force creation of the app if it doesn't already exist ### `tower run` From 922ed265b500e0c141bc1af26a588df590b76196 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 21 Apr 2026 11:56:27 +0100 Subject: [PATCH 4/7] chore: regenerate SKILL.md (#256) Co-authored-by: bradhe <310958+bradhe@users.noreply.github.com> --- skills/tower/SKILL.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/skills/tower/SKILL.md b/skills/tower/SKILL.md index 2db92717..5a3a0176 100644 --- a/skills/tower/SKILL.md +++ b/skills/tower/SKILL.md @@ -315,9 +315,9 @@ Deploy your latest code to Tower **Arguments:** - `-d`, `--dir` — The directory containing the app to deploy -- `-a`, `--all` - Deploy this app to all environments. You can only specify `-a` or `-e`, not both. 
-- `-e`, `--environment` — The environment to deploy this app to. You can only specifiy `-a` or `-e`, not both. - `-f`, `--create` — Automatically force creation of the app if it doesn't already exist +- `-e`, `--environment` — The environment to deploy to +- `--all` — Deploy to all environments ### `tower run` From 315e18655c0a79a0693bf26b74eb4606b48e6bf2 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 21 Apr 2026 13:15:34 +0100 Subject: [PATCH 5/7] Bump version to v0.3.59 --- Cargo.lock | 22 +++++++++++----------- Cargo.toml | 2 +- pyproject.toml | 2 +- uv.lock | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cb7ec09a..738500eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -491,7 +491,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.58" +version = "0.3.59" dependencies = [ "base64", "chrono", @@ -598,7 +598,7 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto" -version = "0.3.58" +version = "0.3.59" dependencies = [ "aes-gcm", "base64", @@ -3316,7 +3316,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.58" +version = "0.3.59" dependencies = [ "pem", "rsa", @@ -3586,7 +3586,7 @@ checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tower" -version = "0.3.58" +version = "0.3.59" dependencies = [ "config", "pyo3", @@ -3614,7 +3614,7 @@ dependencies = [ [[package]] name = "tower-api" -version = "0.3.58" +version = "0.3.59" dependencies = [ "reqwest", "serde", @@ -3626,7 +3626,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.58" +version = "0.3.59" dependencies = [ "axum", "bytes", @@ -3696,7 +3696,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.58" +version = "0.3.59" dependencies = [ "async-compression", "config", @@ -3714,7 +3714,7 @@ dependencies = [ [[package]] name = 
"tower-runtime" -version = "0.3.58" +version = "0.3.59" dependencies = [ "async-trait", "chrono", @@ -3737,7 +3737,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-telemetry" -version = "0.3.58" +version = "0.3.59" dependencies = [ "tracing", "tracing-appender", @@ -3746,7 +3746,7 @@ dependencies = [ [[package]] name = "tower-uv" -version = "0.3.58" +version = "0.3.59" dependencies = [ "async-compression", "async_zip", @@ -3764,7 +3764,7 @@ dependencies = [ [[package]] name = "tower-version" -version = "0.3.58" +version = "0.3.59" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index 3465a8b1..e032d666 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.58" +version = "0.3.59" description = "Tower is the best way to host Python data apps in production" rust-version = "1.81" authors = ["Brad Heller "] diff --git a/pyproject.toml b/pyproject.toml index 8e7dae54..7372df97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.58" +version = "0.3.59" description = "Tower CLI and runtime environment for Tower." authors = [{ name = "Tower Computing Inc.", email = "brad@tower.dev" }] readme = "README.md" diff --git a/uv.lock b/uv.lock index b59ac1fa..3040e62e 100644 --- a/uv.lock +++ b/uv.lock @@ -2598,7 +2598,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.58" +version = "0.3.59" source = { editable = "." 
} dependencies = [ { name = "attrs" }, From 4535c1048ae6683fb456bebc5847a0bea0df108c Mon Sep 17 00:00:00 2001 From: Ben Lovell Date: Wed, 22 Apr 2026 14:48:48 +0200 Subject: [PATCH 6/7] refactor(tower-package): extract pure core crate for bundle building (#257) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor(tower-package): extract pure core crate for bundle building Splits tower-package into a pure tower-package-core crate (no tokio, fs, or glob) and a native shell. The core exposes build_package(PackageInputs) which produces a gzipped tar from in-memory bytes, enabling future wasm32 targets. The native crate handles filesystem walking, globbing, and canonicalization, then delegates to the core. Output is now byte-deterministic: entries are sorted by archive name, tar header mtime/uid/gid are zeroed with mode 0644, and the gzip header drops its embedded mtime. The bundle format (ustar+gzip, app/modules layout, MANIFEST, Towerfile) and checksum algorithm are unchanged, so existing server-side unpacking continues to work. * feat(tower-package-wasm): WASM bindings for bundle builder New crate exposing tower_package_core::build_package to TypeScript via wasm-bindgen. The crate produces an npm-publishable package with typed bindings (Uint8Array in, Uint8Array out). Output matches what the native tower-package produces, so bundles built in the browser or Node deploy through the server's existing unpack path without changes. The flake devshell now carries wasm-pack, wasm-bindgen-cli, and binaryen, and the rust toolchain picks up the wasm32-unknown-unknown target. Build the package with crates/tower-package-wasm/scripts/build.sh, optionally passing bundler (default), web, or nodejs. * test(tower-package-wasm): TypeScript test suite + CI workflow Node-based test suite using tsx and node:test. 
Covers the output shape, byte-determinism, sort order, MANIFEST contents, module file flow, and checksum divergence on different inputs. Parses the tar stream inline so there are no npm deps beyond tsx and type declarations. The workflow builds the nodejs-target wasm package and runs the tests on every PR that touches tower-package-core, tower-package-wasm, or the workspace manifest. * refactor(tower-package): collapse core/native/wasm into a single crate Folds tower-package-core and tower-package-wasm back into tower-package, using cargo features instead of separate crates to split the shells. Default features remain compatible with existing Rust callers: cargo build on a workspace member still gets tokio, glob, Package::build, and everything else it used to. - native feature (default): tokio, glob, tmpdir, Package, PackageSpec, FileResolver — the CLI's usual path. - wasm feature: wasm-bindgen + serde-wasm-bindgen + serde_bytes, exposes buildBundle to JavaScript. - Pure core (Entry, Manifest, build_package, sorting, hashing) is always compiled under both shells. Build the wasm package with crates/tower-package/scripts/build.sh, which invokes wasm-pack with --no-default-features --features wasm and renames the npm package to tower-package-wasm so the crate and npm names can diverge. Ten native tests and eight TypeScript tests pass. * ci(tower-package): publish tower-package-wasm to npm on release Mirrors the publish-pypi subworkflow: on every release tag, cargo-dist's release.yml calls a new publish-npm.yml that builds the bundler-target wasm package and runs npm publish. Version tracks Cargo.toml, so the npm package stays in lockstep with the Rust crate. First cut publishes the wasm package only; the consumer brings their own bundler. * ci(tower-package): use npm trusted publishing instead of a token Drops NPM_TOKEN in favour of OIDC; no long-lived secret to rotate. 
Requires a trusted publisher to be configured on the package at npmjs.com matching this repo, this workflow filename, and the release environment. Bumps node to 22 and pulls npm@latest to ensure the CLI supports OIDC publishing. * chore: add Ben Lovell as workspace author * refactor(tower-package): simplify code in core, native, and wasm tests * fix(tower-package): restore comment; tighten publish-npm workflow header * chore: bump nixpkgs input * refactor(tower-package): rename bundle to package and drop schedule * refactor(tower-package): derive manifest from Towerfile bytes * refactor(tower-package): own the Towerfile schema Move Towerfile/App/Parameter from config into a new tower-package::towerfile module and flip the crate dependency — config now re-exports them so existing `use config::Towerfile` call sites keep working. core::build_package parses the bytes directly through Towerfile::from_toml, retiring the private TowerfileSpec shadow struct and unifying Parameter (description: String) across authoring and manifest views. * fix(tower-package): unused Path import on wasm; drop obsolete schedule test Gate the Path import out — it's only needed inside save() under the native feature. Also delete test_manifest_contains_schedule; schedule was removed from the manifest in f625834. 
--- .github/workflows/publish-npm.yml | 48 ++ .github/workflows/release.yml | 10 + .github/workflows/test-wasm.yml | 52 ++ Cargo.lock | 42 +- Cargo.toml | 4 +- crates/config/Cargo.toml | 2 +- crates/config/src/error.rs | 24 - crates/config/src/lib.rs | 3 +- crates/tower-cmd/src/deploy.rs | 10 +- crates/tower-cmd/src/error.rs | 14 +- crates/tower-cmd/src/output.rs | 19 +- crates/tower-cmd/src/package.rs | 2 +- crates/tower-cmd/src/run.rs | 2 +- crates/tower-cmd/src/util/deploy.rs | 4 +- crates/tower-package/.gitignore | 1 + crates/tower-package/Cargo.toml | 47 +- crates/tower-package/README.md | 72 ++ crates/tower-package/scripts/build.sh | 31 + crates/tower-package/src/core.rs | 309 +++++++ crates/tower-package/src/error.rs | 32 + crates/tower-package/src/lib.rs | 764 +----------------- crates/tower-package/src/native.rs | 471 +++++++++++ .../src/towerfile.rs | 75 +- crates/tower-package/src/wasm.rs | 58 ++ crates/tower-package/test/.gitignore | 2 + crates/tower-package/test/build.test.ts | 139 ++++ crates/tower-package/test/package.json | 13 + crates/tower-package/test/tsconfig.json | 13 + crates/tower-package/tests/package_test.rs | 109 +-- crates/tower-package/types.d.ts | 23 + flake.lock | 18 +- flake.nix | 7 +- tests/tower/test_build_package.py | 17 - 33 files changed, 1498 insertions(+), 939 deletions(-) create mode 100644 .github/workflows/publish-npm.yml create mode 100644 .github/workflows/test-wasm.yml create mode 100644 crates/tower-package/.gitignore create mode 100644 crates/tower-package/README.md create mode 100755 crates/tower-package/scripts/build.sh create mode 100644 crates/tower-package/src/core.rs create mode 100644 crates/tower-package/src/native.rs rename crates/{config => tower-package}/src/towerfile.rs (91%) create mode 100644 crates/tower-package/src/wasm.rs create mode 100644 crates/tower-package/test/.gitignore create mode 100644 crates/tower-package/test/build.test.ts create mode 100644 crates/tower-package/test/package.json create mode 
100644 crates/tower-package/test/tsconfig.json create mode 100644 crates/tower-package/types.d.ts diff --git a/.github/workflows/publish-npm.yml b/.github/workflows/publish-npm.yml new file mode 100644 index 00000000..9661ac41 --- /dev/null +++ b/.github/workflows/publish-npm.yml @@ -0,0 +1,48 @@ +# Publish tower-package-wasm to npm. +# Uses OIDC Trusted Publishing — no NPM_TOKEN needed, but the package must have a trusted publisher +# configured on npmjs.com matching this repo, this workflow file, and the `release` environment. +name: "[tower] Publish to npm" + +on: + workflow_call: + inputs: + plan: + required: true + type: string + +jobs: + npm-publish: + name: Upload to npm + runs-on: ubuntu-latest + environment: + name: release + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@v6 + + - name: Set up Rust + uses: actions-rust-lang/setup-rust-toolchain@v1.11.0 + with: + target: wasm32-unknown-unknown + + - name: Install wasm-pack + uses: jetli/wasm-pack-action@v0.4.0 + + - name: Set up Node + uses: actions/setup-node@v4 + with: + node-version: 22 + registry-url: 'https://registry.npmjs.org' + + - name: Upgrade npm for Trusted Publishing + run: npm install -g npm@latest + + - name: Build wasm package + working-directory: crates/tower-package + run: ./scripts/build.sh bundler + + - name: Publish to npm + working-directory: crates/tower-package/pkg + run: npm publish --access public --provenance diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6fada1d9..15429ade 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -302,6 +302,16 @@ jobs: "id-token": "write" "packages": "write" + custom-publish-npm: + needs: + - plan + - host + if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }} + uses: ./.github/workflows/publish-npm.yml + with: + plan: ${{ needs.plan.outputs.val }} + secrets: inherit + announce: 
needs: - plan diff --git a/.github/workflows/test-wasm.yml b/.github/workflows/test-wasm.yml new file mode 100644 index 00000000..9b8813d0 --- /dev/null +++ b/.github/workflows/test-wasm.yml @@ -0,0 +1,52 @@ +name: "[tower] Test wasm" + +on: + pull_request: + paths: + - 'crates/tower-package/**' + - '.github/workflows/test-wasm.yml' + - 'Cargo.toml' + - 'Cargo.lock' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + RUST_BACKTRACE: 1 + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + + - name: Set up Rust + uses: actions-rust-lang/setup-rust-toolchain@v1.11.0 + with: + target: wasm32-unknown-unknown + + - name: Cache Rust + uses: Swatinem/rust-cache@v2 + with: + save-if: ${{ github.ref_name == 'main' }} + + - name: Install wasm-pack + uses: jetli/wasm-pack-action@v0.4.0 + + - name: Build wasm package + working-directory: crates/tower-package + run: ./scripts/build.sh nodejs + + - name: Set up Node + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install test deps + working-directory: crates/tower-package/test + run: npm install + + - name: Run tests + working-directory: crates/tower-package/test + run: npm test diff --git a/Cargo.lock b/Cargo.lock index 738500eb..21f48ccf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -503,8 +503,8 @@ dependencies = [ "snafu", "testutils", "tokio", - "toml", "tower-api", + "tower-package", "tower-telemetry", "url", ] @@ -2882,6 +2882,27 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-wasm-bindgen" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "serde_bytes" +version = "0.11.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a5d440709e79d88e51ac01c4b72fc6cb7314017bb7da9eeff678aa94c10e3ea8" +dependencies = [ + "serde", + "serde_core", +] + [[package]] name = "serde_core" version = "1.0.228" @@ -3276,6 +3297,17 @@ dependencies = [ "syn 2.0.104", ] +[[package]] +name = "tar" +version = "0.4.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22692a6476a21fa75fdfc11d452fda482af402c008cdbaf3476414e122040973" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "target-lexicon" version = "0.13.4" @@ -3699,17 +3731,23 @@ name = "tower-package" version = "0.3.59" dependencies = [ "async-compression", - "config", + "flate2", "glob", "serde", + "serde-wasm-bindgen", + "serde_bytes", "serde_json", "sha2", "snafu", + "tar", + "testutils", "tmpdir", "tokio", "tokio-stream", "tokio-tar", + "toml", "tower-telemetry", + "wasm-bindgen", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index e032d666..e8f3fe2f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" version = "0.3.59" description = "Tower is the best way to host Python data apps in production" rust-version = "1.81" -authors = ["Brad Heller "] +authors = ["Brad Heller ", "Ben Lovell "] license = "MIT" repository = "https://github.com/tower/tower-cli" @@ -28,6 +28,7 @@ config = { path = "crates/config" } crypto = { path = "crates/crypto" } ctrlc = "3" dirs = "5" +flate2 = "1" fs2 = "0.4" futures = "0.3" futures-util = "0.3" @@ -51,6 +52,7 @@ serde = "1" serde_json = "1.0" sha2 = "0.10" snafu = "0.7" +tar = "0.4" spinners = "4" testutils = { path = "crates/testutils" } tmpdir = "1.0" diff --git a/crates/config/Cargo.toml b/crates/config/Cargo.toml index 39ec33a7..3cf75e69 100644 --- a/crates/config/Cargo.toml +++ b/crates/config/Cargo.toml @@ -16,8 +16,8 @@ serde = { workspace = true } serde_json = { workspace = true } snafu = { workspace = true } tokio = { workspace = true } -toml = { workspace = true } testutils = { workspace = true } url = { workspace = true } 
tower-api = { workspace = true } +tower-package = { workspace = true } tower-telemetry = { workspace = true } diff --git a/crates/config/src/error.rs b/crates/config/src/error.rs index 53589af2..e2fbbcc0 100644 --- a/crates/config/src/error.rs +++ b/crates/config/src/error.rs @@ -1,5 +1,4 @@ use snafu::prelude::*; -use tower_telemetry::debug; #[derive(Debug, Snafu)] pub enum Error { @@ -15,15 +14,6 @@ pub enum Error { #[snafu(display("No session file found"))] NoSession, - #[snafu(display("Invalid Towerfile"))] - InvalidTowerfile, - - #[snafu(display("No Towerfile was found in this directory"))] - MissingTowerfile, - - #[snafu(display("Missing required app field `{}` in Towerfile", field))] - MissingRequiredAppField { field: String }, - #[snafu(display("Team with name {} not found!", team_name))] TeamNotFound { team_name: String }, @@ -46,17 +36,3 @@ impl From for Error { Error::NoSession } } - -impl From for Error { - fn from(err: toml::de::Error) -> Self { - debug!("error parsing Towerfile TOMl: {}", err); - Error::InvalidTowerfile - } -} - -impl From for Error { - fn from(err: toml::ser::Error) -> Self { - debug!("error serializing Towerfile TOML: {}", err); - Error::InvalidTowerfile - } -} diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index ca01b55f..cca17e88 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -5,11 +5,10 @@ use url::Url; mod error; mod session; -mod towerfile; pub use error::Error; pub use session::{default_tower_url, Session, Team, Token, User}; -pub use towerfile::{Parameter, Towerfile}; +pub use tower_package::{Parameter, Towerfile}; pub use session::{get_last_version_check_timestamp, set_last_version_check_timestamp}; diff --git a/crates/tower-cmd/src/deploy.rs b/crates/tower-cmd/src/deploy.rs index d0eca3df..069c1ca9 100644 --- a/crates/tower-cmd/src/deploy.rs +++ b/crates/tower-cmd/src/deploy.rs @@ -87,7 +87,7 @@ pub async fn do_deploy(config: Config, args: &ArgMatches) { std::process::exit(1); } 
crate::Error::TowerfileLoadFailed { source, .. } => { - output::config_error(source); + output::package_error(source); std::process::exit(1); } _ => output::die(&err.to_string()), @@ -105,7 +105,13 @@ pub async fn deploy_from_dir( let path = dir.join("Towerfile"); - let towerfile = Towerfile::from_path(path)?; + let path_display = path.display().to_string(); + let towerfile = Towerfile::from_path(path).map_err(|source| { + crate::Error::TowerfileLoadFailed { + path: path_display, + source, + } + })?; let api_config = config.into(); // Add app existence check before proceeding diff --git a/crates/tower-cmd/src/error.rs b/crates/tower-cmd/src/error.rs index 6fbfc8e1..94dfaff7 100644 --- a/crates/tower-cmd/src/error.rs +++ b/crates/tower-cmd/src/error.rs @@ -48,7 +48,10 @@ pub enum Error { ApiError, #[snafu(display("Failed to load Towerfile from {}: {}", path, source))] - TowerfileLoadFailed { path: String, source: config::Error }, + TowerfileLoadFailed { + path: String, + source: tower_package::Error, + }, // Towerfile generation errors #[snafu(display("pyproject.toml not found at {}", path))] @@ -143,15 +146,6 @@ impl From for Error { } } -impl From for Error { - fn from(source: config::Error) -> Self { - Self::TowerfileLoadFailed { - path: "unknown".to_string(), - source, - } - } -} - impl From for Error { fn from(source: std::net::AddrParseError) -> Self { Self::AddressParseError { source } diff --git a/crates/tower-cmd/src/output.rs b/crates/tower-cmd/src/output.rs index eb324ba6..1fa99fc4 100644 --- a/crates/tower-cmd/src/output.rs +++ b/crates/tower-cmd/src/output.rs @@ -167,6 +167,16 @@ pub fn package_error(err: tower_package::Error) { tower_package::Error::InvalidGlob { message } => { format!("Invalid file glob pattern: {}", message) } + tower_package::Error::InvalidTowerfile { message } => { + format!("Invalid Towerfile: {}", message) + } + tower_package::Error::MissingTowerfile => { + "No Towerfile was found in the target directory".to_string() + } + 
tower_package::Error::MissingRequiredAppField { field } => { + format!("Missing required app field `{}` in Towerfile", field) + } + tower_package::Error::Io { source } => format!("IO error: {}", source), }; let line = format!("{} {}\n", "Package error:".red(), msg); @@ -207,15 +217,6 @@ pub fn config_error(err: config::Error) { config::Error::NoHomeDir => "No home directory found".to_string(), config::Error::Io { ref source } => format!("IO error: {}", source), config::Error::NoSession => "No session".to_string(), - config::Error::InvalidTowerfile => { - "Couldn't read the Towerfile in this directory".to_string() - } - config::Error::MissingTowerfile => { - "No Towerfile was found in the target directory".to_string() - } - config::Error::MissingRequiredAppField { ref field } => { - format!("Missing required app field `{}` in Towerfile", field) - } config::Error::TeamNotFound { ref team_name } => { format!("Team with name `{}` not found!", team_name) } diff --git a/crates/tower-cmd/src/package.rs b/crates/tower-cmd/src/package.rs index 10010321..c325bd26 100644 --- a/crates/tower-cmd/src/package.rs +++ b/crates/tower-cmd/src/package.rs @@ -72,7 +72,7 @@ pub async fn do_package(_config: Config, args: &ArgMatches) { } } Err(err) => { - output::config_error(err); + output::package_error(err); } } } diff --git a/crates/tower-cmd/src/run.rs b/crates/tower-cmd/src/run.rs index 6bf2f7cd..3378dc83 100644 --- a/crates/tower-cmd/src/run.rs +++ b/crates/tower-cmd/src/run.rs @@ -508,7 +508,7 @@ fn handle_run_completion(res: Result) -> Result< /// from the parsed CLI args. 
fn get_run_parameters( args: &ArgMatches, -) -> Result<(bool, PathBuf, HashMap, Option), config::Error> { +) -> Result<(bool, PathBuf, HashMap, Option), crate::Error> { let local = *args.get_one::("local").unwrap(); let path = resolve_path(args); let params = parse_parameters(args); diff --git a/crates/tower-cmd/src/util/deploy.rs b/crates/tower-cmd/src/util/deploy.rs index d38f75d3..5a6d3619 100644 --- a/crates/tower-cmd/src/util/deploy.rs +++ b/crates/tower-cmd/src/util/deploy.rs @@ -35,9 +35,9 @@ pub async fn upload_file_with_progress( let file_size = metadata.len(); // Check if bundle size exceeds the maximum allowed size - if file_size > tower_package::MAX_BUNDLE_SIZE { + if file_size > tower_package::MAX_PACKAGE_SIZE { let size_mb = file_size as f64 / (1024.0 * 1024.0); - let max_mb = tower_package::MAX_BUNDLE_SIZE as f64 / (1024.0 * 1024.0); + let max_mb = tower_package::MAX_PACKAGE_SIZE as f64 / (1024.0 * 1024.0); output::die(&format!( "Your App is too big! ({:.2} MB) exceeds maximum allowed size ({:.0} MB). 
Please consider reducing app size by removing unnecessary files or import_paths in the Towerfile.", size_mb, max_mb diff --git a/crates/tower-package/.gitignore b/crates/tower-package/.gitignore new file mode 100644 index 00000000..01d0a084 --- /dev/null +++ b/crates/tower-package/.gitignore @@ -0,0 +1 @@ +pkg/ diff --git a/crates/tower-package/Cargo.toml b/crates/tower-package/Cargo.toml index 1e302058..709358be 100644 --- a/crates/tower-package/Cargo.toml +++ b/crates/tower-package/Cargo.toml @@ -6,16 +6,45 @@ edition = { workspace = true } rust-version = { workspace = true } license = { workspace = true } +[lib] +crate-type = ["cdylib", "rlib"] + +[features] +default = ["native"] +native = [ + "dep:async-compression", + "dep:glob", + "dep:tmpdir", + "dep:tokio", + "dep:tokio-stream", + "dep:tokio-tar", + "dep:tower-telemetry", +] +wasm = ["dep:wasm-bindgen", "dep:serde-wasm-bindgen", "dep:serde_bytes"] + [dependencies] -async-compression = { workspace = true } -config = { workspace = true } -glob = { workspace = true } -serde = { workspace = true } +# Pure core — always compiled. +flate2 = { workspace = true } +serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } sha2 = { workspace = true } snafu = { workspace = true } -tmpdir = { workspace = true } -tokio = { workspace = true } -tokio-stream = { workspace = true } -tokio-tar = { workspace = true } -tower-telemetry = { workspace = true } +tar = { workspace = true } +toml = { workspace = true } + +# Native shell — gated behind the `native` feature. +async-compression = { workspace = true, optional = true } +glob = { workspace = true, optional = true } +tmpdir = { workspace = true, optional = true } +tokio = { workspace = true, optional = true } +tokio-stream = { workspace = true, optional = true } +tokio-tar = { workspace = true, optional = true } +tower-telemetry = { workspace = true, optional = true } + +# WASM bindings — gated behind the `wasm` feature. 
+serde_bytes = { version = "0.11", optional = true } +serde-wasm-bindgen = { version = "0.6", optional = true } +wasm-bindgen = { version = "0.2", optional = true } + +[dev-dependencies] +testutils = { workspace = true } diff --git a/crates/tower-package/README.md b/crates/tower-package/README.md new file mode 100644 index 00000000..d122a735 --- /dev/null +++ b/crates/tower-package/README.md @@ -0,0 +1,72 @@ +# tower-package + +Bundle builder for Tower apps. Used by the Tower CLI to pack an app +directory into a gzipped tar archive, and published as an npm package +(`tower-package-wasm`) for building bundles from TypeScript. + +The crate has three layers: + +- **Core** (always compiled) — pure types and the `build_package` + function that turns in-memory bytes into a deterministic tar.gz. +- **Native** (`native` feature, default) — `Package::build` walks the + filesystem, resolves globs, reads files, and delegates to the core. + Used by the Tower CLI. +- **WASM** (`wasm` feature) — `wasm-bindgen` wrapper exposing + `buildBundle` to JavaScript. + +## Native (Rust) + +```toml +[dependencies] +tower-package = "0.3" # default features include `native` +``` + +Existing CLI callers are unchanged. + +## WebAssembly (TypeScript) + +Build from inside the nix devshell (`nix develop`): + +```sh +./scripts/build.sh # bundler (webpack/vite/rollup) — default +./scripts/build.sh web # native ES modules, fetch-based init +./scripts/build.sh nodejs # CommonJS, Node 18+ +``` + +Output lands in `pkg/` and is publishable to npm as `tower-package-wasm`. 
+ +### Usage + +```ts +import { buildPackage, PackageInputs } from 'tower-package-wasm'; + +const inputs: PackageInputs = { + appFiles: [ + { archiveName: 'app/main.py', bytes: new TextEncoder().encode('print("hi")') }, + ], + moduleFiles: [], + towerfileBytes: new TextEncoder().encode( + '[app]\nname = "my-app"\nscript = "main.py"\n', + ), +}; + +const tarGz: Uint8Array = buildPackage(inputs); +``` + +Archive names must already be rooted under `app/` or `modules//`; +the core does no path rewriting. `invoke`, `parameters`, and import +paths in the manifest are derived from `towerfileBytes`. + +Output is byte-deterministic for a given input: entries are sorted by +`archiveName`, tar headers are normalized (zero mtime/uid/gid, mode +`0644`), and the gzip header embeds no mtime. The package format +(`ustar` + gzip, `MANIFEST` + `Towerfile` at the top level) matches +what the Tower CLI produces natively. + +## Tests + +```sh +cargo test -p tower-package # native Rust tests +./scripts/build.sh nodejs # then +cd test && npm install && npm test # TypeScript tests +``` diff --git a/crates/tower-package/scripts/build.sh b/crates/tower-package/scripts/build.sh new file mode 100755 index 00000000..eb17846e --- /dev/null +++ b/crates/tower-package/scripts/build.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Build the tower-package WebAssembly npm package. +# +# Runs wasm-pack against the tower-package crate with only the `wasm` +# feature, replaces the generated `.d.ts` (which types inputs as `any`) +# with the hand-written typed interface, and renames the npm package to +# `tower-package-wasm` so the Rust crate and the npm package can have +# different names. + +set -euo pipefail + +CRATE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +cd "$CRATE_DIR" + +TARGET="${1:-bundler}" +OUT_DIR="pkg" + +wasm-pack build . 
\ + --target "$TARGET" \ + --release \ + --out-dir "$OUT_DIR" \ + -- \ + --no-default-features \ + --features wasm + +cp types.d.ts "$OUT_DIR/tower_package.d.ts" + +sed -i.bak 's/"name": "tower-package"/"name": "tower-package-wasm"/' "$OUT_DIR/package.json" +rm "$OUT_DIR/package.json.bak" + +echo "Built tower-package-wasm npm package at $CRATE_DIR/$OUT_DIR" diff --git a/crates/tower-package/src/core.rs b/crates/tower-package/src/core.rs new file mode 100644 index 00000000..ed35e264 --- /dev/null +++ b/crates/tower-package/src/core.rs @@ -0,0 +1,309 @@ +use flate2::{write::GzEncoder, Compression, GzBuilder}; +use serde::{Deserialize, Serialize}; +use sha2::{Digest, Sha256}; +use snafu::prelude::*; +use std::collections::HashMap; +use std::io::Write; +use std::path::{Component, Path}; +use tar::{Builder, Header}; + +use crate::towerfile::{Parameter, Towerfile}; + +// Version History: +// 1 - Initial version +// 2 - Add app_dir, modules_dir, and checksum +// 3 - Change checksum algorithm to be cross-platform +pub const CURRENT_PACKAGE_VERSION: i32 = 3; + +pub const MAX_PACKAGE_SIZE: u64 = 50 * 1024 * 1024; + +#[derive(Debug, Snafu)] +pub enum Error { + #[snafu(display("Invalid path"))] + InvalidPath, + + #[snafu(display("Invalid Towerfile: {message}"))] + InvalidTowerfile { message: String }, + + #[snafu(display("No Towerfile was found in this directory"))] + MissingTowerfile, + + #[snafu(display("Missing required app field `{field}` in Towerfile"))] + MissingRequiredAppField { field: String }, + + #[snafu(display("Serialization error: {source}"))] + Serialization { source: serde_json::Error }, + + #[snafu(display("IO error: {source}"))] + Io { source: std::io::Error }, +} + +impl From for Error { + fn from(source: serde_json::Error) -> Self { + Error::Serialization { source } + } +} + +impl From for Error { + fn from(source: std::io::Error) -> Self { + Error::Io { source } + } +} + +impl From for Error { + fn from(err: toml::de::Error) -> Self { + 
Error::InvalidTowerfile { + message: err.to_string(), + } + } +} + +impl From for Error { + fn from(err: toml::ser::Error) -> Self { + Error::InvalidTowerfile { + message: err.to_string(), + } + } +} + +#[derive(Clone, Serialize, Deserialize)] +pub struct Manifest { + pub version: Option, + pub invoke: String, + + #[serde(default)] + pub parameters: Vec, + + pub schedule: Option, + + #[serde(default)] + pub import_paths: Vec, + + #[serde(default)] + pub app_dir_name: String, + + #[serde(default)] + pub modules_dir_name: String, + + #[serde(default)] + pub checksum: String, +} + +impl Manifest { + pub fn from_json(data: &str) -> Result { + Ok(serde_json::from_str(data)?) + } +} + +#[derive(Debug, Clone)] +pub struct Entry { + // archive_name is the POSIX-normalized path inside the tar (e.g. "app/main.py"). + pub archive_name: String, + pub bytes: Vec, +} + +#[derive(Debug)] +pub struct PackageInputs { + // app_files have archive_name already rooted under "app/". + pub app_files: Vec, + + // module_files have archive_name already rooted under "modules/". + pub module_files: Vec, + + // towerfile_bytes is the sole source of invoke, parameters, and import_paths. + pub towerfile_bytes: Vec, +} + +pub struct BuiltPackage { + pub bytes: Vec, + pub manifest: Manifest, +} + +// build_package produces a gzipped tar archive containing the given entries plus a generated +// MANIFEST and the original Towerfile. Entries are sorted by archive_name and tar headers are +// normalized (mtime/uid/gid zero, mode 0644) so the output is byte-deterministic for a given +// input. 
+pub fn build_package(inputs: PackageInputs) -> Result { + let towerfile_str = std::str::from_utf8(&inputs.towerfile_bytes).map_err(|e| { + Error::InvalidTowerfile { + message: format!("Towerfile is not valid UTF-8: {}", e), + } + })?; + let towerfile = Towerfile::from_toml(towerfile_str)?; + + let import_paths: Vec = towerfile + .app + .import_paths + .iter() + .map(|p| format!("modules/{}", import_path_basename(&p.to_string_lossy()))) + .collect(); + + let mut entries: Vec = Vec::with_capacity(inputs.app_files.len() + inputs.module_files.len()); + entries.extend(inputs.app_files); + entries.extend(inputs.module_files); + entries.sort_by(|a, b| a.archive_name.cmp(&b.archive_name)); + + let mut path_hashes: HashMap = HashMap::with_capacity(entries.len()); + for entry in &entries { + path_hashes.insert(entry.archive_name.clone(), compute_sha256_bytes(&entry.bytes)); + } + + let manifest = Manifest { + version: Some(CURRENT_PACKAGE_VERSION), + invoke: towerfile.app.script, + parameters: towerfile.parameters, + schedule: None, + import_paths, + app_dir_name: "app".to_string(), + modules_dir_name: "modules".to_string(), + checksum: compute_sha256_package(&path_hashes), + }; + + let manifest_bytes = serde_json::to_vec(&manifest)?; + + let gz = GzBuilder::new() + .mtime(0) + .write(Vec::new(), Compression::default()); + let mut builder = Builder::new(gz); + + for entry in &entries { + append_entry(&mut builder, &entry.archive_name, &entry.bytes)?; + } + append_entry(&mut builder, "MANIFEST", &manifest_bytes)?; + append_entry(&mut builder, "Towerfile", &inputs.towerfile_bytes)?; + + let gz: GzEncoder> = builder.into_inner()?; + let bytes = gz.finish()?; + + Ok(BuiltPackage { bytes, manifest }) +} + +fn append_entry(builder: &mut Builder, name: &str, bytes: &[u8]) -> Result<(), Error> { + let mut header = Header::new_gnu(); + header.set_size(bytes.len() as u64); + header.set_mode(0o644); + header.set_mtime(0); + header.set_uid(0); + header.set_gid(0); + 
header.set_entry_type(tar::EntryType::Regular); + header.set_cksum(); + builder.append_data(&mut header, name, bytes)?; + Ok(()) +} + +// import_path_basename returns the final non-empty path component of an import path string. +// Accepts both forward- and back-slashes so Towerfiles authored on either OS parse the same. +fn import_path_basename(path: &str) -> &str { + path.rsplit(|c| c == '/' || c == '\\') + .find(|s| !s.is_empty() && *s != "." && *s != "..") + .unwrap_or("") +} + +// normalize_path converts a Path to a POSIX-style string with forward slashes, dropping root and +// Windows prefix components and refusing ".." navigation that escapes the path. +pub fn normalize_path(path: &Path) -> Result { + let mut next = Vec::new(); + + for component in path.components() { + match component { + Component::Prefix(_) | Component::RootDir => {} + Component::CurDir => {} + Component::ParentDir => { + if !next.is_empty() { + return Err(Error::InvalidPath); + } + } + Component::Normal(os_str) => { + if let Some(s) = os_str.to_str() { + next.push(s.to_string()); + } + } + } + } + + Ok(next.join("/")) +} + +pub fn compute_sha256_bytes(bytes: &[u8]) -> String { + let mut hasher = Sha256::new(); + hasher.update(bytes); + format!("{:x}", hasher.finalize()) +} + +// compute_sha256_package hashes the sorted (archive_name, file_hash) pairs to produce a stable +// fingerprint of the package contents. 
+pub fn compute_sha256_package(path_hashes: &HashMap) -> String { + let mut keys: Vec<&String> = path_hashes.keys().collect(); + keys.sort(); + + let mut hasher = Sha256::new(); + for key in keys { + hasher.update(format!("{}:{}", key, &path_hashes[key]).as_bytes()); + } + format!("{:x}", hasher.finalize()) +} + +#[cfg(test)] +mod test { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_normalize_path() { + let path = PathBuf::from(".") + .join("some") + .join("nested") + .join("path") + .join("to") + .join("file.txt"); + assert_eq!(normalize_path(&path).unwrap(), "some/nested/path/to/file.txt"); + } + + #[test] + fn test_build_package_is_deterministic() { + let inputs = || PackageInputs { + app_files: vec![ + Entry { archive_name: "app/b.py".into(), bytes: b"b".to_vec() }, + Entry { archive_name: "app/a.py".into(), bytes: b"a".to_vec() }, + ], + module_files: vec![], + towerfile_bytes: b"[app]\nname = \"x\"\nscript = \"app/a.py\"\n".to_vec(), + }; + + let p1 = build_package(inputs()).unwrap(); + let p2 = build_package(inputs()).unwrap(); + assert_eq!(p1.bytes, p2.bytes); + assert!(!p1.manifest.checksum.is_empty()); + assert_eq!(p1.manifest.invoke, "app/a.py"); + } + + #[test] + fn test_derives_import_paths_from_towerfile() { + let towerfile = br#" +[app] +name = "x" +script = "main.py" +import_paths = ["../shared", "libs/inner", "./weird/"] +"#; + let out = build_package(PackageInputs { + app_files: vec![], + module_files: vec![], + towerfile_bytes: towerfile.to_vec(), + }) + .unwrap(); + assert_eq!( + out.manifest.import_paths, + vec!["modules/shared", "modules/inner", "modules/weird"] + ); + } + + #[test] + fn test_invalid_towerfile_is_rejected() { + let result = build_package(PackageInputs { + app_files: vec![], + module_files: vec![], + towerfile_bytes: b"not = = toml".to_vec(), + }); + assert!(matches!(result, Err(Error::InvalidTowerfile { .. 
}))); + } +} diff --git a/crates/tower-package/src/error.rs b/crates/tower-package/src/error.rs index dde66067..e57afe64 100644 --- a/crates/tower-package/src/error.rs +++ b/crates/tower-package/src/error.rs @@ -14,6 +14,18 @@ pub enum Error { #[snafu(display("Invalid glob pattern: {message}"))] InvalidGlob { message: String }, + + #[snafu(display("Invalid Towerfile: {message}"))] + InvalidTowerfile { message: String }, + + #[snafu(display("No Towerfile was found in this directory"))] + MissingTowerfile, + + #[snafu(display("Missing required app field `{field}` in Towerfile"))] + MissingRequiredAppField { field: String }, + + #[snafu(display("IO error: {source}"))] + Io { source: std::io::Error }, } impl From for Error { @@ -29,3 +41,23 @@ impl From for Error { Error::InvalidManifest } } + +impl From for Error { + fn from(err: crate::core::Error) -> Self { + use crate::core::Error as Core; + match err { + Core::InvalidPath => Error::InvalidPath, + Core::Serialization { source } => { + debug!("core serialization error: {}", source); + Error::InvalidManifest + } + Core::Io { source } => { + debug!("core IO error: {}", source); + Error::NoManifest + } + Core::InvalidTowerfile { message } => Error::InvalidTowerfile { message }, + Core::MissingTowerfile => Error::MissingTowerfile, + Core::MissingRequiredAppField { field } => Error::MissingRequiredAppField { field }, + } + } +} diff --git a/crates/tower-package/src/lib.rs b/crates/tower-package/src/lib.rs index 25c8a9a9..d876067b 100644 --- a/crates/tower-package/src/lib.rs +++ b/crates/tower-package/src/lib.rs @@ -1,753 +1,21 @@ -use config::Towerfile; -use glob::glob; -use serde::{Deserialize, Serialize}; -use sha2::{Digest, Sha256}; -use std::collections::{HashMap, VecDeque}; -use std::path::{Component, Path, PathBuf}; -use std::pin::Pin; -use tmpdir::TmpDir; -use tokio::{ - fs::File, - io::{AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader}, -}; -use tokio_tar::{Archive, Builder}; - -use 
async_compression::tokio::bufread::GzipDecoder; -use async_compression::tokio::write::GzipEncoder; +mod core; +mod towerfile; -use tower_telemetry::debug; +pub use core::{ + build_package, compute_sha256_bytes, compute_sha256_package, normalize_path, BuiltPackage, + Entry, Manifest, PackageInputs, CURRENT_PACKAGE_VERSION, MAX_PACKAGE_SIZE, +}; +pub use towerfile::{App, Parameter, Towerfile}; +#[cfg(feature = "native")] mod error; -pub use error::Error; - -// current version of the package format. we keep a version history here just in case anyone has -// questions. will probably promote this to proper docs at some point. -// -// Version History: -// 1 - Initial version -// 2 - Add app_dir, modules_dir, and checksum -// 3 - Change checksum algorithm to be cross-platform -const CURRENT_PACKAGE_VERSION: i32 = 3; - -// Maximum allowed size for a bundle package in bytes (50MB) -// This limit ensures bundles remain manageable for deployment and storage. -pub const MAX_BUNDLE_SIZE: u64 = 50 * 1024 * 1024; - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct Parameter { - #[serde(default)] - pub name: String, - - #[serde(default)] - pub description: Option, - - #[serde(default)] - pub default: String, - - #[serde(default)] - pub hidden: bool, -} - -#[derive(Clone, Serialize, Deserialize)] -pub struct Manifest { - // version is the version of the packaging format that was used. - pub version: Option, - - // invoke is the target in this package to invoke. - pub invoke: String, - - #[serde(default)] - pub parameters: Vec, - - // schedule is the schedule that we want to execute this app on. this is, just temporarily, - // where it will live. - pub schedule: Option, - - // import_paths are the rewritten collection of modules that this app's code goes into. - #[serde(default)] - pub import_paths: Vec, - - // app_dir_name is the name of the application directory within the package. 
- #[serde(default)] - pub app_dir_name: String, - - // modules_dir_name is the name of the modules directory within the package. - #[serde(default)] - pub modules_dir_name: String, - - // checksum contains a hash of all the content in the package. - #[serde(default)] - pub checksum: String, -} - -impl Manifest { - pub async fn from_path(path: &Path) -> Result { - let mut file = File::open(path).await?; - let mut contents = String::new(); - file.read_to_string(&mut contents).await?; - Self::from_json(&contents).await - } - - pub async fn from_json(data: &str) -> Result { - let manifest: Self = serde_json::from_str(data)?; - Ok(manifest) - } -} - -// PackageSpec describes how to build a package. -#[derive(Debug)] -pub struct PackageSpec { - // towerfile_path is the path to the Towerfile that was used to build this package. - pub towerfile_path: PathBuf, - - // invoke is the file to invoke when the package is run. - pub invoke: String, - - // base_dir is the directory where the package is located. - pub base_dir: PathBuf, - - // file_globs is a list of globs that match the files in the package. - pub file_globs: Vec, - - // parameters are the parameters to use for this app. - pub parameters: Vec, - - // schedule defines the frequency that this app should be run on. 
- pub schedule: Option, - - pub import_paths: Vec, -} - -fn get_parameters(towerfile: &Towerfile) -> Vec { - let mut parameters = Vec::new(); - for p in &towerfile.parameters { - parameters.push(Parameter { - name: p.name.clone(), - description: Some(p.description.clone()), - default: p.default.clone(), - hidden: p.hidden, - }); - } - parameters -} - -impl PackageSpec { - pub fn from_towerfile(towerfile: &Towerfile) -> Self { - debug!("creating package spec from towerfile: {:?}", towerfile); - let towerfile_path = towerfile.file_path.clone(); - let base_dir = towerfile_path - .parent() - .unwrap_or_else(|| Path::new(".")) - .to_path_buf(); - - let schedule = if towerfile.app.schedule.is_empty() { - None - } else { - Some(towerfile.app.schedule.to_string()) - }; - - // We need to turn these (validated) paths into something taht we can use at runtime. - let import_paths = towerfile - .app - .import_paths - .iter() - .map(|p| p.to_string_lossy().to_string()) - .collect(); - - Self { - schedule, - towerfile_path, - base_dir, - import_paths, - invoke: towerfile.app.script.clone(), - file_globs: towerfile.app.source.clone(), - parameters: get_parameters(towerfile), - } - } -} - -pub struct Package { - pub manifest: Manifest, - - // tmp_dir is used to keep the package directory around occasionally so the directory doesn't - // get deleted out from under the application. - pub tmp_dir: Option, - - // package_file_path is path to the packed file on disk. - pub package_file_path: Option, - - // unpacked_path is the path to the unpackaged package on disk. 
- pub unpacked_path: Option, -} - -impl Package { - pub fn default() -> Self { - Self { - tmp_dir: None, - package_file_path: None, - unpacked_path: None, - manifest: Manifest { - version: Some(CURRENT_PACKAGE_VERSION), - invoke: "".to_string(), - parameters: vec![], - schedule: None, - import_paths: vec![], - app_dir_name: "app".to_string(), - modules_dir_name: "modules".to_string(), - checksum: "".to_string(), - }, - } - } - - pub async fn from_unpacked_path(path: PathBuf) -> Result { - let manifest_path = path.join("MANIFEST"); - let manifest = Manifest::from_path(&manifest_path).await?; - - Ok(Self { - tmp_dir: None, - package_file_path: None, - unpacked_path: Some(path), - manifest, - }) - } - - // build creates a new package from a PackageSpec. PackageSpec is typically composed of fields - // copied from the Towerfile. The most important thing to know is that the collection of file - // globs to include in the package. - // - // The underlying package is just a TAR file with a special `MANIFEST` file that has also been - // GZip'd. - pub async fn build(spec: PackageSpec) -> Result { - debug!("building package from spec: {:?}", spec); - - // we canonicalize this because we want to treat all paths in the same keyspace more or - // less. - let base_dir = spec.base_dir.canonicalize()?; - - // Canonicalize import paths upfront so the resolver can whitelist files within them. 
- let canonical_import_paths: Vec = spec - .import_paths - .iter() - .map(|p| base_dir.join(p).canonicalize()) - .collect::, _>>()?; - - let resolver = FileResolver::new(base_dir.clone(), canonical_import_paths.clone()); - - let tmp_dir = TmpDir::new("tower-package").await?; - let package_path = tmp_dir.to_path_buf().join("package.tar"); - debug!("building package at: {:?}", package_path); - - let file = File::create(package_path.clone()).await?; - let gzip = GzipEncoder::new(file); - let mut builder = Builder::new(gzip); - - // These help us compute the integrity of the package contents overall. For each path, we'll - // store a hash of the contents written to the file. Then we'll hash the final content to - // create a fingerprint of the data. - let mut path_hashes = HashMap::new(); - - // If the user didn't specify anything here we'll package everything under this directory - // and ship it to Tower. - let mut file_globs = spec.file_globs.clone(); - - // If there was no source specified, we'll pull in all the source code in the current - // directory. - if file_globs.is_empty() { - debug!("no source files specified. using default paths."); - file_globs.push("./**/*".to_string()); - } - - // We'll collect all the file paths in a collection here. - let mut file_paths = HashMap::new(); - - for file_glob in file_globs { - let path = base_dir.join(file_glob); - resolver.resolve_glob(path, &mut file_paths).await?; - } - - // App code lives in the app dir - let app_dir = PathBuf::from("app"); - - // Now that we have all the paths, we'll append them to the builder. - for (physical_path, logical_path) in file_paths { - // All of the app code goes into the "app" directory. - let logical_path = app_dir.join(logical_path); - // Normalize to forward slashes so archive entry names are POSIX-compatible - // on all platforms (Windows PathBuf uses backslashes). 
- let archive_name = normalize_path(&logical_path)?; - - let hash = compute_sha256_file(&physical_path).await?; - path_hashes.insert(PathBuf::from(&archive_name), hash); - - builder - .append_path_with_name(physical_path, &archive_name) - .await?; - } - - // Module code lives in the modules dir. - let module_dir = PathBuf::from("modules"); - let mut import_paths = vec![]; - - // Now we need to package up all the modules to include in the code base too. - for import_path in &canonical_import_paths { - - let mut file_paths = HashMap::new(); - resolver.resolve_path(&import_path, &mut file_paths).await; - - // Resolve module files relative to the import path's parent so that the - // directory structure inside the package matches the manifest entry. Without - // this, an import path that lives inside base_dir (e.g. libs/shared) would be - // resolved relative to base_dir by logical_path(), producing - // modules/libs/shared/... while the manifest entry is modules/shared. - let import_parent = import_path.parent().unwrap_or(import_path.as_path()); - - // The file_name should constitute the logical path - let import_path = import_path.file_name().unwrap(); - let import_path = module_dir.join(import_path); - // Normalize to forward slashes for the manifest (POSIX, cross-platform). - let import_path_str = normalize_path(&import_path)?; - import_paths.push(import_path_str); - - // Now we write all of these paths to the modules directory. - for (physical_path, _) in file_paths { - let logical_path = match physical_path.strip_prefix(import_parent) { - Ok(p) => module_dir.join(p), - Err(_) => continue, - }; - // Normalize to forward slashes so archive entry names are POSIX-compatible - // on all platforms (Windows PathBuf uses backslashes). 
- let archive_name = normalize_path(&logical_path)?; - - let hash = compute_sha256_file(&physical_path).await?; - path_hashes.insert(PathBuf::from(&archive_name), hash); - - debug!("adding file {}", logical_path.display()); - builder - .append_path_with_name(physical_path, &archive_name) - .await?; - } - } - - let manifest = Manifest { - import_paths, - version: Some(CURRENT_PACKAGE_VERSION), - invoke: String::from(spec.invoke), - parameters: spec.parameters, - schedule: spec.schedule, - app_dir_name: app_dir.to_string_lossy().to_string(), - modules_dir_name: module_dir.to_string_lossy().to_string(), - checksum: compute_sha256_package(&path_hashes)?, - }; - - // the whole manifest needs to be written to a file as a convenient way to avoid having to - // manually populate the TAR file headers for this data. maybe in the future, someone will - // have the humption to do so here, thus avoiding an unnecessary file write (and the - // associated failure modes). - let manifest_path = tmp_dir.to_path_buf().join("MANIFEST"); - write_manifest_to_file(&manifest_path, &manifest).await?; - builder - .append_path_with_name(manifest_path, "MANIFEST") - .await?; - - // Let's also package the Towerfile along with it. - builder - .append_path_with_name(spec.towerfile_path, "Towerfile") - .await?; +#[cfg(feature = "native")] +mod native; - let mut gzip = builder.into_inner().await?; - gzip.shutdown().await?; - - // probably not explicitly required; however, makes the test suite pass so... - let mut file = gzip.into_inner(); - file.shutdown().await?; - - Ok(Self { - manifest, - unpacked_path: None, - tmp_dir: Some(tmp_dir), - package_file_path: Some(package_path), - }) - } - - /// unpack is the primary interface in to unpacking a package. It will allocate a temporary - /// directory if one isn't already allocated and unpack the package contents into that location. 
- pub async fn unpack(&mut self) -> Result<(), Error> { - // If there's already a tmp_dir allocated to this package, then we'll use that. Otherwise, - // we allocate one and store it on this package for later use. - let path = if let Some(tmp_dir) = self.tmp_dir.as_ref() { - tmp_dir.to_path_buf() - } else { - let tmp_dir = TmpDir::new("tower-package").await?; - let path = tmp_dir.to_path_buf(); - self.tmp_dir = Some(tmp_dir); - path - }; - - // self.package_file_path should be set otherwise this is a bug. - let package_path = self.package_file_path.clone().unwrap(); - unpack_archive(&package_path, &path).await?; - self.unpacked_path = Some(path); - Ok(()) - } -} - -async fn write_manifest_to_file(path: &PathBuf, manifest: &Manifest) -> Result<(), Error> { - let mut file = File::create(path).await?; - let data = serde_json::to_string(&manifest)?; - file.write_all(data.as_bytes()).await?; - - // this is required to ensure that everything gets flushed to disk. it's not enough to just let - // the file reference get dropped. - file.shutdown().await?; - - Ok(()) -} - -fn extract_glob_path(path: PathBuf) -> String { - let str = path.to_str().unwrap(); - - #[cfg(windows)] - { - // This is a nasty hack to get around a limitation in the `glob` crate on Windows. There's - // a (documented) bug that prevents it from globbing on canonicalized paths. - // - // See https://github.com/rust-lang/glob/issues/132 - str.strip_prefix(r"\\?\").ok_or(str).unwrap().to_string() - } - - #[cfg(not(windows))] - { - str.to_string() - } -} - -/// Check if a file is a valid gzip file by attempting to decompress it -async fn is_valid_gzip>(path: P) -> bool { - let file = match File::open(&path).await { - Ok(file) => file, - Err(_) => return false, - }; - - let reader = BufReader::new(file); - let mut decoder = GzipDecoder::new(reader); - - // Try to read a small amount of data. If we can, then we assume that it's a valid gzip file. - // Othwewise, it's not gzipped I suppose? 
- let mut buffer = [0u8; 1024]; - match decoder.read(&mut buffer).await { - Ok(_) => true, - Err(_) => false, - } -} - -async fn unpack_archive>( - package_path: P, - output_path: P, -) -> Result<(), std::io::Error> { - let reader: Pin> = if is_valid_gzip(&package_path).await { - // gor gzipped files - let file = File::open(&package_path).await?; - let buf_reader = BufReader::new(file); - let decoder = GzipDecoder::new(buf_reader); - Box::pin(decoder) - } else { - // For regular files - let file = File::open(&package_path).await?; - Box::pin(file) - }; - - // Create and unpack the archive - let mut archive = Archive::new(reader); - archive.unpack(output_path).await?; - - Ok(()) -} - -fn is_in_dir(p: &PathBuf, dir: &str) -> bool { - let mut comps = p.components(); - comps.any(|comp| { - if let std::path::Component::Normal(name) = comp { - name == dir - } else { - false - } - }) -} - -fn is_file(p: &PathBuf, name: &str) -> bool { - if let Some(file_name) = p.file_name() { - file_name == name - } else { - false - } -} - -struct FileResolver { - // base_dir is the directory from which logical paths are computed. - base_dir: PathBuf, - - // import_paths are canonicalized paths to imported directories. Files within these directories - // are also allowed, with logical paths computed relative to each import path's parent. - import_paths: Vec, -} - -impl FileResolver { - fn new(base_dir: PathBuf, import_paths: Vec) -> Self { - Self { - base_dir, - import_paths, - } - } - - fn should_ignore(&self, p: &PathBuf) -> bool { - // Ignore anything that is compiled python - if p.extension().map(|ext| ext == "pyc").unwrap_or(false) { - return true; - } - - // Only exclude the root Towerfile (base_dir/Towerfile). Since base_dir is already - // canonicalized, we can derive this path directly. Towerfiles in sub-directories are - // legitimate app content and must be preserved. 
- if p == &self.base_dir.join("Towerfile") { - return true; - } - - // Ignore a .gitignore file - if is_file(p, ".gitignore") { - return true; - } - - // Remove anything thats __pycache__ - if is_in_dir(p, "__pycache__") { - return true; - } - - // Ignore anything that lives within a .git directory - if is_in_dir(p, ".git") { - return true; - } - - // Ignore anything that's in a virtualenv, too - if is_in_dir(p, ".venv") { - return true; - } - - false - } - - fn logical_path<'a>(&self, physical_path: &'a Path) -> Option<&'a Path> { - if let Ok(p) = physical_path.strip_prefix(&self.base_dir) { - return Some(p); - } - - // Try each import path's parent as a prefix. This allows files within import paths - // (which may live outside base_dir) to be resolved with logical paths that preserve - // the import directory name (e.g. "shared_lib/foo.py"). - for import_path in &self.import_paths { - if let Some(parent) = import_path.parent() { - if let Ok(p) = physical_path.strip_prefix(parent) { - return Some(p); - } - } - } - - None - } - - async fn resolve_glob( - &self, - path: PathBuf, - file_paths: &mut HashMap, - ) -> Result<(), Error> { - let path_str = extract_glob_path(path); - debug!("resolving glob pattern: {}", path_str); - - let entries = glob(&path_str).map_err(|e| Error::InvalidGlob { - message: format!("{}: {}", path_str, e), - })?; - - for entry in entries { - match entry { - Ok(path) => self.resolve_path(&path, file_paths).await, - Err(e) => { - debug!("skipping glob entry: {}", e); - } - } - } - - Ok(()) - } - - async fn resolve_path(&self, path: &PathBuf, file_paths: &mut HashMap) { - let mut queue = VecDeque::new(); - queue.push_back(path.to_path_buf()); - - while let Some(current_path) = queue.pop_front() { - let canonical_path = current_path.canonicalize(); - - if canonical_path.is_err() { - debug!( - " - skipping path {}: {}", - current_path.display(), - canonical_path.unwrap_err() - ); - continue; - } - - // We can safely unwrap this because we 
understand that it's not going to fail at this - // point. - let physical_path = canonical_path.unwrap(); - - if physical_path.is_dir() { - let mut entries = tokio::fs::read_dir(&physical_path).await.unwrap(); - - while let Some(entry) = entries.next_entry().await.unwrap() { - queue.push_back(entry.path()); - } - } else { - if !self.should_ignore(&physical_path) { - let cp = physical_path.clone(); - - match self.logical_path(&cp) { - None => { - debug!( - " - skipping file {}: not in base directory {}: ...", - physical_path.display(), - self.base_dir.display(), - ); - continue; - } - Some(logical_path) => { - debug!( - " - resolved path {} to logical path {}", - physical_path.display(), - logical_path.display() - ); - file_paths.insert(physical_path, logical_path.to_path_buf()); - } - } - } - } - } - } -} - -// normalize_path converts a Path to a normalized string with forward slashes as separators. -fn normalize_path(path: &Path) -> Result { - let mut next = Vec::new(); - - for component in path.components() { - match component { - Component::Prefix(_) | Component::RootDir => { - // Skip Windows prefixes (C:) and root markers - // You might want to keep root as "/" depending on needs - } - Component::CurDir => { - // Skip "." components - } - Component::ParentDir => { - // If the user is trying to navigate up but that's not possible, we'll just return - // an error here. - if !next.is_empty() { - return Err(Error::InvalidPath); - } - } - Component::Normal(os_str) => { - if let Some(s) = os_str.to_str() { - next.push(s.to_string()); - } - } - } - } - - Ok(next.join("/")) -} - -fn compute_sha256_package(path_hashes: &HashMap) -> Result { - // We'll standardize all the paths into a set of strings with normalized path separators. This - // is in particular important on Windows. 
- let mut key_cache = HashMap::new(); - - for key in path_hashes.keys() { - let normalized = normalize_path(&key)?; - key_cache.insert(normalized, key.clone()); - } - - let mut sorted_keys: Vec<_> = key_cache.keys().collect(); - sorted_keys.sort(); - - // hasher that we'll use for computing the overall SHA256 hash. - let mut hasher = Sha256::new(); - - for key in sorted_keys { - // We need to sort the keys so that we can compute a consistent hash. - let path = key_cache.get(key).unwrap(); - let value = path_hashes.get(path).unwrap(); - - let combined = format!("{}:{}", key, value); - hasher.update(combined.as_bytes()); - } - - // Finalize and get the hash result - let result = hasher.finalize(); - - // Convert to hex string - Ok(format!("{:x}", result)) -} - -pub async fn compute_sha256_file(file_path: &PathBuf) -> Result { - // Open the file - let file = File::open(file_path).await?; - let mut reader = BufReader::new(file); - - // Create a SHA256 hasher - let mut hasher = Sha256::new(); - - // Read file in chunks to handle large files efficiently - let mut buffer = [0; 8192]; // 8KB buffer - loop { - let bytes_read = reader.read(&mut buffer).await?; - if bytes_read == 0 { - break; - } - hasher.update(&buffer[..bytes_read]); - } - - // Finalize and get the hash result - let result = hasher.finalize(); - - // Convert to hex string - Ok(format!("{:x}", result)) -} - -#[cfg(test)] -mod test { - use super::*; - use std::path::PathBuf; - - #[test] - fn test_should_ignore_pyc_files() { - let resolver = FileResolver::new(PathBuf::from("/project"), vec![]); - - // A .pyc file should be ignored - assert!(resolver.should_ignore(&PathBuf::from("/project/module.pyc"))); - - // A .pyc file in a subdirectory should be ignored - assert!(resolver.should_ignore(&PathBuf::from("/project/sub/module.pyc"))); - - // A .py file should not be ignored - assert!(!resolver.should_ignore(&PathBuf::from("/project/module.py"))); - } +#[cfg(feature = "native")] +pub use error::Error; 
+#[cfg(feature = "native")] +pub use native::{compute_sha256_file, Package, PackageSpec}; - #[tokio::test] - async fn test_normalize_path() { - let path = PathBuf::from(".") - .join("some") - .join("nested") - .join("path") - .join("to") - .join("file.txt"); - let normalized = normalize_path(&path).unwrap(); - assert_eq!(normalized, "some/nested/path/to/file.txt"); - } -} +#[cfg(feature = "wasm")] +mod wasm; diff --git a/crates/tower-package/src/native.rs b/crates/tower-package/src/native.rs new file mode 100644 index 00000000..e7a6d69a --- /dev/null +++ b/crates/tower-package/src/native.rs @@ -0,0 +1,471 @@ +use glob::glob; +use std::collections::{HashMap, VecDeque}; +use std::path::{Path, PathBuf}; +use std::pin::Pin; +use tmpdir::TmpDir; +use tokio::{ + fs::File, + io::{AsyncRead, AsyncReadExt, AsyncWriteExt, BufReader}, +}; +use tokio_tar::Archive; + +use async_compression::tokio::bufread::GzipDecoder; + +use tower_telemetry::debug; + +use crate::core::{ + build_package, compute_sha256_bytes, normalize_path, Entry, Manifest, PackageInputs, + CURRENT_PACKAGE_VERSION, +}; +use crate::error::Error; +use crate::towerfile::Towerfile; + +// PackageSpec describes how to build a package. Everything core needs (invoke, parameters, +// import_paths as manifest entries) is derived from the Towerfile on disk, so this struct only +// carries what the file resolver needs: where the Towerfile lives, what's considered the project +// root, which globs match app files, and which import paths to walk. 
+#[derive(Debug)] +pub struct PackageSpec { + pub towerfile_path: PathBuf, + + pub base_dir: PathBuf, + + pub file_globs: Vec, + + pub import_paths: Vec, +} + +impl PackageSpec { + pub fn from_towerfile(towerfile: &Towerfile) -> Self { + debug!("creating package spec from towerfile: {:?}", towerfile); + let towerfile_path = towerfile.file_path.clone(); + let base_dir = towerfile_path + .parent() + .unwrap_or_else(|| Path::new(".")) + .to_path_buf(); + + let import_paths = towerfile + .app + .import_paths + .iter() + .map(|p| p.to_string_lossy().to_string()) + .collect(); + + Self { + towerfile_path, + base_dir, + import_paths, + file_globs: towerfile.app.source.clone(), + } + } +} + +pub struct Package { + pub manifest: Manifest, + + // tmp_dir is used to keep the package directory around occasionally so the directory doesn't + // get deleted out from under the application. + pub tmp_dir: Option, + + // package_file_path is path to the packed file on disk. + pub package_file_path: Option, + + // unpacked_path is the path to the unpackaged package on disk. + pub unpacked_path: Option, +} + +impl Package { + pub fn default() -> Self { + Self { + tmp_dir: None, + package_file_path: None, + unpacked_path: None, + manifest: Manifest { + version: Some(CURRENT_PACKAGE_VERSION), + invoke: "".to_string(), + parameters: vec![], + schedule: None, + import_paths: vec![], + app_dir_name: "app".to_string(), + modules_dir_name: "modules".to_string(), + checksum: "".to_string(), + }, + } + } + + pub async fn from_unpacked_path(path: PathBuf) -> Result { + let manifest_path = path.join("MANIFEST"); + let mut file = File::open(&manifest_path).await?; + let mut contents = String::new(); + file.read_to_string(&mut contents).await?; + let manifest = Manifest::from_json(&contents)?; + + Ok(Self { + tmp_dir: None, + package_file_path: None, + unpacked_path: Some(path), + manifest, + }) + } + + // build creates a new package from a PackageSpec. 
PackageSpec is typically composed of fields + // copied from the Towerfile. The most important thing to know is that the collection of file + // globs to include in the package. + // + // The underlying package is just a TAR file with a special `MANIFEST` file that has also been + // GZip'd. + pub async fn build(spec: PackageSpec) -> Result { + debug!("building package from spec: {:?}", spec); + + // we canonicalize this because we want to treat all paths in the same keyspace more or + // less. + let base_dir = spec.base_dir.canonicalize()?; + + // Canonicalize import paths upfront so the resolver can whitelist files within them. + let canonical_import_paths: Vec = spec + .import_paths + .iter() + .map(|p| base_dir.join(p).canonicalize()) + .collect::, _>>()?; + + let resolver = FileResolver::new(base_dir.clone(), canonical_import_paths.clone()); + + // If the user didn't specify anything here we'll package everything under this directory. + let mut file_globs = spec.file_globs.clone(); + if file_globs.is_empty() { + debug!("no source files specified. using default paths."); + file_globs.push("./**/*".to_string()); + } + + // Resolve app file paths: physical -> logical (relative to base_dir or import parent). + let mut app_file_paths: HashMap = HashMap::new(); + for file_glob in file_globs { + let path = base_dir.join(file_glob); + resolver.resolve_glob(path, &mut app_file_paths).await?; + } + + let app_dir = PathBuf::from("app"); + let mut app_files: Vec = Vec::with_capacity(app_file_paths.len()); + for (physical_path, logical_path) in app_file_paths { + let archive_path = app_dir.join(logical_path); + let archive_name = normalize_path(&archive_path)?; + let bytes = tokio::fs::read(&physical_path).await?; + app_files.push(Entry { archive_name, bytes }); + } + + // Resolve modules. Archive names use the raw import_path basename so they stay in sync + // with the manifest entries core derives from the same Towerfile string. 
+ let module_dir = PathBuf::from("modules"); + let mut module_files: Vec = Vec::new(); + + for (raw_import, canonical_import) in spec.import_paths.iter().zip(canonical_import_paths.iter()) { + let mut module_file_paths: HashMap = HashMap::new(); + resolver.resolve_path(canonical_import, &mut module_file_paths).await; + + let raw_basename = Path::new(raw_import) + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or(""); + let archive_prefix = module_dir.join(raw_basename); + + for (physical_path, _) in module_file_paths { + let rel = match physical_path.strip_prefix(canonical_import) { + Ok(p) => p, + Err(_) => continue, + }; + let archive_name = normalize_path(&archive_prefix.join(rel))?; + let bytes = tokio::fs::read(&physical_path).await?; + module_files.push(Entry { archive_name, bytes }); + } + } + + let towerfile_bytes = tokio::fs::read(&spec.towerfile_path).await?; + + let inputs = PackageInputs { + app_files, + module_files, + towerfile_bytes, + }; + + let built = build_package(inputs)?; + + let tmp_dir = TmpDir::new("tower-package").await?; + let package_path = tmp_dir.to_path_buf().join("package.tar"); + debug!("writing package to: {:?}", package_path); + + let mut file = File::create(&package_path).await?; + file.write_all(&built.bytes).await?; + file.shutdown().await?; + + Ok(Self { + manifest: built.manifest, + unpacked_path: None, + tmp_dir: Some(tmp_dir), + package_file_path: Some(package_path), + }) + } + + /// unpack is the primary interface in to unpacking a package. It will allocate a temporary + /// directory if one isn't already allocated and unpack the package contents into that location. + pub async fn unpack(&mut self) -> Result<(), Error> { + // If there's already a tmp_dir allocated to this package, then we'll use that. Otherwise, + // we allocate one and store it on this package for later use. 
+ let path = if let Some(tmp_dir) = self.tmp_dir.as_ref() { + tmp_dir.to_path_buf() + } else { + let tmp_dir = TmpDir::new("tower-package").await?; + let path = tmp_dir.to_path_buf(); + self.tmp_dir = Some(tmp_dir); + path + }; + + // self.package_file_path should be set otherwise this is a bug. + let package_path = self.package_file_path.clone().unwrap(); + unpack_archive(&package_path, &path).await?; + self.unpacked_path = Some(path); + Ok(()) + } +} + +fn extract_glob_path(path: PathBuf) -> String { + let str = path.to_str().unwrap(); + + #[cfg(windows)] + { + // This is a nasty hack to get around a limitation in the `glob` crate on Windows. There's + // a (documented) bug that prevents it from globbing on canonicalized paths. + // + // See https://github.com/rust-lang/glob/issues/132 + str.strip_prefix(r"\\?\").ok_or(str).unwrap().to_string() + } + + #[cfg(not(windows))] + { + str.to_string() + } +} + +/// Check if a file is a valid gzip file by attempting to decompress it +async fn is_valid_gzip>(path: P) -> bool { + let file = match File::open(&path).await { + Ok(file) => file, + Err(_) => return false, + }; + + let reader = BufReader::new(file); + let mut decoder = GzipDecoder::new(reader); + + // Try to read a small amount of data. If we can, then we assume that it's a valid gzip file. + // Otherwise, it's not gzipped I suppose? 
+ let mut buffer = [0u8; 1024]; + decoder.read(&mut buffer).await.is_ok() +} + +async fn unpack_archive>( + package_path: P, + output_path: P, +) -> Result<(), std::io::Error> { + let reader: Pin> = if is_valid_gzip(&package_path).await { + // For gzipped files + let file = File::open(&package_path).await?; + let buf_reader = BufReader::new(file); + let decoder = GzipDecoder::new(buf_reader); + Box::pin(decoder) + } else { + // For regular files + let file = File::open(&package_path).await?; + Box::pin(file) + }; + + // Create and unpack the archive + let mut archive = Archive::new(reader); + archive.unpack(output_path).await?; + + Ok(()) +} + +fn is_in_dir(p: &PathBuf, dir: &str) -> bool { + let mut comps = p.components(); + comps.any(|comp| { + if let std::path::Component::Normal(name) = comp { + name == dir + } else { + false + } + }) +} + +fn is_file(p: &PathBuf, name: &str) -> bool { + if let Some(file_name) = p.file_name() { + file_name == name + } else { + false + } +} + +struct FileResolver { + // base_dir is the directory from which logical paths are computed. + base_dir: PathBuf, + + // import_paths are canonicalized paths to imported directories. Files within these directories + // are also allowed, with logical paths computed relative to each import path's parent. 
+ if p == &self.base_dir.join("Towerfile") { + return true; + } + + // Ignore a .gitignore file + if is_file(p, ".gitignore") { + return true; + } + + // Remove anything thats __pycache__ + if is_in_dir(p, "__pycache__") { + return true; + } + + // Ignore anything that lives within a .git directory + if is_in_dir(p, ".git") { + return true; + } + + // Ignore anything that's in a virtualenv, too + if is_in_dir(p, ".venv") { + return true; + } + + false + } + + fn logical_path<'a>(&self, physical_path: &'a Path) -> Option<&'a Path> { + if let Ok(p) = physical_path.strip_prefix(&self.base_dir) { + return Some(p); + } + + // Try each import path's parent as a prefix. This allows files within import paths + // (which may live outside base_dir) to be resolved with logical paths that preserve + // the import directory name (e.g. "shared_lib/foo.py"). + for import_path in &self.import_paths { + if let Some(parent) = import_path.parent() { + if let Ok(p) = physical_path.strip_prefix(parent) { + return Some(p); + } + } + } + + None + } + + async fn resolve_glob( + &self, + path: PathBuf, + file_paths: &mut HashMap, + ) -> Result<(), Error> { + let path_str = extract_glob_path(path); + debug!("resolving glob pattern: {}", path_str); + + let entries = glob(&path_str).map_err(|e| Error::InvalidGlob { + message: format!("{}: {}", path_str, e), + })?; + + for entry in entries { + match entry { + Ok(path) => self.resolve_path(&path, file_paths).await, + Err(e) => { + debug!("skipping glob entry: {}", e); + } + } + } + + Ok(()) + } + + async fn resolve_path(&self, path: &PathBuf, file_paths: &mut HashMap) { + let mut queue = VecDeque::new(); + queue.push_back(path.to_path_buf()); + + while let Some(current_path) = queue.pop_front() { + let physical_path = match current_path.canonicalize() { + Ok(p) => p, + Err(e) => { + debug!(" - skipping path {}: {}", current_path.display(), e); + continue; + } + }; + + if physical_path.is_dir() { + let mut entries = 
tokio::fs::read_dir(&physical_path).await.unwrap(); + + while let Some(entry) = entries.next_entry().await.unwrap() { + queue.push_back(entry.path()); + } + } else { + if !self.should_ignore(&physical_path) { + let cp = physical_path.clone(); + match self.logical_path(&cp) { + None => { + debug!( + " - skipping file {}: not in base directory {}: ...", + physical_path.display(), + self.base_dir.display(), + ); + continue; + } + Some(logical_path) => { + debug!( + " - resolved path {} to logical path {}", + physical_path.display(), + logical_path.display() + ); + file_paths.insert(physical_path, logical_path.to_path_buf()); + } + } + } + } + } + } +} + +pub async fn compute_sha256_file(file_path: &PathBuf) -> Result { + let bytes = tokio::fs::read(file_path).await?; + Ok(compute_sha256_bytes(&bytes)) +} + +#[cfg(test)] +mod test { + use super::*; + use std::path::PathBuf; + + #[test] + fn test_should_ignore_pyc_files() { + let resolver = FileResolver::new(PathBuf::from("/project"), vec![]); + + // A .pyc file should be ignored + assert!(resolver.should_ignore(&PathBuf::from("/project/module.pyc"))); + + // A .pyc file in a subdirectory should be ignored + assert!(resolver.should_ignore(&PathBuf::from("/project/sub/module.pyc"))); + + // A .py file should not be ignored + assert!(!resolver.should_ignore(&PathBuf::from("/project/module.py"))); + } +} diff --git a/crates/config/src/towerfile.rs b/crates/tower-package/src/towerfile.rs similarity index 91% rename from crates/config/src/towerfile.rs rename to crates/tower-package/src/towerfile.rs index b0ab1d3c..e0de3b29 100644 --- a/crates/config/src/towerfile.rs +++ b/crates/tower-package/src/towerfile.rs @@ -1,8 +1,8 @@ -use crate::Error; +use crate::core::Error; use serde::{Deserialize, Serialize}; use std::path::PathBuf; -#[derive(Deserialize, Serialize, Debug)] +#[derive(Clone, Deserialize, Serialize, Debug)] pub struct Parameter { #[serde(default)] pub name: String, @@ -81,60 +81,65 @@ impl Towerfile { } } + /// 
set_parameter upserts a parameter by lookup name. If a parameter with the given name + /// exists, it is replaced. Otherwise, the parameter is appended. + pub fn set_parameter(&mut self, lookup_name: &str, param: Parameter) { + if let Some(existing) = self.parameters.iter_mut().find(|p| p.name == lookup_name) { + *existing = param; + } else { + self.parameters.push(param); + } + } + + /// remove_parameter removes a parameter by name, returning true if it was found + pub fn remove_parameter(&mut self, name: &str) -> bool { + let len_before = self.parameters.len(); + self.parameters.retain(|p| p.name != name); + self.parameters.len() < len_before + } +} + +#[cfg(feature = "native")] +impl Towerfile { /// from_path reads a Towerfile from a path and parses it as TOML content. - pub fn from_path(path: PathBuf) -> Result { + pub fn from_path(path: PathBuf) -> Result { + use crate::error::Error as OuterError; + if !path.exists() { - return Err(Error::MissingTowerfile); + return Err(OuterError::MissingTowerfile); } - let mut towerfile = Self::from_toml(&std::fs::read_to_string(path.to_path_buf())?)?; + let contents = + std::fs::read_to_string(&path).map_err(|source| OuterError::Io { source })?; + let mut towerfile = Self::from_toml(&contents)?; towerfile.file_path = path; Ok(towerfile) } /// from_local_file looks for a new, local Towerfile in the current working directory. - pub fn from_local_file() -> Result { + pub fn from_local_file() -> Result { Self::from_dir_str(".") } /// from_dir_str reads a Towerfile from a directory represented by a string. This is useful in /// the context of the `tower` CLI, where the user may specify a directory to read the /// Towerfile on the command line as an argument or whatever. 
- pub fn from_dir_str(dir: &str) -> Result { - let dir = PathBuf::from(dir); - let path = dir.join("Towerfile"); - - if !path.exists() { - Err(Error::MissingTowerfile) - } else { - Self::from_path(path) - } + pub fn from_dir_str(dir: &str) -> Result { + Self::from_path(PathBuf::from(dir).join("Towerfile")) } /// save writes the Towerfile as TOML to the specified path, defaulting to current dir - pub fn save(&self, path: Option<&std::path::Path>) -> Result<(), Error> { + pub fn save(&self, path: Option<&std::path::Path>) -> Result<(), crate::error::Error> { + use crate::error::Error as OuterError; + let target_path = path.unwrap_or_else(|| std::path::Path::new("Towerfile")); - std::fs::write(target_path, toml::to_string_pretty(self)?)?; + let serialized = toml::to_string_pretty(self).map_err(|err| OuterError::InvalidTowerfile { + message: err.to_string(), + })?; + std::fs::write(target_path, serialized).map_err(|source| OuterError::Io { source })?; Ok(()) } - - /// set_parameter upserts a parameter by lookup name. If a parameter with the given name - /// exists, it is replaced. Otherwise, the parameter is appended. 
- pub fn set_parameter(&mut self, lookup_name: &str, param: Parameter) { - if let Some(existing) = self.parameters.iter_mut().find(|p| p.name == lookup_name) { - *existing = param; - } else { - self.parameters.push(param); - } - } - - /// remove_parameter removes a parameter by name, returning true if it was found - pub fn remove_parameter(&mut self, name: &str) -> bool { - let len_before = self.parameters.len(); - self.parameters.retain(|p| p.name != name); - self.parameters.len() < len_before - } } #[cfg(test)] @@ -209,7 +214,7 @@ mod test { assert!(opt.is_some()); let err = opt.unwrap(); - assert!(matches!(err, crate::Error::MissingTowerfile)); + assert!(matches!(err, crate::error::Error::MissingTowerfile)); } #[test] diff --git a/crates/tower-package/src/wasm.rs b/crates/tower-package/src/wasm.rs new file mode 100644 index 00000000..0649f327 --- /dev/null +++ b/crates/tower-package/src/wasm.rs @@ -0,0 +1,58 @@ +use serde::Deserialize; +use crate::core::{build_package, Entry, PackageInputs}; +use wasm_bindgen::prelude::*; + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct JsEntry { + archive_name: String, + bytes: serde_bytes::ByteBuf, +} + +impl From for Entry { + fn from(e: JsEntry) -> Self { + Entry { + archive_name: e.archive_name, + bytes: e.bytes.into_vec(), + } + } +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct JsInputs { + app_files: Vec, + module_files: Vec, + towerfile_bytes: serde_bytes::ByteBuf, +} + +/// Build a Tower app package (gzipped tar) from in-memory file contents. +/// +/// Input shape (camelCase): +/// { +/// appFiles: [{ archiveName: string, bytes: Uint8Array }, ...], +/// moduleFiles: [{ archiveName: string, bytes: Uint8Array }, ...], +/// towerfileBytes: Uint8Array +/// } +/// +/// invoke, parameters, and import_paths in the manifest are derived from +/// towerfileBytes (parsed as TOML), so the caller cannot produce a package +/// whose manifest disagrees with the embedded Towerfile. 
+/// +/// Returns the gzipped tar archive as a Uint8Array, byte-identical across +/// runs for the same inputs. +#[wasm_bindgen(js_name = buildPackage)] +pub fn build_package_wasm(inputs: JsValue) -> Result, JsError> { + let js: JsInputs = serde_wasm_bindgen::from_value(inputs) + .map_err(|e| JsError::new(&format!("invalid inputs: {}", e)))?; + + let core_inputs = PackageInputs { + app_files: js.app_files.into_iter().map(Entry::from).collect(), + module_files: js.module_files.into_iter().map(Entry::from).collect(), + towerfile_bytes: js.towerfile_bytes.into_vec(), + }; + + let built = build_package(core_inputs) + .map_err(|e| JsError::new(&format!("build failed: {}", e)))?; + Ok(built.bytes) +} diff --git a/crates/tower-package/test/.gitignore b/crates/tower-package/test/.gitignore new file mode 100644 index 00000000..504afef8 --- /dev/null +++ b/crates/tower-package/test/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +package-lock.json diff --git a/crates/tower-package/test/build.test.ts b/crates/tower-package/test/build.test.ts new file mode 100644 index 00000000..03376a32 --- /dev/null +++ b/crates/tower-package/test/build.test.ts @@ -0,0 +1,139 @@ +import { test } from "node:test"; +import assert from "node:assert/strict"; +import { gunzipSync } from "node:zlib"; + +import { + buildPackage, + type PackageInputs, +} from "../pkg/tower_package.js"; + +const enc = new TextEncoder(); +const dec = new TextDecoder(); + +interface TarEntry { + name: string; + data: Uint8Array; +} + +// Minimal ustar reader — enough to pull entry names and bodies out of the +// output. Not robust to long names, extensions, or PAX headers, which the +// builder never emits. 
+function parseTarEntries(data: Uint8Array): TarEntry[] { + const entries: TarEntry[] = []; + let offset = 0; + while (offset + 512 <= data.length) { + const header = data.subarray(offset, offset + 512); + if (header.every((b) => b === 0)) break; + + const name = dec.decode(header.subarray(0, 100)).replace(/\0.*$/, ""); + if (!name) break; + + const sizeOctal = dec + .decode(header.subarray(124, 136)) + .replace(/\0.*$/, "") + .trim(); + const size = parseInt(sizeOctal, 8); + + const body = data.subarray(offset + 512, offset + 512 + size); + entries.push({ name, data: body }); + + offset += 512 + Math.ceil(size / 512) * 512; + } + return entries; +} + +function minimalInputs(): PackageInputs { + return { + appFiles: [ + { archiveName: "app/main.py", bytes: enc.encode('print("hi")\n') }, + { archiveName: "app/helper.py", bytes: enc.encode("# helper\n") }, + ], + moduleFiles: [], + towerfileBytes: enc.encode('[app]\nname = "test"\nscript = "main.py"\n'), + }; +} + +function buildEntries(inputs: PackageInputs): TarEntry[] { + return parseTarEntries(gunzipSync(buildPackage(inputs))); +} + +function getManifest(entries: TarEntry[]): Record { + return JSON.parse(dec.decode(entries.find((e) => e.name === "MANIFEST")!.data)); +} + +test("returns a gzipped archive", () => { + const out = buildPackage(minimalInputs()); + assert.ok(out instanceof Uint8Array); + assert.equal(out[0], 0x1f); + assert.equal(out[1], 0x8b); +}); + +test("output is byte-deterministic across calls", () => { + const a = buildPackage(minimalInputs()); + const b = buildPackage(minimalInputs()); + assert.deepEqual(a, b); +}); + +test("entries are sorted by archive name with MANIFEST and Towerfile last", () => { + const entries = buildEntries(minimalInputs()); + assert.deepEqual( + entries.map((e) => e.name), + ["app/helper.py", "app/main.py", "MANIFEST", "Towerfile"], + ); +}); + +test("file contents round-trip through the archive", () => { + const entries = buildEntries(minimalInputs()); + const main = 
entries.find((e) => e.name === "app/main.py")!; + assert.equal(dec.decode(main.data), 'print("hi")\n'); +}); + +test("manifest matches the inputs", () => { + const manifest = getManifest(buildEntries(minimalInputs())); + assert.equal(manifest.version, 3); + assert.equal(manifest.invoke, "main.py"); + assert.equal(manifest.app_dir_name, "app"); + assert.equal(manifest.modules_dir_name, "modules"); + assert.equal(typeof manifest.checksum, "string"); + assert.equal((manifest.checksum as string).length, 64); +}); + +test("module files and import paths flow through", () => { + const inputs = minimalInputs(); + inputs.moduleFiles = [ + { + archiveName: "modules/shared/__init__.py", + bytes: enc.encode(""), + }, + { + archiveName: "modules/shared/util.py", + bytes: enc.encode("# util\n"), + }, + ]; + inputs.towerfileBytes = enc.encode( + '[app]\nname = "test"\nscript = "main.py"\nimport_paths = ["shared"]\n', + ); + + const entries = buildEntries(inputs); + const names = entries.map((e) => e.name); + assert.ok(names.includes("modules/shared/__init__.py")); + assert.ok(names.includes("modules/shared/util.py")); + + assert.deepEqual(getManifest(entries).import_paths, ["modules/shared"]); +}); + +test("different inputs produce different checksums", () => { + const other = minimalInputs(); + other.appFiles[0] = { + archiveName: "app/main.py", + bytes: enc.encode('print("bye")\n'), + }; + + const checksumA = getManifest(buildEntries(minimalInputs())).checksum; + const checksumB = getManifest(buildEntries(other)).checksum; + assert.notEqual(checksumA, checksumB); +}); + +test("invalid input shape throws", () => { + assert.throws(() => buildPackage({} as unknown as PackageInputs)); +}); diff --git a/crates/tower-package/test/package.json b/crates/tower-package/test/package.json new file mode 100644 index 00000000..ca3acf76 --- /dev/null +++ b/crates/tower-package/test/package.json @@ -0,0 +1,13 @@ +{ + "name": "tower-package-wasm-test", + "private": true, + "type": "module", + 
"scripts": { + "test": "tsx --test build.test.ts" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "tsx": "^4.19.0", + "typescript": "^5.4.0" + } +} diff --git a/crates/tower-package/test/tsconfig.json b/crates/tower-package/test/tsconfig.json new file mode 100644 index 00000000..33b40dcb --- /dev/null +++ b/crates/tower-package/test/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "noEmit": true, + "allowImportingTsExtensions": true + }, + "include": ["*.ts"] +} diff --git a/crates/tower-package/tests/package_test.rs b/crates/tower-package/tests/package_test.rs index 18aed248..2eeef5c1 100644 --- a/crates/tower-package/tests/package_test.rs +++ b/crates/tower-package/tests/package_test.rs @@ -10,19 +10,18 @@ use tokio::{ }; use tokio_stream::*; -use config::Towerfile; use tokio_tar::Archive; -use tower_package::{Manifest, Package, PackageSpec, Parameter}; +use tower_package::{Manifest, Package, PackageSpec, Towerfile}; use tower_telemetry::debug; - +const TRIVIAL_TOWERFILE: &str = "[app]\nname = \"test\"\nscript = \"main.py\"\n"; #[tokio::test] async fn it_creates_package() { let tmp_dir = TmpDir::new("example") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "").await; + create_test_file(tmp_dir.to_path_buf(), "Towerfile", TRIVIAL_TOWERFILE).await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello, world!')").await; create_test_file( tmp_dir.to_path_buf(), @@ -32,12 +31,9 @@ async fn it_creates_package() { .await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile").to_path_buf(), file_globs: vec!["*.py".to_string()], - parameters: vec![], - schedule: None, import_paths: vec![], }; @@ -68,18 +64,15 @@ async fn it_respects_complex_file_globs() 
{ let tmp_dir = TmpDir::new("example") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "").await; + create_test_file(tmp_dir.to_path_buf(), "Towerfile", TRIVIAL_TOWERFILE).await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello, world!')").await; create_test_file(tmp_dir.to_path_buf(), "pack/__init__.py", "").await; create_test_file(tmp_dir.to_path_buf(), "pack/pack.py", "").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile").to_path_buf(), file_globs: vec!["*.py".to_string(), "**/*.py".to_string()], - parameters: vec![], - schedule: Some("every 1 minute".to_string()), import_paths: vec![], }; @@ -87,10 +80,6 @@ async fn it_respects_complex_file_globs() { assert_eq!(package.manifest.version, Some(3)); assert_eq!(package.manifest.invoke, "main.py"); - assert_eq!( - package.manifest.schedule, - Some("every 1 minute".to_string()) - ); let package_file_path = package.package_file_path.clone().unwrap(); assert!(!package_file_path.as_os_str().is_empty()); @@ -119,18 +108,15 @@ async fn it_packages_all_files_by_default() { let tmp_dir = TmpDir::new("all-files-by-default") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "").await; + create_test_file(tmp_dir.to_path_buf(), "Towerfile", TRIVIAL_TOWERFILE).await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello, world!')").await; create_test_file(tmp_dir.to_path_buf(), "pack/__init__.py", "").await; create_test_file(tmp_dir.to_path_buf(), "pack/pack.py", "").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile").to_path_buf(), file_globs: vec![], - parameters: vec![], - schedule: Some("every 1 minute".to_string()), import_paths: vec![], }; @@ -167,19 +153,16 @@ async fn 
it_packages_directory_contents() { let tmp_dir = TmpDir::new("directory-contents") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "").await; + create_test_file(tmp_dir.to_path_buf(), "Towerfile", TRIVIAL_TOWERFILE).await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello, world!')").await; create_test_file(tmp_dir.to_path_buf(), "pack/__init__.py", "").await; create_test_file(tmp_dir.to_path_buf(), "pack/pack.py", "").await; create_test_file(tmp_dir.to_path_buf(), "pack/submodule/pack.py", "").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile").to_path_buf(), file_globs: vec!["main.py".to_string(), "pack".to_string()], - parameters: vec![], - schedule: Some("every 1 minute".to_string()), import_paths: vec![], }; @@ -221,7 +204,12 @@ async fn it_packages_import_paths() { let tmp_dir = TmpDir::new("example") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "app/Towerfile", "").await; + create_test_file( + tmp_dir.to_path_buf(), + "app/Towerfile", + "[app]\nname = \"test\"\nscript = \"main.py\"\nimport_paths = [\"../shared\"]\n", + ) + .await; create_test_file( tmp_dir.to_path_buf(), "app/main.py", @@ -232,7 +220,6 @@ async fn it_packages_import_paths() { create_test_file(tmp_dir.to_path_buf(), "shared/module/test.py", "").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf().join("app"), towerfile_path: tmp_dir .to_path_buf() @@ -240,8 +227,6 @@ async fn it_packages_import_paths() { .join("Towerfile") .to_path_buf(), file_globs: vec!["**/*.py".to_string()], - parameters: vec![], - schedule: None, import_paths: vec!["../shared".to_string()], }; @@ -249,7 +234,6 @@ async fn it_packages_import_paths() { assert_eq!(package.manifest.version, Some(3)); assert_eq!(package.manifest.invoke, "main.py"); - 
assert_eq!(package.manifest.schedule, None); let files = read_package_files(package).await; @@ -276,7 +260,6 @@ async fn it_packages_import_paths() { // Let's decode the manifest and make sure import paths are set correctly. let manifest = Manifest::from_json(files.get("MANIFEST").unwrap()) - .await .expect("Manifest was not valid JSON"); // Archive paths are always normalized to forward slashes regardless of OS. @@ -301,18 +284,20 @@ async fn it_packages_import_paths_nested_within_base_dir() { let tmp_dir = TmpDir::new("nested-import") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "").await; + create_test_file( + tmp_dir.to_path_buf(), + "Towerfile", + "[app]\nname = \"test\"\nscript = \"main.py\"\nimport_paths = [\"libs/shared\"]\n", + ) + .await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello')").await; create_test_file(tmp_dir.to_path_buf(), "libs/shared/__init__.py", "").await; create_test_file(tmp_dir.to_path_buf(), "libs/shared/util.py", "# util").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile"), file_globs: vec!["main.py".to_string()], - parameters: vec![], - schedule: None, import_paths: vec!["libs/shared".to_string()], }; @@ -339,7 +324,6 @@ async fn it_packages_import_paths_nested_within_base_dir() { // Verify the manifest import_paths entry matches the actual package structure. 
let manifest = Manifest::from_json(files.get("MANIFEST").unwrap()) - .await .expect("Manifest was not valid JSON"); assert!( @@ -354,7 +338,7 @@ async fn it_excludes_various_content_that_should_not_be_there() { let tmp_dir = TmpDir::new("example") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "").await; + create_test_file(tmp_dir.to_path_buf(), "Towerfile", TRIVIAL_TOWERFILE).await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello, world!')").await; create_test_file( tmp_dir.to_path_buf(), @@ -377,12 +361,9 @@ async fn it_excludes_various_content_that_should_not_be_there() { create_test_file(tmp_dir.to_path_buf(), ".git/some-file", "").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile").to_path_buf(), file_globs: vec![], - parameters: vec![], - schedule: None, import_paths: vec![], }; @@ -417,15 +398,12 @@ async fn building_package_spec_from_towerfile() { "#; let mut towerfile = Towerfile::from_toml(toml).unwrap(); - - // we have to set the file_path on the Towerfile otherwise we can't build a package spec from - // it. 
towerfile.file_path = PathBuf::from("./Towerfile"); let spec = PackageSpec::from_towerfile(&towerfile); - assert_eq!(spec.invoke, "./script.py"); - assert_eq!(spec.schedule, Some("0 0 * * *".to_string())); + assert_eq!(spec.file_globs, vec!["*.py".to_string()]); + assert_eq!(spec.towerfile_path, PathBuf::from("./Towerfile")); } #[tokio::test] @@ -438,7 +416,12 @@ async fn it_includes_subapp_towerfiles_but_excludes_root_towerfile() { .expect("Failed to create temp dir"); // Root app files - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "[app]\nname = \"root\"").await; + create_test_file( + tmp_dir.to_path_buf(), + "Towerfile", + "[app]\nname = \"root\"\nscript = \"main.py\"\n", + ) + .await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello, world!')").await; // Sub-app with its own Towerfile @@ -446,12 +429,9 @@ async fn it_includes_subapp_towerfiles_but_excludes_root_towerfile() { create_test_file(tmp_dir.to_path_buf(), "subapp/main.py", "print('subapp')").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile"), file_globs: vec![], - parameters: vec![], - schedule: None, import_paths: vec![], }; @@ -497,29 +477,29 @@ async fn it_includes_hidden_parameters_in_manifest() { let tmp_dir = TmpDir::new("hidden-params") .await .expect("Failed to create temp dir"); - create_test_file(tmp_dir.to_path_buf(), "Towerfile", "").await; + let towerfile = r#"[app] +name = "test" +script = "main.py" + +[[parameters]] +name = "visible_param" +description = "A visible parameter" +default = "" +hidden = false + +[[parameters]] +name = "hidden_param" +description = "A hidden parameter" +default = "secret" +hidden = true +"#; + create_test_file(tmp_dir.to_path_buf(), "Towerfile", towerfile).await; create_test_file(tmp_dir.to_path_buf(), "main.py", "print('Hello, world!')").await; let spec = PackageSpec { - invoke: "main.py".to_string(), base_dir: 
tmp_dir.to_path_buf(), towerfile_path: tmp_dir.to_path_buf().join("Towerfile").to_path_buf(), file_globs: vec!["*.py".to_string()], - parameters: vec![ - Parameter { - name: "visible_param".to_string(), - description: Some("A visible parameter".to_string()), - default: "".to_string(), - hidden: false, - }, - Parameter { - name: "hidden_param".to_string(), - description: Some("A hidden parameter".to_string()), - default: "secret".to_string(), - hidden: true, - }, - ], - schedule: None, import_paths: vec![], }; @@ -527,7 +507,6 @@ async fn it_includes_hidden_parameters_in_manifest() { let files = read_package_files(package).await; let manifest = Manifest::from_json(files.get("MANIFEST").unwrap()) - .await .expect("Manifest was not valid JSON"); assert_eq!(manifest.parameters.len(), 2); diff --git a/crates/tower-package/types.d.ts b/crates/tower-package/types.d.ts new file mode 100644 index 00000000..03057e46 --- /dev/null +++ b/crates/tower-package/types.d.ts @@ -0,0 +1,23 @@ +export interface PackageEntry { + archiveName: string; + bytes: Uint8Array; +} + +export interface PackageInputs { + appFiles: PackageEntry[]; + moduleFiles: PackageEntry[]; + towerfileBytes: Uint8Array; +} + +/** + * Build a Tower app package (gzipped tar) from in-memory file contents. + * + * invoke, parameters, and import paths in the manifest are derived from + * towerfileBytes, so the caller cannot produce a package whose manifest + * disagrees with the embedded Towerfile. + * + * Output is byte-identical across runs for the same inputs: entries are + * sorted by archiveName, tar headers are normalized (zero mtime/uid/gid, + * mode 0644), and the gzip header embeds no mtime. 
+ */ +export function buildPackage(inputs: PackageInputs): Uint8Array; diff --git a/flake.lock b/flake.lock index e163b99b..0bfec00f 100644 --- a/flake.lock +++ b/flake.lock @@ -48,11 +48,11 @@ ] }, "locked": { - "lastModified": 1752689277, - "narHash": "sha256-uldUBFkZe/E7qbvxa3mH1ItrWZyT6w1dBKJQF/3ZSsc=", + "lastModified": 1776200608, + "narHash": "sha256-broZ6RFQr4Fv0wT73gGmzNX14A43TmTFF8g4wDKlNss=", "owner": "nix-community", "repo": "naersk", - "rev": "0e72363d0938b0208d6c646d10649164c43f4d64", + "rev": "8b23250ab45c2a38cd91031aee26478ca4d0a28e", "type": "github" }, "original": { @@ -63,11 +63,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1755186698, - "narHash": "sha256-wNO3+Ks2jZJ4nTHMuks+cxAiVBGNuEBXsT29Bz6HASo=", + "lastModified": 1776548001, + "narHash": "sha256-ZSK0NL4a1BwVbbTBoSnWgbJy9HeZFXLYQizjb2DPF24=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "fbcf476f790d8a217c3eab4e12033dc4a0f6d23c", + "rev": "b12141ef619e0a9c1c84dc8c684040326f27cdcc", "type": "github" }, "original": { @@ -109,11 +109,11 @@ ] }, "locked": { - "lastModified": 1755571033, - "narHash": "sha256-V8gmZBfMiFGCyGJQx/yO81LFJ4d/I5Jxs2id96rLxrM=", + "lastModified": 1776741231, + "narHash": "sha256-k9G98qzn+7npROUaks8VqCFm7cFtEG8ulQLBBo5lItg=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "95487740bb7ac11553445e9249041a6fa4b5eccf", + "rev": "02061303f7c4c964f7b4584dabd9e985b4cd442b", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index d3fdb424..29ab86e4 100644 --- a/flake.nix +++ b/flake.nix @@ -63,7 +63,9 @@ isMuslTarget = target: target == "x86_64-unknown-linux-musl" || target == "aarch64-unknown-linux-musl"; - rustToolchain = pkgs.rust-bin.stable.latest.default; + rustToolchain = pkgs.rust-bin.stable.latest.default.override { + targets = [ "wasm32-unknown-unknown" ]; + }; python = pkgs.python312; naersk-native = naersk.lib.${system}.override { @@ -261,6 +263,9 @@ behave pkg-config openssl + wasm-pack + wasm-bindgen-cli + binaryen ]; buildInputs = 
commonBuildInputs; diff --git a/tests/tower/test_build_package.py b/tests/tower/test_build_package.py index ab3b162b..6aeb2c88 100644 --- a/tests/tower/test_build_package.py +++ b/tests/tower/test_build_package.py @@ -126,23 +126,6 @@ def test_nested_source_files(self, tmp_path): assert "app/pkg/__init__.py" in entries assert "app/pkg/module.py" in entries - def test_manifest_contains_schedule(self, tmp_path): - towerfile = """\ -[app] -name = "scheduled-app" -script = "job.py" -source = ["*.py"] -schedule = "0 0 * * *" -""" - app_dir = _make_app(tmp_path, towerfile, {"job.py": "print('run')"}) - output = str(tmp_path / "out.tar.gz") - - tower.packages.build_package(app_dir, output) - - entries = _read_package(output) - manifest = json.loads(entries["MANIFEST"]) - assert manifest["schedule"] == "0 0 * * *" - def test_manifest_contains_parameters(self, tmp_path): towerfile = """\ [app] From 21e3b20ae5f320a6f5fc9f5f5c22fbd67555bda3 Mon Sep 17 00:00:00 2001 From: Ben Lovell Date: Wed, 22 Apr 2026 15:48:16 +0200 Subject: [PATCH 7/7] feat: publish multi-arch Docker image on release (#254) --- .github/workflows/publish-docker.yml | 44 ++++++++++++++++++++++++++++ .github/workflows/release.yml | 23 ++++++--------- Dockerfile | 18 ++++++++++++ dist-workspace.toml | 4 +-- 4 files changed, 73 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/publish-docker.yml create mode 100644 Dockerfile diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml new file mode 100644 index 00000000..e47b062e --- /dev/null +++ b/.github/workflows/publish-docker.yml @@ -0,0 +1,44 @@ +# Publish a release Docker image to GHCR. +# +# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job +# within `cargo-dist`. Runs after `host`, so the GitHub Release and its binary artifacts exist; +# the Dockerfile fetches the prebuilt musl binary from the release at image-build time. 
+ +name: "[tower] Publish Docker image" + +on: + workflow_call: + inputs: + plan: + required: true + type: string + +jobs: + docker-publish: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v6 + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build and push + env: + TAG: ${{ fromJson(inputs.plan).announcement_tag }} + PRERELEASE: ${{ fromJson(inputs.plan).announcement_is_prerelease }} + run: | + VERSION="${TAG#v}" + TAGS=(--tag "ghcr.io/tower/tower-cli:$VERSION") + if [ "$PRERELEASE" != "true" ]; then + TAGS+=(--tag "ghcr.io/tower/tower-cli:latest") + fi + docker buildx build \ + --platform linux/amd64,linux/arm64 \ + --build-arg VERSION="$VERSION" \ + "${TAGS[@]}" \ + --push . diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 15429ade..d4cecc44 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -312,20 +312,15 @@ jobs: plan: ${{ needs.plan.outputs.val }} secrets: inherit - announce: + custom-publish-docker: needs: - plan - host - - custom-publish-pypi - # use "always() && ..." to allow us to wait for all publish jobs while - # still allowing individual publish jobs to skip themselves (for prereleases). - # "host" however must run to completion, no skipping allowed! 
- if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }} - runs-on: "ubuntu-22.04" - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - uses: actions/checkout@v6 - with: - persist-credentials: false - submodules: recursive + if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }} + uses: ./.github/workflows/publish-docker.yml + with: + plan: ${{ needs.plan.outputs.val }} + secrets: inherit + permissions: + "contents": "read" + "packages": "write" diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..46368135 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,18 @@ +FROM --platform=$BUILDPLATFORM alpine:3 AS fetch +ARG TARGETARCH +ARG VERSION +RUN apk add --no-cache curl tar xz \ + && case "$TARGETARCH" in \ + amd64) ARCH=x86_64 ;; \ + arm64) ARCH=aarch64 ;; \ + *) echo "unsupported TARGETARCH: $TARGETARCH" >&2; exit 1 ;; \ + esac \ + && curl -fsSL -o /tmp/tower.tar.xz \ + "https://github.com/tower/tower-cli/releases/download/v${VERSION}/tower-${ARCH}-unknown-linux-musl.tar.xz" \ + && mkdir -p /out \ + && tar -xJf /tmp/tower.tar.xz -C /out --strip-components=1 \ + && chmod +x /out/tower + +FROM gcr.io/distroless/static-debian12 +COPY --from=fetch /out/tower /usr/local/bin/tower +ENTRYPOINT ["/usr/local/bin/tower"] diff --git a/dist-workspace.toml b/dist-workspace.toml index 646bc15f..f3e372dd 100644 --- a/dist-workspace.toml +++ b/dist-workspace.toml @@ -12,11 +12,11 @@ installers = ["shell", "homebrew", "msi"] # A GitHub repo to push Homebrew formulas to tap = "tower/tower-cli" # Target platforms to build apps for (Rust target-triple syntax) -targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-unknown-linux-musl"] +targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "aarch64-unknown-linux-musl", 
"x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-unknown-linux-musl"] # Path that installers should place binaries in install-path = "CARGO_HOME" # Publish jobs to run in CI -publish-jobs = ["./publish-pypi"] +publish-jobs = ["./publish-pypi", "./publish-docker"] # Whether to install an updater program install-updater = false # Whether dist should create a Github Release or use an existing draft