From dec2cca9fad0de7a93a70d6a3b430a0a3af8690e Mon Sep 17 00:00:00 2001 From: Sam Markowitz Date: Thu, 9 Apr 2026 09:48:36 +0300 Subject: [PATCH] docs: update InvokeLLM model options to match backend Adds gpt_5_4 and renames gemini_3_pro to gemini_3_1_pro in InvokeLLMParams to match the RuntimeModel enum in the backend. Also documents the node_modules symlink approach for running create-docs from a git worktree. Co-Authored-By: Claude Sonnet 4.6 --- .../sdk-docs-writing/references/pipeline-config.md | 14 ++++++++++++++ src/modules/integrations.types.ts | 4 ++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/.claude/skills/sdk-docs-writing/references/pipeline-config.md b/.claude/skills/sdk-docs-writing/references/pipeline-config.md index 1eef266..6ad5fdc 100644 --- a/.claude/skills/sdk-docs-writing/references/pipeline-config.md +++ b/.claude/skills/sdk-docs-writing/references/pipeline-config.md @@ -8,6 +8,20 @@ cd docs mint dev ``` +## Running in a git worktree + +Git worktrees don't have their own `node_modules`. Before running `npm run create-docs` from a worktree, symlink in the main repo's `node_modules`: + +```bash +# If a partial node_modules exists (e.g. from a failed npm install), remove it first +rm -rf /path/to/worktree/javascript-sdk/node_modules + +ln -s /path/to/main-repo/javascript-sdk/node_modules \ + /path/to/worktree/javascript-sdk/node_modules +``` + +Then `npm run create-docs` works normally from the worktree. + ## Pipeline configuration files | File | Purpose | diff --git a/src/modules/integrations.types.ts b/src/modules/integrations.types.ts index 58e493d..ad2c977 100644 --- a/src/modules/integrations.types.ts +++ b/src/modules/integrations.types.ts @@ -48,9 +48,9 @@ export interface InvokeLLMParams { prompt: string; /** Optionally specify a model to override the app-level model setting for this specific call. 
* - * Options: `"gpt_5_mini"`, `"gemini_3_flash"`, `"gpt_5"`, `"gemini_3_pro"`, `"claude_sonnet_4_6"`, `"claude_opus_4_6"` + * Options: `"gpt_5_mini"`, `"gemini_3_flash"`, `"gpt_5"`, `"gpt_5_4"`, `"gemini_3_1_pro"`, `"claude_sonnet_4_6"`, `"claude_opus_4_6"` */ - model?: 'gpt_5_mini' | 'gemini_3_flash' | 'gpt_5' | 'gemini_3_pro' | 'claude_sonnet_4_6' | 'claude_opus_4_6'; + model?: 'gpt_5_mini' | 'gemini_3_flash' | 'gpt_5' | 'gpt_5_4' | 'gemini_3_1_pro' | 'claude_sonnet_4_6' | 'claude_opus_4_6'; /** If set to `true`, the LLM will use Google Search, Maps, and News to gather real-time context before answering. * @default false */