Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ curl -X POST localhost:4000/api/agents/dev/my-skill/run \

```bash
skrun init my-agent
skrun init my-agent --provider google
cd my-agent
# Creates SKILL.md (instructions) + agent.yaml (config)
```
Expand Down
2 changes: 2 additions & 0 deletions docs/cli.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ Create a new Skrun agent.
```bash
skrun init [dir]
skrun init my-agent
skrun init my-agent --provider google
skrun init --from-skill ./existing-skill
```

Expand All @@ -17,6 +18,7 @@ skrun init --from-skill ./existing-skill
| `--force` | Overwrite existing files |
| `--name <name>` | Agent name (non-interactive) |
| `--description <desc>` | Agent description (non-interactive) |
| `--provider <provider>` | Provider with a default model: `anthropic`, `openai`, `google`, `mistral`, `groq` |
| `--model <provider/name>` | Model (non-interactive) |
| `--namespace <ns>` | Namespace (non-interactive) |

Expand Down
39 changes: 39 additions & 0 deletions packages/cli/src/commands/init.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import { describe, expect, it, vi } from "vitest";
import * as prompts from "../utils/prompts.js";
import { resolveInitModel } from "./init.js";

// Unit tests for resolveInitModel: flag precedence (--model beats --provider)
// and the interactive fallback when neither flag is given.
describe("resolveInitModel", () => {
  it("uses the provider default model without prompting", async () => {
    const spy = vi.spyOn(prompts, "askModel");

    const model = await resolveInitModel({ provider: "google" });

    expect(model).toEqual({ provider: "google", name: "gemini-2.5-flash" });
    expect(spy).not.toHaveBeenCalled();
  });

  it("prefers an explicit model over the provider flag", async () => {
    const spy = vi.spyOn(prompts, "askModel");

    const model = await resolveInitModel({
      provider: "google",
      model: "openai/gpt-4o-mini",
    });

    expect(model).toEqual({ provider: "openai", name: "gpt-4o-mini" });
    expect(spy).not.toHaveBeenCalled();
  });

  it("falls back to the interactive model prompt", async () => {
    // Stub the prompt so the test stays non-interactive and deterministic.
    const spy = vi.spyOn(prompts, "askModel").mockResolvedValue({
      provider: "anthropic",
      name: "claude-sonnet-4-20250514",
    });

    const model = await resolveInitModel({});

    expect(model).toEqual({ provider: "anthropic", name: "claude-sonnet-4-20250514" });
    expect(spy).toHaveBeenCalledTimes(1);
  });
});
46 changes: 32 additions & 14 deletions packages/cli/src/commands/init.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
import { basename, join, resolve } from "node:path";
import { AgentConfigSchema, serializeAgentYaml } from "@skrun-dev/schema";
import type { Command } from "commander";
import { type Command, Option } from "commander";
import * as format from "../utils/format.js";
import { askModel, askText } from "../utils/prompts.js";
import { DEFAULT_MODELS_BY_PROVIDER, askModel, askText } from "../utils/prompts.js";
import { initFromSkill } from "./init-from-skill.js";

const SKILL_MD_TEMPLATE = (name: string, description: string) => `---
Expand Down Expand Up @@ -31,6 +31,12 @@ export function registerInitCommand(program: Command): void {
.option("--force", "Overwrite existing files")
.option("--name <name>", "Agent name (non-interactive)")
.option("--description <desc>", "Agent description (non-interactive)")
.addOption(
new Option(
"--provider <provider>",
"Provider with a default model (non-interactive)",
).choices(Object.keys(DEFAULT_MODELS_BY_PROVIDER)),
)
.option("--model <model>", "Model as provider/name (non-interactive)")
.option("--namespace <ns>", "Agent namespace (non-interactive)")
.action(async (dir: string | undefined, opts) => {
Expand All @@ -46,10 +52,32 @@ interface InitOptions {
force?: boolean;
name?: string;
description?: string;
provider?: keyof typeof DEFAULT_MODELS_BY_PROVIDER;
model?: string;
namespace?: string;
}

/**
 * Resolve the model for `skrun init` from the CLI flags.
 *
 * Precedence: explicit `--model provider/name` > `--provider` (which picks
 * that provider's default model) > interactive `askModel()` prompt.
 *
 * @param opts - The `model` / `provider` subset of the init flags.
 * @returns The resolved `{ provider, name }` pair.
 * @throws Error if `--model` is not of the form `provider/name`.
 */
export async function resolveInitModel(
  opts: Pick<InitOptions, "model" | "provider">,
): Promise<{ provider: string; name: string }> {
  if (opts.model) {
    const slash = opts.model.indexOf("/");
    // Reject malformed input such as `--model gpt-4o` or `--model /gpt-4o`:
    // the previous split() silently produced an empty or bogus part.
    if (slash <= 0 || slash === opts.model.length - 1) {
      throw new Error(
        `Invalid --model "${opts.model}": expected provider/name (e.g. openai/gpt-4o).`,
      );
    }
    return {
      provider: opts.model.slice(0, slash),
      // Model names may themselves contain "/" — keep everything after the
      // first separator (same behavior as the original split/slice/join).
      name: opts.model.slice(slash + 1),
    };
  }

  if (opts.provider) {
    // Non-interactive: use the curated default model for this provider.
    return {
      provider: opts.provider,
      name: DEFAULT_MODELS_BY_PROVIDER[opts.provider],
    };
  }

  // No flags given: fall back to the interactive picker.
  return askModel();
}

async function runInit(dir: string | undefined, opts: InitOptions): Promise<void> {
const targetDir = dir ? resolve(dir) : process.cwd();
const dirName = basename(targetDir);
Expand All @@ -66,17 +94,7 @@ async function runInit(dir: string | undefined, opts: InitOptions): Promise<void
const description =
opts.description ?? (await askText("Description?", `A Skrun agent for ${name}.`));

let provider: string;
let modelName: string;
if (opts.model) {
const parts = opts.model.split("/");
provider = parts[0];
modelName = parts.slice(1).join("/");
} else {
const model = await askModel();
provider = model.provider;
modelName = model.name;
}
const model = await resolveInitModel(opts);

const namespace = opts.namespace ?? (await askText("Namespace?", "my"));

Expand All @@ -92,7 +110,7 @@ async function runInit(dir: string | undefined, opts: InitOptions): Promise<void
const config = AgentConfigSchema.parse({
name: `${namespace}/${name}`,
version: "1.0.0",
model: { provider, name: modelName },
model,
inputs: [{ name: "query", type: "string", required: true }],
outputs: [{ name: "result", type: "string" }],
});
Expand Down
17 changes: 13 additions & 4 deletions packages/cli/src/utils/prompts.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,19 @@
import * as p from "@clack/prompts";

// Default model for each supported provider. The keys double as the valid
// values of the CLI's `--provider` flag (init.ts feeds Object.keys of this
// object into commander's .choices()), so adding an entry here makes the
// provider selectable everywhere at once. `as const` keeps both keys and
// model names as literal types for `keyof typeof` lookups.
export const DEFAULT_MODELS_BY_PROVIDER = {
  anthropic: "claude-sonnet-4-20250514",
  openai: "gpt-4o",
  google: "gemini-2.5-flash",
  mistral: "mistral-large-latest",
  groq: "llama-3.3-70b-versatile",
} as const;

// Options shown by the interactive model picker; `value` is "provider/name".
const MODEL_OPTIONS = [
  // NOTE(review): the next four entries (em-dash labels) appear to be diff
  // residue from the scraped patch view — duplicates of the five hyphen-label
  // entries below, which look like the intended post-change list. Confirm
  // against the actual file before relying on this span.
  { value: "anthropic/claude-sonnet-4-20250514", label: "Anthropic — claude-sonnet-4" },
  { value: "openai/gpt-4o", label: "OpenAI — gpt-4o" },
  { value: "mistral/mistral-large-latest", label: "Mistral — mistral-large" },
  { value: "groq/llama-3.3-70b-versatile", label: "Groq — llama-3.3-70b" },
  { value: "anthropic/claude-sonnet-4-20250514", label: "Anthropic - claude-sonnet-4" },
  { value: "openai/gpt-4o", label: "OpenAI - gpt-4o" },
  { value: "google/gemini-2.5-flash", label: "Google - gemini-2.5-flash" },
  { value: "mistral/mistral-large-latest", label: "Mistral - mistral-large" },
  { value: "groq/llama-3.3-70b-versatile", label: "Groq - llama-3.3-70b" },
] as const;

function handleCancel(value: unknown): asserts value is string {
Expand Down