Skip to content

Commit 7f369c5

Browse files
committed
fix(ci): 修复桌面 CI 构建 - 纳入 lib 模块并移除无效 action 参数
Made-with: Cursor
1 parent 335064f commit 7f369c5

5 files changed

Lines changed: 290 additions & 2 deletions

File tree

.github/workflows/build-desktop-ci.yml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,4 +51,3 @@ jobs:
5151
with:
5252
projectPath: desktop
5353
args: ${{ matrix.args }}
54-
uploadWorkflowArtifacts: true

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,8 @@ dist/
1010
downloads/
1111
eggs/
1212
.eggs/
13-
lib/
13+
# 仅忽略仓库根目录的 lib(Python 等),不忽略 desktop/src/lib(前端类型与配置)
14+
/lib/
1415
lib64/
1516
parts/
1617
sdist/

desktop/src/lib/apiConfig.ts

Lines changed: 101 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,101 @@
// localStorage key under which the serialized ApiConfig is persisted.
const STORAGE_KEY = "comsol-agent-api-config";

/** Identifiers of the LLM backends supported by the app. */
export type LLMBackendId = "deepseek" | "kimi" | "ollama" | "openai-compatible";

/**
 * API/backend configuration persisted in localStorage.
 * All string fields default to "" (empty means "not configured").
 */
export interface ApiConfig {
  // Backend to prefer by default; null means the user has not chosen one yet.
  preferred_backend: LLMBackendId | null;
  deepseek_api_key: string;
  deepseek_model: string;
  kimi_api_key: string;
  kimi_model: string;
  openai_compatible_base_url: string;
  openai_compatible_api_key: string;
  openai_compatible_model: string;
  ollama_url: string;
  ollama_model: string;
  // Path to a COMSOL jar; NOTE(review): presumably a local filesystem path — confirm with the backend.
  comsol_jar_path: string;
}

// Fallback values used when nothing (or only a subset of keys) is stored.
const defaultConfig: ApiConfig = {
  preferred_backend: null,
  deepseek_api_key: "",
  deepseek_model: "deepseek-reasoner",
  kimi_api_key: "",
  kimi_model: "moonshot-v1-8k",
  openai_compatible_base_url: "",
  openai_compatible_api_key: "",
  openai_compatible_model: "gpt-3.5-turbo",
  ollama_url: "http://localhost:11434",
  ollama_model: "llama3",
  comsol_jar_path: "",
};
33+
export function loadApiConfig(): ApiConfig {
34+
try {
35+
const raw = localStorage.getItem(STORAGE_KEY);
36+
if (raw) {
37+
const parsed = JSON.parse(raw) as Partial<ApiConfig>;
38+
return { ...defaultConfig, ...parsed };
39+
}
40+
} catch (_) {}
41+
return { ...defaultConfig };
42+
}
43+
44+
export function saveApiConfig(config: ApiConfig): void {
45+
try {
46+
localStorage.setItem(STORAGE_KEY, JSON.stringify(config));
47+
} catch (_) {}
48+
}
49+
50+
/** 转为 .env 风格的键值对,供后端 config_save 使用 */
51+
export function apiConfigToEnv(config: ApiConfig): Record<string, string> {
52+
const env: Record<string, string> = {};
53+
if (config.preferred_backend) env.LLM_BACKEND = config.preferred_backend;
54+
if (config.deepseek_api_key) env.DEEPSEEK_API_KEY = config.deepseek_api_key;
55+
if (config.deepseek_model) env.DEEPSEEK_MODEL = config.deepseek_model;
56+
if (config.kimi_api_key) env.KIMI_API_KEY = config.kimi_api_key;
57+
if (config.kimi_model) env.KIMI_MODEL = config.kimi_model;
58+
if (config.openai_compatible_base_url)
59+
env.OPENAI_COMPATIBLE_BASE_URL = config.openai_compatible_base_url;
60+
if (config.openai_compatible_api_key)
61+
env.OPENAI_COMPATIBLE_API_KEY = config.openai_compatible_api_key;
62+
if (config.openai_compatible_model)
63+
env.OPENAI_COMPATIBLE_MODEL = config.openai_compatible_model;
64+
if (config.ollama_url) env.OLLAMA_URL = config.ollama_url;
65+
if (config.ollama_model) env.OLLAMA_MODEL = config.ollama_model;
66+
if (config.comsol_jar_path) env.COMSOL_JAR_PATH = config.comsol_jar_path;
67+
return env;
68+
}
69+
70+
/** 根据当前后端从 config 中取出 API 相关 payload,供 run/plan 请求使用 */
71+
export function getPayloadFromConfig(
72+
backend: string | null,
73+
config: ApiConfig
74+
): Record<string, unknown> {
75+
const payload: Record<string, unknown> = {};
76+
switch (backend) {
77+
case "deepseek":
78+
if (config.deepseek_api_key) payload.api_key = config.deepseek_api_key;
79+
if (config.deepseek_model) payload.model = config.deepseek_model;
80+
break;
81+
case "kimi":
82+
if (config.kimi_api_key) payload.api_key = config.kimi_api_key;
83+
if (config.kimi_model) payload.model = config.kimi_model;
84+
break;
85+
case "openai-compatible":
86+
if (config.openai_compatible_base_url)
87+
payload.base_url = config.openai_compatible_base_url;
88+
if (config.openai_compatible_api_key)
89+
payload.api_key = config.openai_compatible_api_key;
90+
if (config.openai_compatible_model)
91+
payload.model = config.openai_compatible_model;
92+
break;
93+
case "ollama":
94+
if (config.ollama_url) payload.ollama_url = config.ollama_url;
95+
if (config.ollama_model) payload.model = config.ollama_model;
96+
break;
97+
default:
98+
break;
99+
}
100+
return payload;
101+
}
desktop/src/lib/ — filename header missing from the extraction; presumably the conversation-storage module (verify against the repository)

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
import type { Conversation } from "./types";
2+
import type { ChatMessage } from "./types";
// localStorage keys for persisted chat state: the conversation list, the
// per-conversation message map, and the id of the last active conversation.
const CONVERSATIONS_KEY = "comsol-agent-conversations";
const MESSAGES_KEY = "comsol-agent-messages";
const CURRENT_ID_KEY = "comsol-agent-current-conversation-id";
8+
export function loadConversations(): Conversation[] {
9+
try {
10+
const raw = localStorage.getItem(CONVERSATIONS_KEY);
11+
if (raw) {
12+
const parsed = JSON.parse(raw) as unknown;
13+
return Array.isArray(parsed) ? (parsed as Conversation[]) : [];
14+
}
15+
} catch (_) {}
16+
return [];
17+
}
18+
19+
export function saveConversations(conversations: Conversation[]): void {
20+
try {
21+
localStorage.setItem(CONVERSATIONS_KEY, JSON.stringify(conversations));
22+
} catch (_) {}
23+
}
24+
25+
export function loadMessagesByConversation(): Record<string, ChatMessage[]> {
26+
try {
27+
const raw = localStorage.getItem(MESSAGES_KEY);
28+
if (raw) {
29+
const parsed = JSON.parse(raw) as unknown;
30+
return typeof parsed === "object" && parsed !== null
31+
? (parsed as Record<string, ChatMessage[]>)
32+
: {};
33+
}
34+
} catch (_) {}
35+
return {};
36+
}
37+
38+
export function saveMessagesByConversation(
39+
data: Record<string, ChatMessage[] | unknown[]>
40+
): void {
41+
try {
42+
localStorage.setItem(MESSAGES_KEY, JSON.stringify(data));
43+
} catch (_) {}
44+
}
45+
46+
export function loadCurrentConversationId(): string | null {
47+
try {
48+
const raw = localStorage.getItem(CURRENT_ID_KEY);
49+
return typeof raw === "string" && raw ? raw : null;
50+
} catch (_) {}
51+
return null;
52+
}
53+
54+
export function saveCurrentConversationId(id: string): void {
55+
try {
56+
localStorage.setItem(CURRENT_ID_KEY, id);
57+
} catch (_) {}
58+
}

desktop/src/lib/types.ts

Lines changed: 129 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,129 @@
/** Message author role. */
export type MessageRole = "user" | "assistant" | "system";

/** A single chat message. */
export interface ChatMessage {
  id: string;
  role: MessageRole;
  text: string;
  // NOTE(review): presumably whether the associated run succeeded — confirm with callers.
  success?: boolean;
  // Streaming run events attached to this message (see RunEvent below).
  events?: RunEvent[];
  /** Timestamp (optional, used for display). */
  time?: number;
}

/** Run / streaming event (matches the backend "bridge-event" payload). */
export interface RunEvent {
  // Marker distinguishing event objects; NOTE(review): semantics not visible here — confirm.
  _event?: boolean;
  type: string;
  data?: Record<string, unknown>;
}
/** Which modal dialog is open; null means none. */
export type DialogType =
  | null
  | "help"
  | "backend"
  | "context"
  | "exec"
  | "output"
  | "settings"
  | "ops";

/** Conversation summary (sidebar list item). */
export interface Conversation {
  id: string;
  title: string;
  // Creation time; NOTE(review): presumably epoch milliseconds — confirm where it is set.
  createdAt: number;
}
/** Return value of the backend's bridge_send. */
export interface BridgeResponse {
  ok: boolean;
  message: string;
  /** List returned by some commands (e.g. list_models). */
  models?: MyComsolModel[];
}

/** List item for the settings page's "models I created" list. */
export interface MyComsolModel {
  path: string;
  title: string;
  is_latest?: boolean;
}
/** Slash-command item (used by the prompt dropdown). */
export interface SlashCommandItem {
  name: string;
  display: string;
  description: string;
}

// Commands offered by the prompt's "/" autocomplete. The display and
// description strings are user-facing (Chinese UI) and must stay as-is.
export const SLASH_COMMANDS: SlashCommandItem[] = [
  { name: "help", display: "/help", description: "显示帮助" },
  { name: "ops", display: "/ops", description: "支持的 COMSOL 操作" },
  { name: "run", display: "/run", description: "默认模式(自然语言 → 模型)" },
  { name: "plan", display: "/plan", description: "计划模式(自然语言 → JSON)" },
  { name: "exec", display: "/exec", description: "根据 JSON 创建模型" },
  { name: "backend", display: "/backend", description: "选择 LLM 后端" },
  { name: "context", display: "/context", description: "查看或清除对话历史" },
  { name: "output", display: "/output", description: "设置默认输出文件名" },
  { name: "demo", display: "/demo", description: "演示示例" },
  { name: "doctor", display: "/doctor", description: "环境诊断" },
  { name: "exit", display: "/exit", description: "退出" },
];
/** Quick prompt for common scenarios (shown in MessageList's empty state). */
export interface QuickPromptItem {
  label: string;
  text: string;
}

/** A titled group of quick prompts, with an optional hint badge. */
export interface QuickPromptGroup {
  title: string;
  hint?: string;
  prompts: QuickPromptItem[];
}

// Grouped starter prompts; `text` is sent as-is when clicked (some entries
// are slash commands). User-facing strings must stay unchanged.
export const QUICK_PROMPT_GROUPS: QuickPromptGroup[] = [
  {
    title: "几何",
    hint: "2D/3D",
    prompts: [
      { label: "矩形", text: "创建一个宽 1 米、高 0.5 米的矩形" },
      { label: "圆", text: "创建一个半径为 0.2 米的圆" },
      { label: "长方体", text: "创建一个 1×0.5×0.3 米的长方体" },
    ],
  },
  {
    title: "物理与求解",
    hint: "传热/稳态",
    prompts: [
      { label: "传热稳态", text: "添加固体传热物理场并做稳态研究" },
      { label: "结构静力学", text: "添加固体力学并做稳态研究" },
    ],
  },
  {
    title: "诊断与命令",
    hint: "环境/帮助",
    prompts: [
      { label: "环境诊断", text: "/doctor" },
      { label: "帮助", text: "/help" },
    ],
  },
];
/** COMSOL operation description (shown in the /ops dialog). */
export interface ComsolOp {
  action: string;
  label: string;
  description: string;
}

// Supported COMSOL operation categories. User-facing strings must stay unchanged.
export const COMSOL_OPS: ComsolOp[] = [
  { action: "geometry", label: "几何", description: "创建/编辑几何体与布尔运算" },
  { action: "physics", label: "物理场", description: "添加物理场与边界条件" },
  { action: "mesh", label: "网格", description: "划分网格" },
  { action: "study", label: "研究", description: "稳态/瞬态/特征值等研究" },
  { action: "material", label: "材料", description: "材料分配与属性" },
];

0 commit comments

Comments
 (0)