Fundamental_Analysis/frontend/src/stores/useWorkflowStore.ts
Lv, Qi 0c975bb8f1 Refactor: Remove legacy analysis results and implement workflow history
- **Common Contracts**: Updated DTOs and models to support workflow history; removed legacy analysis result DTOs.
- **Data Persistence Service**:
    - Removed `analysis_results` table logic and API endpoints.
    - Implemented `workflow_history` API and DB access (`history.rs`).
    - Fixed compilation errors and updated tests.
    - Exposed Postgres port in `docker-compose.yml` for easier debugging/offline checks.
- **API Gateway**:
    - Implemented `history` endpoints (get history list, get by ID).
    - Removed legacy `analysis-results` endpoints.
    - Fixed routing and handler logic in `api.rs`.
- **Report Generator Service**:
    - Removed dependency on legacy `analysis-results` persistence calls.
    - Fixed compilation errors.
- **Workflow Orchestrator**: Fixed warnings and minor logic issues.
- **Providers**: Updated provider services (alphavantage, tushare, finnhub, yfinance, mock) to align with contract changes.
- **Frontend**:
    - Updated `ReportPage` and stores to use new workflow history.
    - Added `RecentReportsDropdown` component.
    - Cleaned up `RealtimeLogs` component.
- **Documentation**: Moved completed design tasks to `completed/` and added refactoring context docs.

Confirmed all services pass `cargo check`.
2025-11-29 14:46:44 +08:00

286 lines
9.1 KiB
TypeScript

import { create } from 'zustand';
import { schemas } from '../api/schema.gen';
import { WorkflowDag, TaskState, TaskStatus, WorkflowEvent, WorkflowStatus, ConnectionStatus } from '../types/workflow';
/**
 * State + actions for tracking a single workflow run.
 *
 * The store operates in two modes: 'realtime' (driven by streamed
 * WorkflowEvents via handleEvent) and 'historical' (rehydrated in one shot
 * from a persisted snapshot via loadFromSnapshot).
 */
interface WorkflowStoreState {
  /** Id of the workflow request being tracked; null before initialize(). */
  requestId: string | null;
  /** Overall workflow/connection status. */
  status: WorkflowStatus;
  /** 'realtime' while streaming live events, 'historical' after snapshot load. */
  mode: 'realtime' | 'historical';
  /** Task graph for the current run; null until WorkflowStarted arrives. */
  dag: WorkflowDag | null;
  /** Per-task runtime state, keyed by task id. */
  tasks: Record<string, TaskState>;
  /** Failure reason when the workflow fails; null otherwise. */
  error: string | null;
  activeTab: string; // For UI linking
  // Actions
  initialize: (requestId: string) => void;
  setMode: (mode: 'realtime' | 'historical') => void;
  setDag: (dag: WorkflowDag) => void;
  updateTaskStatus: (taskId: string, status: TaskStatus, message?: string, progress?: number, inputCommit?: string, outputCommit?: string) => void;
  updateTaskContent: (taskId: string, delta: string) => void; // Stream content (append)
  setTaskContent: (taskId: string, content: string) => void; // Set full content
  appendTaskLog: (taskId: string, log: string) => void;
  setActiveTab: (tabId: string) => void;
  completeWorkflow: (result: unknown) => void;
  failWorkflow: (reason: string) => void;
  handleEvent: (event: WorkflowEvent) => void;
  // `unknown` instead of `any`: callers pass raw snapshot payloads; the
  // implementation is responsible for interpreting the shape.
  loadFromSnapshot: (snapshotPayload: unknown) => void;
  reset: () => void;
}
export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
  // --- Initial state ---
  requestId: null,
  status: ConnectionStatus.Idle,
  mode: 'realtime',
  dag: null,
  tasks: {},
  error: null,
  activeTab: 'overview',

  /** Begin tracking a new realtime run, discarding any previous task state. */
  initialize: (requestId) => set({
    requestId,
    status: ConnectionStatus.Connecting,
    mode: 'realtime',
    error: null,
    tasks: {},
    activeTab: 'overview'
  }),

  setMode: (mode) => set({ mode }),

  /**
   * Install the workflow DAG, seed one empty TaskState per node, and mark
   * the workflow as Running.
   */
  setDag: (dag) => {
    // Initialize tasks based on DAG
    const initialTasks: Record<string, TaskState> = {};
    dag.nodes.forEach(node => {
      initialTasks[node.id] = {
        status: node.initial_status,
        logs: [],
        progress: 0,
        content: ''
      };
    });
    set({ dag, tasks: initialTasks, status: schemas.TaskStatus.enum.Running });
  },

  /**
   * Merge a status transition into a task's state. If the task is unknown
   * (event arrived before the DAG, or an orphan event), it is created with
   * Pending defaults first. A non-empty `message` is also appended to the
   * task's log with a local timestamp.
   */
  updateTaskStatus: (taskId, status, message, progress, inputCommit, outputCommit) => {
    set(state => {
      let task = state.tasks[taskId];
      // Create task if it doesn't exist (handle orphan events or pre-DAG events)
      if (!task) {
        task = {
          status: schemas.TaskStatus.enum.Pending, // Default initial status
          logs: [],
          progress: 0,
          content: ''
        };
      }
      const newLogs = [...task.logs];
      if (message) {
        newLogs.push(`[${new Date().toLocaleTimeString()}] ${message}`);
      }
      return {
        tasks: {
          ...state.tasks,
          [taskId]: {
            ...task,
            status,
            // '' is deliberately treated as "no new message" (keeps the old one)
            message: message || task.message,
            // `??` (not `||`) so an explicit progress of 0 is applied
            progress: progress ?? task.progress,
            logs: newLogs,
            inputCommit: inputCommit || task.inputCommit,
            outputCommit: outputCommit || task.outputCommit
          }
        }
      };
    });
  },

  /** Append a streamed content delta to a task; no-op if the task is unknown. */
  updateTaskContent: (taskId, delta) => {
    set(state => {
      const task = state.tasks[taskId];
      if (!task) return state;
      return {
        tasks: {
          ...state.tasks,
          [taskId]: {
            ...task,
            content: (task.content || "") + delta
          }
        }
      };
    });
  },

  /** Replace a task's full content; no-op if the task is unknown. */
  setTaskContent: (taskId, content) => {
    set(state => {
      const task = state.tasks[taskId];
      if (!task) return state;
      return {
        tasks: {
          ...state.tasks,
          [taskId]: {
            ...task,
            content
          }
        }
      };
    });
  },

  /**
   * Append a pre-formatted log line to a task, creating the task with
   * Pending defaults if a log arrives before the DAG.
   */
  appendTaskLog: (taskId, log) => {
    set(state => {
      let task = state.tasks[taskId];
      if (!task) {
        task = {
          status: schemas.TaskStatus.enum.Pending,
          logs: [],
          progress: 0,
          content: ''
        };
      }
      return {
        tasks: {
          ...state.tasks,
          [taskId]: {
            ...task,
            logs: [...task.logs, log]
          }
        }
      };
    });
  },

  setActiveTab: (tabId) => set({ activeTab: tabId }),

  // Result summary is currently unused; only the terminal status is recorded.
  completeWorkflow: (_result) => set({ status: schemas.TaskStatus.enum.Completed }),
  failWorkflow: (reason) => set({ status: schemas.TaskStatus.enum.Failed, error: reason }),

  /**
   * Dispatch a streamed WorkflowEvent to the matching store action.
   * WorkflowStateSnapshot is used for realtime rehydration (e.g. page
   * refresh) and merges status/output/metadata into already-known tasks.
   */
  handleEvent: (event: WorkflowEvent) => {
    const state = get();
    // console.log('Handling Event:', event.type, event);
    switch (event.type) {
      case 'WorkflowStarted':
        state.setDag(event.payload.task_graph);
        break;
      case 'TaskStateChanged': {
        const p = event.payload;
        // @ts-ignore
        state.updateTaskStatus(
          p.task_id,
          p.status,
          p.message ?? undefined,
          // `??` (not `||`) so a progress of 0 reaches updateTaskStatus
          p.progress ?? undefined,
          p.input_commit,
          p.output_commit
        );
        break;
      }
      case 'TaskStreamUpdate': {
        const p = event.payload;
        state.updateTaskContent(p.task_id, p.content_delta);
        break;
      }
      // @ts-ignore
      case 'TaskLog': {
        const p = event.payload;
        const time = new Date(p.timestamp).toLocaleTimeString();
        const log = `[${time}] [${p.level}] ${p.message}`;
        state.appendTaskLog(p.task_id, log);
        break;
      }
      case 'WorkflowCompleted': {
        state.completeWorkflow(event.payload.result_summary);
        break;
      }
      case 'WorkflowFailed': {
        state.failWorkflow(event.payload.reason);
        break;
      }
      case 'WorkflowStateSnapshot': {
        // Used for real-time rehydration (e.g. page refresh)
        if (event.payload.task_graph) {
          state.setDag(event.payload.task_graph);
        }
        const currentTasks = get().tasks;
        const newTasks = { ...currentTasks };
        // NOTE(review): snapshot merges only touch tasks already present in
        // `newTasks`; statuses for unknown task ids are silently dropped.
        if (event.payload.tasks_status) {
          Object.entries(event.payload.tasks_status).forEach(([taskId, status]) => {
            if (newTasks[taskId] && status) {
              newTasks[taskId] = { ...newTasks[taskId], status: status as TaskStatus };
            }
          });
        }
        if (event.payload.tasks_output) {
          Object.entries(event.payload.tasks_output).forEach(([taskId, outputCommit]) => {
            if (newTasks[taskId] && outputCommit) {
              newTasks[taskId] = { ...newTasks[taskId], outputCommit: outputCommit as string };
            }
          });
        }
        if (event.payload.tasks_metadata) {
          Object.entries(event.payload.tasks_metadata).forEach(([taskId, metadata]) => {
            if (newTasks[taskId] && metadata) {
              // Note: The generated client types define metadata as TaskMetadata which includes optional paths.
              // We store it directly as it matches our TaskState.metadata shape partially.
              newTasks[taskId] = { ...newTasks[taskId], metadata: metadata };
            }
          });
        }
        set({ tasks: newTasks });
        break;
      }
    }
  },

  /**
   * Rebuild the full store state from a persisted history snapshot and
   * switch to 'historical' mode. All tasks are shown as complete
   * (progress 100); content is not in the snapshot and is loaded on demand.
   */
  loadFromSnapshot: (payload: any) => {
    const dag = payload.task_graph;
    const tasks_status = payload.tasks_status;
    const tasks_output = payload.tasks_output;
    const tasks_metadata = payload.tasks_metadata;
    const newTasks: Record<string, TaskState> = {};
    if (dag) {
      dag.nodes.forEach((node: any) => {
        newTasks[node.id] = {
          status: tasks_status?.[node.id] || node.initial_status,
          logs: [],
          progress: 100,
          content: '', // Content is not in snapshot, needs on-demand loading
          outputCommit: tasks_output?.[node.id],
          metadata: tasks_metadata?.[node.id]
        };
      });
    }
    set({
      dag,
      tasks: newTasks,
      status: schemas.TaskStatus.enum.Completed,
      mode: 'historical',
      error: null
    });
  },

  /** Return the store to its pristine, pre-initialize state. */
  reset: () => set({
    requestId: null,
    status: ConnectionStatus.Idle,
    mode: 'realtime',
    dag: null,
    tasks: {},
    error: null,
    activeTab: 'overview'
  })
}));