fix: propagate errors correctly in report-generator and workflow-orchestrator

This commit is contained in:
Lv, Qi 2025-11-27 23:07:42 +08:00
parent a68a95338b
commit b8eab4dfd5
57 changed files with 5513 additions and 289 deletions

View File

@ -6,5 +6,64 @@ services:
workflow-orchestrator-service:
ports:
- "8005:8005" # Expose for debugging if needed
volumes:
- workflow_data:/mnt/workflow_data
environment:
- WORKFLOW_DATA_PATH=/mnt/workflow_data
alphavantage-provider-service:
volumes:
- workflow_data:/mnt/workflow_data
environment:
- WORKFLOW_DATA_PATH=/mnt/workflow_data
tushare-provider-service:
volumes:
- workflow_data:/mnt/workflow_data
environment:
- WORKFLOW_DATA_PATH=/mnt/workflow_data
finnhub-provider-service:
volumes:
- workflow_data:/mnt/workflow_data
environment:
- WORKFLOW_DATA_PATH=/mnt/workflow_data
yfinance-provider-service:
volumes:
- workflow_data:/mnt/workflow_data
environment:
- WORKFLOW_DATA_PATH=/mnt/workflow_data
report-generator-service:
volumes:
- workflow_data:/mnt/workflow_data
environment:
- WORKFLOW_DATA_PATH=/mnt/workflow_data
mock-provider-service:
build:
context: .
dockerfile: services/mock-provider-service/Dockerfile
container_name: mock-provider-service
environment:
SERVER_PORT: 8006
NATS_ADDR: nats://nats:4222
API_GATEWAY_URL: http://api-gateway:4000
SERVICE_HOST: mock-provider-service
WORKFLOW_DATA_PATH: /mnt/workflow_data
RUST_LOG: info
volumes:
- workflow_data:/mnt/workflow_data
depends_on:
- nats
networks:
- app-network
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:8006/health >/dev/null || exit 1"]
interval: 5s
timeout: 5s
retries: 12
volumes:
workflow_data:

View File

@ -124,11 +124,14 @@ services:
context: .
dockerfile: services/alphavantage-provider-service/Dockerfile
container_name: alphavantage-provider-service
volumes:
- workflow_data:/mnt/workflow_data
environment:
SERVER_PORT: 8000
NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
API_GATEWAY_URL: http://api-gateway:4000
WORKFLOW_DATA_PATH: /mnt/workflow_data
SERVICE_HOST: alphavantage-provider-service
RUST_LOG: info,axum=info
RUST_BACKTRACE: "1"
@ -148,12 +151,15 @@ services:
context: .
dockerfile: services/tushare-provider-service/Dockerfile
container_name: tushare-provider-service
volumes:
- workflow_data:/mnt/workflow_data
environment:
SERVER_PORT: 8001
NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
TUSHARE_API_URL: http://api.waditu.com
API_GATEWAY_URL: http://api-gateway:4000
WORKFLOW_DATA_PATH: /mnt/workflow_data
SERVICE_HOST: tushare-provider-service
RUST_LOG: info,axum=info
RUST_BACKTRACE: "1"
@ -173,12 +179,15 @@ services:
context: .
dockerfile: services/finnhub-provider-service/Dockerfile
container_name: finnhub-provider-service
volumes:
- workflow_data:/mnt/workflow_data
environment:
SERVER_PORT: 8002
NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
FINNHUB_API_URL: https://finnhub.io/api/v1
API_GATEWAY_URL: http://api-gateway:4000
WORKFLOW_DATA_PATH: /mnt/workflow_data
SERVICE_HOST: finnhub-provider-service
RUST_LOG: info,axum=info
RUST_BACKTRACE: "1"
@ -198,11 +207,14 @@ services:
context: .
dockerfile: services/yfinance-provider-service/Dockerfile
container_name: yfinance-provider-service
volumes:
- workflow_data:/mnt/workflow_data
environment:
SERVER_PORT: 8003
NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
API_GATEWAY_URL: http://api-gateway:4000
WORKFLOW_DATA_PATH: /mnt/workflow_data
SERVICE_HOST: yfinance-provider-service
RUST_LOG: info,axum=info
RUST_BACKTRACE: "1"
@ -225,10 +237,13 @@ services:
context: .
dockerfile: services/report-generator-service/Dockerfile
container_name: report-generator-service
volumes:
- workflow_data:/mnt/workflow_data
environment:
SERVER_PORT: 8004
NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
WORKFLOW_DATA_PATH: /mnt/workflow_data
RUST_LOG: info,axum=info
RUST_BACKTRACE: "1"
depends_on:
@ -247,10 +262,13 @@ services:
context: .
dockerfile: services/workflow-orchestrator-service/Dockerfile
container_name: workflow-orchestrator-service
volumes:
- workflow_data:/mnt/workflow_data
environment:
SERVER_PORT: 8005
NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
WORKFLOW_DATA_PATH: /mnt/workflow_data
RUST_LOG: info
RUST_BACKTRACE: "1"
depends_on:
@ -269,6 +287,7 @@ services:
# =================================================================
volumes:
workflow_data:
pgdata:
frontend_node_modules:
nats_data:

View File

@ -12,6 +12,7 @@
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-navigation-menu": "^1.2.14",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-tabs": "^1.1.13",
@ -1746,6 +1747,68 @@
}
}
},
"node_modules/@radix-ui/react-progress": {
"version": "1.1.8",
"resolved": "http://npm.repo.lan/@radix-ui/react-progress/-/react-progress-1.1.8.tgz",
"integrity": "sha512-+gISHcSPUJ7ktBy9RnTqbdKW78bcGke3t6taawyZ71pio1JewwGSJizycs7rLhGTvMJYCQB1DBK4KQsxs7U8dA==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-context": "1.1.3",
"@radix-ui/react-primitive": "2.1.4"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-context": {
"version": "1.1.3",
"resolved": "http://npm.repo.lan/@radix-ui/react-context/-/react-context-1.1.3.tgz",
"integrity": "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==",
"license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-primitive": {
"version": "2.1.4",
"resolved": "http://npm.repo.lan/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz",
"integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-slot": "1.2.4"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-roving-focus": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz",

View File

@ -8,13 +8,14 @@
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview",
"gen:api": "openapi-zod-client ../openapi.json -o src/api/schema.gen.ts --export-schemas --export-types && sed -i 's/^type /export type /' src/api/schema.gen.ts"
"gen:api": "openapi-zod-client ../openapi.json -o src/api/schema.gen.ts --export-schemas --export-types && sed -i 's/^type /export type /' src/api/schema.gen.ts && sed -i 's/^const /export const /' src/api/schema.gen.ts && sed -i 's/: z.ZodType<[^>]*>//g' src/api/schema.gen.ts"
},
"dependencies": {
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-navigation-menu": "^1.2.14",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-slot": "^1.2.4",
"@radix-ui/react-tabs": "^1.1.13",

View File

@ -135,10 +135,19 @@ export type WorkflowEvent =
};
type: "TaskStreamUpdate";
}
| {
payload: {
level: string;
message: string;
task_id: string;
timestamp: number;
};
type: "TaskLog";
}
| {
payload: {
end_timestamp: number;
result_summary: unknown;
result_summary?: unknown | undefined;
};
type: "WorkflowCompleted";
}
@ -160,61 +169,61 @@ export type WorkflowEvent =
type: "WorkflowStateSnapshot";
};
const AnalysisModuleConfig: z.ZodType<AnalysisModuleConfig> = z.object({
export const AnalysisModuleConfig = z.object({
dependencies: z.array(z.string()),
model_id: z.string(),
name: z.string(),
prompt_template: z.string(),
provider_id: z.string(),
});
const AnalysisTemplateSet: z.ZodType<AnalysisTemplateSet> = z.object({
export const AnalysisTemplateSet = z.object({
modules: z.record(AnalysisModuleConfig),
name: z.string(),
});
const AnalysisTemplateSets: z.ZodType<AnalysisTemplateSets> =
export const AnalysisTemplateSets =
z.record(AnalysisTemplateSet);
const DataSourceProvider = z.enum([
export const DataSourceProvider = z.enum([
"Tushare",
"Finnhub",
"Alphavantage",
"Yfinance",
]);
const DataSourceConfig: z.ZodType<DataSourceConfig> = z.object({
export const DataSourceConfig = z.object({
api_key: z.union([z.string(), z.null()]).optional(),
api_url: z.union([z.string(), z.null()]).optional(),
enabled: z.boolean(),
provider: DataSourceProvider,
});
const DataSourcesConfig: z.ZodType<DataSourcesConfig> =
export const DataSourcesConfig =
z.record(DataSourceConfig);
const TestLlmConfigRequest = z.object({
export const TestLlmConfigRequest = z.object({
api_base_url: z.string(),
api_key: z.string(),
model_id: z.string(),
});
const LlmModel: z.ZodType<LlmModel> = z.object({
export const LlmModel = z.object({
is_active: z.boolean(),
model_id: z.string(),
name: z.union([z.string(), z.null()]).optional(),
});
const LlmProvider: z.ZodType<LlmProvider> = z.object({
export const LlmProvider = z.object({
api_base_url: z.string(),
api_key: z.string(),
models: z.array(LlmModel),
name: z.string(),
});
const LlmProvidersConfig: z.ZodType<LlmProvidersConfig> = z.record(LlmProvider);
const TestConfigRequest = z.object({ data: z.unknown(), type: z.string() });
const TestConnectionResponse = z.object({
export const LlmProvidersConfig = z.record(LlmProvider);
export const TestConfigRequest = z.object({ data: z.unknown(), type: z.string() });
export const TestConnectionResponse = z.object({
message: z.string(),
success: z.boolean(),
});
const DiscoverPreviewRequest = z.object({
export const DiscoverPreviewRequest = z.object({
api_base_url: z.string(),
api_key: z.string(),
});
const FieldType = z.enum(["Text", "Password", "Url", "Boolean", "Select"]);
const ConfigKey = z.enum([
export const FieldType = z.enum(["Text", "Password", "Url", "Boolean", "Select"]);
export const ConfigKey = z.enum([
"ApiKey",
"ApiToken",
"ApiUrl",
@ -225,7 +234,7 @@ const ConfigKey = z.enum([
"SandboxMode",
"Region",
]);
const ConfigFieldSchema: z.ZodType<ConfigFieldSchema> = z.object({
export const ConfigFieldSchema = z.object({
default_value: z.union([z.string(), z.null()]).optional(),
description: z.union([z.string(), z.null()]).optional(),
field_type: FieldType,
@ -235,7 +244,7 @@ const ConfigFieldSchema: z.ZodType<ConfigFieldSchema> = z.object({
placeholder: z.union([z.string(), z.null()]).optional(),
required: z.boolean(),
});
const ProviderMetadata: z.ZodType<ProviderMetadata> = z.object({
export const ProviderMetadata = z.object({
config_schema: z.array(ConfigFieldSchema),
description: z.string(),
icon_url: z.union([z.string(), z.null()]).optional(),
@ -244,31 +253,31 @@ const ProviderMetadata: z.ZodType<ProviderMetadata> = z.object({
name_en: z.string(),
supports_test_connection: z.boolean(),
});
const SymbolResolveRequest = z.object({
export const SymbolResolveRequest = z.object({
market: z.union([z.string(), z.null()]).optional(),
symbol: z.string(),
});
const SymbolResolveResponse = z.object({
export const SymbolResolveResponse = z.object({
market: z.string(),
symbol: z.string(),
});
const DataRequest = z.object({
export const DataRequest = z.object({
market: z.union([z.string(), z.null()]).optional(),
symbol: z.string(),
template_id: z.string(),
});
const RequestAcceptedResponse = z.object({
export const RequestAcceptedResponse = z.object({
market: z.string(),
request_id: z.string().uuid(),
symbol: z.string(),
});
const ObservabilityTaskStatus = z.enum([
export const ObservabilityTaskStatus = z.enum([
"Queued",
"InProgress",
"Completed",
"Failed",
]);
const TaskProgress: z.ZodType<TaskProgress> = z.object({
export const TaskProgress = z.object({
details: z.string(),
progress_percent: z.number().int().gte(0),
request_id: z.string().uuid(),
@ -276,25 +285,25 @@ const TaskProgress: z.ZodType<TaskProgress> = z.object({
status: ObservabilityTaskStatus,
task_name: z.string(),
});
const CanonicalSymbol = z.string();
const ServiceStatus = z.enum(["Ok", "Degraded", "Unhealthy"]);
const HealthStatus: z.ZodType<HealthStatus> = z.object({
export const CanonicalSymbol = z.string();
export const ServiceStatus = z.enum(["Ok", "Degraded", "Unhealthy"]);
export const HealthStatus = z.object({
details: z.record(z.string()),
module_id: z.string(),
status: ServiceStatus,
version: z.string(),
});
const StartWorkflowCommand: z.ZodType<StartWorkflowCommand> = z.object({
export const StartWorkflowCommand = z.object({
market: z.string(),
request_id: z.string().uuid(),
symbol: CanonicalSymbol,
template_id: z.string(),
});
const TaskDependency: z.ZodType<TaskDependency> = z.object({
export const TaskDependency = z.object({
from: z.string(),
to: z.string(),
});
const TaskStatus = z.enum([
export const TaskStatus = z.enum([
"Pending",
"Scheduled",
"Running",
@ -302,18 +311,18 @@ const TaskStatus = z.enum([
"Failed",
"Skipped",
]);
const TaskType = z.enum(["DataFetch", "DataProcessing", "Analysis"]);
const TaskNode: z.ZodType<TaskNode> = z.object({
export const TaskType = z.enum(["DataFetch", "DataProcessing", "Analysis"]);
export const TaskNode = z.object({
id: z.string(),
initial_status: TaskStatus,
name: z.string(),
type: TaskType,
});
const WorkflowDag: z.ZodType<WorkflowDag> = z.object({
export const WorkflowDag = z.object({
edges: z.array(TaskDependency),
nodes: z.array(TaskNode),
});
const WorkflowEvent: z.ZodType<WorkflowEvent> = z.union([
export const WorkflowEvent = z.union([
z
.object({
payload: z
@ -349,12 +358,25 @@ const WorkflowEvent: z.ZodType<WorkflowEvent> = z.union([
type: z.literal("TaskStreamUpdate"),
})
.passthrough(),
z
.object({
payload: z
.object({
level: z.string(),
message: z.string(),
task_id: z.string(),
timestamp: z.number().int(),
})
.passthrough(),
type: z.literal("TaskLog"),
})
.passthrough(),
z
.object({
payload: z
.object({
end_timestamp: z.number().int(),
result_summary: z.unknown(),
result_summary: z.unknown().optional(),
})
.passthrough(),
type: z.literal("WorkflowCompleted"),
@ -423,7 +445,7 @@ export const schemas = {
WorkflowEvent,
};
const endpoints = makeApi([
export const endpoints = makeApi([
{
method: "get",
path: "/api/v1/configs/analysis_template_sets",

View File

@ -0,0 +1,75 @@
import { useState } from 'react';
import { Terminal, ChevronUp, ChevronDown } from 'lucide-react';
import { Card } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { useAutoScroll } from '@/hooks/useAutoScroll';
import { cn } from "@/lib/utils";

// One log line, tagged with the id of the workflow task that produced it.
interface LogEntry {
  taskId: string;
  log: string;
}

interface RealtimeLogsProps {
  // Logs are rendered in array order; presumably already time-ordered by the caller — TODO confirm.
  logs: LogEntry[];
  className?: string;
}

/**
 * Collapsible real-time log panel.
 *
 * Collapsed (default): an 8px-tall strip showing only the most recent log line.
 * Expanded: a 300px scrollable list of all entries. The viewport auto-scrolls
 * to the bottom on new entries via useAutoScroll (keyed on logs.length).
 */
export function RealtimeLogs({ logs, className }: RealtimeLogsProps) {
  const [isExpanded, setIsExpanded] = useState(false);
  // Re-runs the auto-scroll effect whenever the number of log entries changes.
  const logsViewportRef = useAutoScroll(logs.length);

  const toggleExpand = () => {
    setIsExpanded(!isExpanded);
  };

  return (
    <Card className={cn("flex flex-col shadow-sm transition-all duration-300 ease-in-out border-l-4 border-l-primary py-0 gap-0", className, isExpanded ? "h-[300px]" : "h-8")}>
      {/* Header bar — always visible; clicking anywhere on it toggles expansion. */}
      <div
        className="flex items-center justify-between px-2 py-1 cursor-pointer hover:bg-muted/50 transition-colors h-8 shrink-0"
        onClick={toggleExpand}
      >
        <div className="flex items-center gap-2 overflow-hidden flex-1">
          <Terminal className="h-3 w-3 text-muted-foreground shrink-0" />
          <span className="text-[10px] font-medium text-muted-foreground whitespace-nowrap shrink-0 mr-2">Real-time Logs</span>
          {/* Preview last log when collapsed */}
          {!isExpanded && logs.length > 0 && (
            <div className="flex-1 flex items-center gap-2 overflow-hidden text-[10px] font-mono text-muted-foreground/80">
              <span className="text-blue-500 shrink-0">[{logs[logs.length - 1].taskId}]</span>
              <span className="truncate">{logs[logs.length - 1].log}</span>
            </div>
          )}
          {!isExpanded && logs.length === 0 && (
            <span className="text-[10px] italic text-muted-foreground/50">Waiting for logs...</span>
          )}
        </div>
        {/* Chevron is decorative; the click handler lives on the whole header div above. */}
        <Button variant="ghost" size="icon" className="h-4 w-4 text-muted-foreground hover:text-foreground shrink-0 ml-2">
          {isExpanded ? <ChevronUp className="h-3 w-3" /> : <ChevronDown className="h-3 w-3" />}
        </Button>
      </div>
      {/* Expanded Content */}
      <div
        className={cn(
          "flex-1 bg-muted/10 border-t transition-all duration-300",
          isExpanded ? "opacity-100 visible" : "opacity-0 invisible h-0 overflow-hidden"
        )}
      >
        {/* Scroll container tracked by useAutoScroll: stays pinned to the newest entry. */}
        <div ref={logsViewportRef} className="h-full overflow-auto p-3 font-mono text-[10px] leading-relaxed">
          <div className="space-y-1">
            {logs.length === 0 && <span className="text-muted-foreground italic">Waiting for logs...</span>}
            {/* NOTE(review): index keys are acceptable only because the list is append-only — confirm entries are never removed/reordered. */}
            {logs.map((entry, i) => (
              <div key={i} className="break-all flex gap-2">
                <span className="text-blue-500 font-semibold shrink-0 opacity-80">[{entry.taskId}]</span>
                <span className="text-foreground/90">{entry.log}</span>
              </div>
            ))}
          </div>
        </div>
      </div>
    </Card>
  );
}

View File

@ -0,0 +1,26 @@
import * as React from "react"
import * as ProgressPrimitive from "@radix-ui/react-progress"

import { cn } from "@/lib/utils"

/**
 * Progress — thin, Tailwind-styled wrapper around the Radix Progress
 * primitive. Accepts all `ProgressPrimitive.Root` props; `value` is the
 * completion percentage (0–100). The ref is forwarded to the Root element.
 */
const Progress = React.forwardRef<
  React.ElementRef<typeof ProgressPrimitive.Root>,
  React.ComponentPropsWithoutRef<typeof ProgressPrimitive.Root>
>(({ className, value, ...props }, ref) => (
  <ProgressPrimitive.Root
    ref={ref}
    className={cn(
      "bg-primary/20 relative h-2 w-full overflow-hidden rounded-full",
      className
    )}
    {...props}
  >
    {/* Full-width bar shifted left by the remaining percentage; a null/undefined value renders as 0%. */}
    <ProgressPrimitive.Indicator
      className="bg-primary h-full w-full flex-1 transition-all"
      style={{ transform: `translateX(-${100 - (value || 0)}%)` }}
    />
  </ProgressPrimitive.Root>
))
Progress.displayName = ProgressPrimitive.Root.displayName

export { Progress }

View File

@ -5,7 +5,7 @@ export function Toaster() {
const { toasts, dismiss } = useToast()
return (
<div className="fixed top-0 z-[100] flex max-h-screen w-full flex-col-reverse p-4 sm:bottom-0 sm:right-0 sm:top-auto sm:flex-col md:max-w-[420px]">
<div className="fixed top-0 z-[100] flex max-h-screen w-full flex-col-reverse p-4 sm:bottom-0 sm:right-0 sm:top-auto sm:flex-col md:max-w-[420px] gap-2">
{toasts.map(function ({ id, title, description, type }) {
return (
<Notification

View File

@ -36,7 +36,7 @@ const WorkflowNode = ({ data, selected }: { data: { label: string, status: TaskS
const statusColors: Record<string, string> = {
[schemas.TaskStatus.enum.Pending]: 'border-muted bg-card',
[schemas.TaskStatus.enum.Scheduled]: 'border-yellow-500/50 bg-yellow-50/10',
[schemas.TaskStatus.enum.Running]: 'border-blue-500 ring-2 ring-blue-500/20 bg-blue-50/10',
[schemas.TaskStatus.enum.Running]: 'border-blue-500 ring-2 ring-blue-500/20 bg-blue-50/10 animate-pulse',
[schemas.TaskStatus.enum.Completed]: 'border-green-500 bg-green-50/10',
[schemas.TaskStatus.enum.Failed]: 'border-red-500 bg-red-50/10',
[schemas.TaskStatus.enum.Skipped]: 'border-gray-200 bg-gray-50/5 opacity-60',
@ -182,7 +182,7 @@ export function WorkflowVisualizer() {
if (!dag) return <div className="flex items-center justify-center h-full text-muted-foreground">Waiting for workflow to start...</div>;
return (
<div className="h-[300px] w-full border rounded-lg bg-muted/5">
<div className="h-full min-h-[300px] w-full border rounded-lg bg-muted/5">
<ReactFlow
nodes={nodes}
edges={edges}

View File

@ -0,0 +1,38 @@
import { useEffect, useRef } from 'react';

/**
 * Keeps a scrollable container pinned to its bottom as new content arrives.
 *
 * Pass a value that changes whenever content is appended (e.g. `logs.length`);
 * each change scrolls the container to the bottom — but only while the user is
 * "pinned" (within 50px of the bottom). Scrolling up detaches auto-scroll;
 * scrolling back near the bottom re-attaches it.
 *
 * Returns a ref to attach to the scrollable element.
 */
export function useAutoScroll(dependency: unknown) {
  const viewportRef = useRef<HTMLDivElement>(null);
  // Tracks whether we should follow new content; starts true so the first
  // render scrolls to the bottom.
  const pinnedRef = useRef(true);

  // Observe user scrolling to decide whether to keep following the bottom.
  useEffect(() => {
    const el = viewportRef.current;
    if (!el) return;

    const measure = () => {
      // Within 50px of the bottom counts as "pinned".
      pinnedRef.current = el.scrollHeight - el.scrollTop - el.clientHeight < 50;
    };

    measure(); // establish the initial pinned state
    el.addEventListener('scroll', measure);
    return () => el.removeEventListener('scroll', measure);
  }, []);

  // Whenever the dependency changes, jump to the bottom if still pinned.
  useEffect(() => {
    const el = viewportRef.current;
    if (el && pinnedRef.current) {
      el.scrollTop = el.scrollHeight;
    }
  }, [dependency]);

  return viewportRef;
}

View File

@ -6,12 +6,15 @@ import { Card, CardContent, CardDescription, CardHeader, CardTitle, CardFooter }
import { Input } from "@/components/ui/input"
import { Label } from "@/components/ui/label"
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"
import { BarChart3, Search, Sparkles, Loader2 } from "lucide-react"
import { useAnalysisTemplates } from "@/hooks/useConfig"
import { BarChart3, Search, Sparkles, Loader2, AlertCircle } from "lucide-react"
import { useAnalysisTemplates, useLlmProviders } from "@/hooks/useConfig"
import { client } from '@/api/client';
import { type DataRequest as DataRequestDTO } from '@/api/schema.gen';
import { DataRequest } from '@/api/schema.gen';
import { z } from 'zod';
import { useToast } from "@/hooks/use-toast"
type DataRequestDTO = z.infer<typeof DataRequest>;
export function Dashboard() {
const navigate = useNavigate();
const { toast } = useToast();
@ -20,6 +23,9 @@ export function Dashboard() {
const [templateId, setTemplateId] = useState("");
const { data: templates, isLoading: isTemplatesLoading } = useAnalysisTemplates();
const { data: llmProviders } = useLlmProviders();
const [validationError, setValidationError] = useState<string | null>(null);
// Auto-select first template when loaded
useEffect(() => {
@ -28,6 +34,36 @@ export function Dashboard() {
}
}, [templates, templateId]);
// Validate template against providers
useEffect(() => {
if (!templateId || !templates || !templates[templateId] || !llmProviders) {
setValidationError(null);
return;
}
const selectedTemplate = templates[templateId];
const missingConfigs: string[] = [];
Object.values(selectedTemplate.modules).forEach(module => {
if (!llmProviders[module.provider_id]) {
missingConfigs.push(`Module '${module.name}': Provider '${module.provider_id}' not found`);
} else {
const provider = llmProviders[module.provider_id];
const modelExists = provider.models.some(m => m.model_id === module.model_id);
if (!modelExists) {
missingConfigs.push(`Module '${module.name}': Model '${module.model_id}' not found in provider '${provider.name}'`);
}
}
});
if (missingConfigs.length > 0) {
setValidationError(missingConfigs.join("; "));
} else {
setValidationError(null);
}
}, [templateId, templates, llmProviders]);
const startWorkflowMutation = useMutation({
mutationFn: async (payload: DataRequestDTO) => {
return await client.start_workflow(payload);
@ -133,13 +169,25 @@ export function Dashboard() {
</Select>
</div>
{validationError && (
<div className="p-3 text-sm text-destructive bg-destructive/10 rounded-md flex items-start gap-2 text-left">
<AlertCircle className="h-5 w-5 shrink-0 mt-0.5" />
<span>
<strong>Configuration Error:</strong> The selected template has invalid configurations.<br/>
{validationError.split('; ').map((err, i) => (
<span key={i} className="block"> {err}</span>
))}
</span>
</div>
)}
</CardContent>
<CardFooter>
<Button
size="lg"
className="w-full text-base"
onClick={handleStart}
disabled={!symbol || !templateId || isTemplatesLoading || startWorkflowMutation.isPending}
disabled={!symbol || !templateId || isTemplatesLoading || startWorkflowMutation.isPending || !!validationError}
>
{startWorkflowMutation.isPending || isTemplatesLoading ?
<Loader2 className="mr-2 h-5 w-5 animate-spin" /> :

View File

@ -1,18 +1,21 @@
import { useEffect } from 'react';
import { useEffect, useMemo } from 'react';
import { useParams, useSearchParams } from 'react-router-dom';
import { Badge } from '@/components/ui/badge';
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"
import { ScrollArea } from "@/components/ui/scroll-area"
import { WorkflowVisualizer } from '@/components/workflow/WorkflowVisualizer';
import { useWorkflowStore } from '@/stores/useWorkflowStore';
import { TaskStatus, schemas } from '@/api/schema.gen';
import { Terminal, Loader2, Sparkles, CheckCircle2 } from 'lucide-react';
import { Loader2, CheckCircle2, AlertCircle, Clock } from 'lucide-react';
import { Button } from '@/components/ui/button';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import { FinancialTable } from '@/components/report/FinancialTable';
import { useAnalysisTemplates } from "@/hooks/useConfig"
import { useAutoScroll } from '@/hooks/useAutoScroll';
import { RealtimeLogs } from '@/components/RealtimeLogs';
import { WorkflowStatus, ConnectionStatus, TaskState } from '@/types/workflow';
import { Progress } from "@/components/ui/progress"
export function ReportPage() {
const { id } = useParams();
@ -63,14 +66,14 @@ export function ReportPage() {
}, [id, initialize, handleEvent]);
// Combine logs from all tasks for the "Global Log" view
const allLogs = Object.entries(tasks).flatMap(([taskId, state]) =>
const allLogs = useMemo(() => Object.entries(tasks).flatMap(([taskId, state]) =>
state.logs.map(log => ({ taskId, log }))
);
), [tasks]);
const tabNodes = dag?.nodes.filter(n => n.type === schemas.TaskType.enum.Analysis) || [];
return (
<div className="container py-6 space-y-6 h-[calc(100vh-4rem)] flex flex-col">
<div className="container py-4 space-y-4 h-[calc(100vh-4rem)] flex flex-col">
{/* Header Area */}
<div className="flex items-center justify-between shrink-0">
<div className="space-y-1">
@ -90,46 +93,24 @@ export function ReportPage() {
</div>
{/* Main Content Grid */}
<div className="grid grid-cols-12 gap-6 flex-1 min-h-0">
{/* Left Col: Visualizer & Logs (4 cols) */}
<div className="grid grid-cols-12 gap-4 flex-1 min-h-0">
{/* Left Col: Visualizer (4 cols) */}
<div className="col-span-4 flex flex-col gap-4 min-h-0 h-full">
<Card className="shrink-0">
<CardHeader className="py-3 px-4">
<Card className="flex-1 flex flex-col min-h-0 py-0 gap-0">
<CardHeader className="py-3 px-4 shrink-0">
<CardTitle className="text-sm font-medium">Workflow Status</CardTitle>
</CardHeader>
<CardContent className="p-0">
<CardContent className="p-0 flex-1 min-h-0">
<WorkflowVisualizer />
</CardContent>
</Card>
<Card className="flex-1 flex flex-col min-h-0 p-0 gap-0 overflow-hidden">
<CardHeader className="py-2 px-4 border-b bg-muted/50 space-y-0 shrink-0">
<CardTitle className="text-sm font-medium flex items-center gap-2">
<Terminal className="h-4 w-4" />
Real-time Logs
</CardTitle>
</CardHeader>
<CardContent className="flex-1 min-h-0 p-0 relative">
<div className="absolute inset-0 overflow-auto p-4 font-mono text-xs bg-background">
<div className="space-y-1.5">
{allLogs.length === 0 && <span className="text-muted-foreground italic">Waiting for logs...</span>}
{allLogs.map((entry, i) => (
<div key={i} className="break-all">
<span className="text-blue-500 font-semibold">[{entry.taskId}]</span>{" "}
<span className="text-foreground">{entry.log}</span>
</div>
))}
</div>
</div>
</CardContent>
</Card>
</div>
{/* Right Col: Detail Tabs (8 cols) */}
<div className="col-span-8 h-full min-h-0">
<Tabs value={activeTab} onValueChange={setActiveTab} className="h-full flex flex-col">
<div className="w-full overflow-x-auto bg-background border-b shrink-0">
<TabsList className="h-auto p-0 bg-transparent gap-0">
<div className="w-full bg-background border-b shrink-0">
<TabsList className="h-auto p-0 bg-transparent gap-0 flex-wrap justify-start w-full">
<TabsTrigger value="overview" className="rounded-none border-b-2 border-transparent data-[state=active]:border-primary data-[state=active]:bg-transparent px-4 py-3">
Overview
</TabsTrigger>
@ -140,7 +121,7 @@ export function ReportPage() {
<TabsTrigger
key={node.id}
value={node.id}
className="rounded-none border-b-2 border-transparent data-[state=active]:border-primary data-[state=active]:bg-transparent px-4 py-3 gap-2"
className="rounded-none border-b-2 border-transparent data-[state=active]:border-primary data-[state=active]:bg-transparent px-4 py-3 gap-2 min-w-fit"
>
{node.name}
<TaskStatusIndicator status={tasks[node.id]?.status || schemas.TaskStatus.enum.Pending} />
@ -152,15 +133,12 @@ export function ReportPage() {
{/* Content Area */}
<div className="flex-1 min-h-0 bg-muted/5 relative">
<TabsContent value="overview" className="absolute inset-0 m-0 p-6 overflow-y-auto">
<div className="max-w-2xl mx-auto text-center py-10 space-y-4">
<div className="bg-primary/10 w-16 h-16 rounded-full flex items-center justify-center mx-auto">
<Sparkles className="h-8 w-8 text-primary" />
</div>
<h2 className="text-2xl font-bold">Analysis In Progress</h2>
<p className="text-muted-foreground">
Select a task tab above or click a node in the graph to view details.
</p>
</div>
<OverviewTabContent
status={status}
tasks={tasks}
totalTasks={dag?.nodes.length || 0}
completedTasks={Object.values(tasks).filter(t => t.status === schemas.TaskStatus.enum.Completed).length}
/>
</TabsContent>
<TabsContent value="data" className="absolute inset-0 m-0 p-6 overflow-y-auto">
@ -169,49 +147,187 @@ export function ReportPage() {
{tabNodes.map(node => (
<TabsContent key={node.id} value={node.id} className="absolute inset-0 m-0 overflow-hidden flex flex-col">
<ScrollArea className="flex-1">
<div className="p-8 max-w-4xl mx-auto">
<div className="prose dark:prose-invert max-w-none">
{tasks[node.id]?.content ? (
<ReactMarkdown remarkPlugins={[remarkGfm]}>
{tasks[node.id].content || ''}
</ReactMarkdown>
) : (
<div className="flex flex-col items-center justify-center h-[300px] text-muted-foreground space-y-4">
{tasks[node.id]?.status === schemas.TaskStatus.enum.Pending && <p>Waiting to start...</p>}
{tasks[node.id]?.status === schemas.TaskStatus.enum.Running && !tasks[node.id]?.content && <Loader2 className="h-8 w-8 animate-spin" />}
</div>
)}
{tasks[node.id]?.status === schemas.TaskStatus.enum.Running && (
<span className="inline-block w-2 h-4 ml-1 bg-primary animate-pulse"/>
)}
</div>
</div>
</ScrollArea>
<TaskDetailView task={tasks[node.id]} />
</TabsContent>
))}
</div>
</Tabs>
</div>
</div>
<RealtimeLogs logs={allLogs} className="shrink-0" />
</div>
);
}
/**
 * Overview tab body: a hero status card (spinner / check / alert icon),
 * the overall progress bar, a prominent warning listing any failed tasks,
 * and a small stats grid (total / completed / duration).
 *
 * Note: this region previously contained an orphaned fragment of the old
 * string-typed WorkflowStatusBadge (including a stray `return` referencing
 * undefined `variants`/`colorClass`); that dead code is removed here — the
 * replacement WorkflowStatusBadge is defined further below in this file.
 */
function OverviewTabContent({ status, tasks, totalTasks, completedTasks }: {
  status: WorkflowStatus,
  tasks: Record<string, TaskState>,
  totalTasks: number,
  completedTasks: number
}) {
  // Guard against division by zero while the DAG has not arrived yet.
  const progress = totalTasks > 0 ? (completedTasks / totalTasks) * 100 : 0;

  // Collect failed tasks up front so their error messages can be surfaced
  // in the warning panel below.
  const failedTasks = Object.entries(tasks).filter(([_, t]) => t.status === schemas.TaskStatus.enum.Failed);

  return (
    <div className="max-w-3xl mx-auto space-y-8 py-6">
      {/* Hero Status */}
      <Card className="border-primary/10 shadow-md">
        <CardHeader className="text-center pb-2">
          <div className="mx-auto mb-4 bg-muted rounded-full p-3 w-fit">
            {/* WorkflowStatus reuses TaskStatus enum values for backend states,
                so comparing against TaskStatus.enum here is type-safe. */}
            {status === schemas.TaskStatus.enum.Completed ? (
              <CheckCircle2 className="h-8 w-8 text-green-600" />
            ) : status === schemas.TaskStatus.enum.Failed ? (
              <AlertCircle className="h-8 w-8 text-destructive" />
            ) : (
              <Loader2 className="h-8 w-8 text-blue-500 animate-spin" />
            )}
          </div>
          <CardTitle className="text-2xl">
            {status === schemas.TaskStatus.enum.Completed ? "Analysis Completed" :
              status === schemas.TaskStatus.enum.Failed ? "Analysis Failed" :
              "Analysis In Progress"}
          </CardTitle>
        </CardHeader>
        <CardContent className="space-y-6">
          <div className="space-y-2">
            <div className="flex justify-between text-sm text-muted-foreground">
              <span>Overall Progress</span>
              <span>{Math.round(progress)}% ({completedTasks}/{totalTasks} tasks)</span>
            </div>
            <Progress value={progress} className="h-2" />
          </div>
          {/* Failed Tasks Warning */}
          {failedTasks.length > 0 && (
            <div className="bg-destructive/10 text-destructive rounded-md p-4 text-sm flex items-start gap-2">
              <AlertCircle className="h-5 w-5 shrink-0 mt-0.5" />
              <div className="space-y-1">
                <p className="font-semibold">Some tasks failed:</p>
                <ul className="list-disc list-inside">
                  {failedTasks.map(([id, t]) => (
                    <li key={id}>
                      <span className="font-medium">{id}</span>: {t.message || "Unknown error"}
                    </li>
                  ))}
                </ul>
              </div>
            </div>
          )}
        </CardContent>
      </Card>
      {/* Stats Grid */}
      <div className="grid grid-cols-1 md:grid-cols-3 gap-4">
        <Card>
          <CardHeader className="pb-2">
            <CardTitle className="text-sm font-medium text-muted-foreground">Total Tasks</CardTitle>
          </CardHeader>
          <CardContent>
            <div className="text-2xl font-bold">{totalTasks}</div>
          </CardContent>
        </Card>
        <Card>
          <CardHeader className="pb-2">
            <CardTitle className="text-sm font-medium text-muted-foreground">Completed</CardTitle>
          </CardHeader>
          <CardContent>
            <div className="text-2xl font-bold text-green-600">{completedTasks}</div>
          </CardContent>
        </Card>
        <Card>
          <CardHeader className="pb-2">
            <CardTitle className="text-sm font-medium text-muted-foreground">Duration</CardTitle>
          </CardHeader>
          <CardContent>
            <div className="text-2xl font-bold flex items-center gap-2">
              <Clock className="h-5 w-5 text-muted-foreground" />
              {/* NOTE(review): duration is not wired up yet — placeholder until
                  start/end timestamps are available from the workflow events. */}
              <span>--:--</span>
            </div>
          </CardContent>
        </Card>
      </div>
    </div>
  )
}
/**
 * Detail pane for a single workflow task tab.
 *
 * Renders one of two states:
 *  - a dedicated full-pane failure view when the task Failed before
 *    producing any content (shows the error message from the event);
 *  - otherwise the streamed Markdown output, with a pulsing cursor while
 *    the task is still Running and placeholder states before output arrives.
 */
function TaskDetailView({ task }: { task?: TaskState }) {
  // Keyed on content length so the pane auto-scrolls as new streamed
  // Markdown chunks append to task.content.
  const contentScrollRef = useAutoScroll(task?.content?.length || 0);
  // Failed with no partial content: show the error card instead of an
  // empty Markdown body. (A failure *after* partial output falls through
  // and still shows whatever content was streamed.)
  if (task?.status === schemas.TaskStatus.enum.Failed && !task.content) {
    return (
      <div className="flex flex-col items-center justify-center h-full min-h-[400px] p-8 text-muted-foreground space-y-4">
        <AlertCircle className="h-12 w-12 text-destructive/80 mb-2" />
        <h3 className="text-lg font-medium text-foreground">Analysis Failed</h3>
        <div className="max-w-xl text-center space-y-2">
          <p className="text-sm text-muted-foreground">The task encountered an error and could not complete.</p>
          <p className="text-destructive bg-destructive/10 p-4 rounded-md border border-destructive/20 font-mono text-sm whitespace-pre-wrap">
            {task.message || "Unknown error occurred."}
          </p>
        </div>
      </div>
    );
  }
  return (
    <div ref={contentScrollRef} className="flex-1 overflow-auto">
      <div className="p-8 max-w-4xl mx-auto">
        <div className="prose dark:prose-invert max-w-none prose-p:text-foreground prose-headings:text-foreground prose-li:text-foreground prose-strong:text-foreground prose-span:text-foreground">
          {task?.content ? (
            <ReactMarkdown remarkPlugins={[remarkGfm]}>
              {task.content || ''}
            </ReactMarkdown>
          ) : (
            /* No content yet: show a waiting hint or a spinner depending on status. */
            <div className="flex flex-col items-center justify-center h-[300px] text-muted-foreground space-y-4">
              {task?.status === schemas.TaskStatus.enum.Pending && <p>Waiting to start...</p>}
              {task?.status === schemas.TaskStatus.enum.Running && !task?.content && <Loader2 className="h-8 w-8 animate-spin" />}
            </div>
          )}
          {/* Typing cursor appended after the Markdown while streaming. */}
          {task?.status === schemas.TaskStatus.enum.Running && (
            <span className="inline-block w-2 h-4 ml-1 bg-primary animate-pulse"/>
          )}
        </div>
      </div>
    </div>
  );
}
function WorkflowStatusBadge({ status }: { status: WorkflowStatus }) {
// Map local store status to TaskStatus enum for consistency where possible
// These comparisons are now type-safe against the WorkflowStatus literal union type
if (status === schemas.TaskStatus.enum.Running) {
return (
<Badge variant="default" className="bg-blue-500 hover:bg-blue-600 border-transparent flex items-center gap-1">
<Loader2 className="h-3 w-3 animate-spin" />
{schemas.TaskStatus.enum.Running}
</Badge>
);
}
if (status === schemas.TaskStatus.enum.Completed) {
return (
<Badge variant="default" className="bg-green-600 hover:bg-green-600 border-transparent">
{schemas.TaskStatus.enum.Completed}
</Badge>
);
}
if (status === schemas.TaskStatus.enum.Failed) {
return (
<Badge variant="destructive">
{schemas.TaskStatus.enum.Failed}
</Badge>
);
}
if (status === ConnectionStatus.Connecting) {
return <Badge variant="secondary">CONNECTING</Badge>;
}
return <Badge variant="outline">{status}</Badge>;
}
function TaskStatusIndicator({ status }: { status: TaskStatus }) {

View File

@ -1,11 +1,11 @@
import { Plus, Trash2, RefreshCw, Eye, EyeOff, Save, X, Search } from "lucide-react"
import { Plus, Trash2, Eye, EyeOff, X, Search, Loader2 } from "lucide-react"
import { useState, useRef, useEffect } from "react"
import { Button } from "@/components/ui/button"
import { Card, CardContent, CardDescription, CardHeader, CardTitle, CardFooter } from "@/components/ui/card"
import { Badge } from "@/components/ui/badge"
import { Input } from "@/components/ui/input"
import { Label } from "@/components/ui/label"
import { useLlmProviders, useUpdateLlmProviders, useDiscoverModels } from "@/hooks/useConfig"
import { useLlmProviders, useUpdateLlmProviders } from "@/hooks/useConfig"
import { LlmProvider, LlmModel } from "@/types/config"
import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle, DialogTrigger } from "@/components/ui/dialog"
import { useToast } from "@/hooks/use-toast"
@ -61,12 +61,16 @@ export function AIProviderTab() {
});
}
const handleUpdateProvider = (id: string, updatedProvider: LlmProvider) => {
const handleUpdateProvider = (id: string, updatedProvider: LlmProvider, showToast: boolean = true) => {
if (!providers) return;
const newProviders = { ...providers, [id]: updatedProvider };
updateProviders.mutate(newProviders, {
onSuccess: () => toast({ title: "Success", description: "Provider updated" }),
onError: (err) => toast({ title: "Error", description: "Failed to update provider", type: "error" })
onSuccess: () => {
if (showToast) {
toast({ title: "Success", description: "Provider updated" });
}
},
onError: () => toast({ title: "Error", description: "Failed to update provider", type: "error" })
});
}
@ -145,7 +149,7 @@ export function AIProviderTab() {
id={id}
provider={provider}
onDelete={() => handleDelete(id)}
onUpdate={(p) => handleUpdateProvider(id, p)}
onUpdate={(p, showToast) => handleUpdateProvider(id, p, showToast)}
/>
))}
</div>
@ -153,15 +157,16 @@ export function AIProviderTab() {
)
}
function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provider: LlmProvider, onDelete: () => void, onUpdate: (p: LlmProvider) => void }) {
function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provider: LlmProvider, onDelete: () => void, onUpdate: (p: LlmProvider, showToast?: boolean) => void }) {
const [showKey, setShowKey] = useState(false);
const discoverModels = useDiscoverModels();
// const discoverModels = useDiscoverModels(); // Removed as we now fetch manually
const { toast } = useToast();
// Discovered models cache for search (not saved to config)
const [discoveredModels, setDiscoveredModels] = useState<LlmModel[]>([]);
const [searchQuery, setSearchQuery] = useState("");
const [isSearchFocused, setIsSearchFocused] = useState(false);
const [isFetchingModels, setIsFetchingModels] = useState(false);
const searchRef = useRef<HTMLDivElement>(null);
useEffect(() => {
@ -175,7 +180,13 @@ function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provid
return () => document.removeEventListener("mousedown", handleClickOutside);
}, []);
const handleRefresh = async () => {
// Auto-fetch models on interaction if cache is empty
const fetchModels = async (force = false) => {
// If we already have models and not forcing, don't fetch
if (discoveredModels.length > 0 && !force) return;
if (isFetchingModels) return;
setIsFetchingModels(true);
try {
const response = await axios.get(`/api/v1/discover-models/${id}`);
const data = response.data;
@ -198,22 +209,23 @@ function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provid
setDiscoveredModels(models);
if (models.length === 0) {
toast({ title: "Info", description: "No models found in response" });
} else if (models.length < 10) {
// If few models, add them all automatically
const updatedProvider = { ...provider, models };
onUpdate(updatedProvider);
toast({ title: "Success", description: `Found and added ${models.length} models` });
// Silent fail or minimal toast?
// toast({ title: "Info", description: "No models found from provider" });
} else {
// If many models, just notify user to search/add
toast({
title: "Found many models",
description: `Discovered ${models.length} models. Please search and add specific models below.`
});
toast({ title: "Models Discovered", description: `Found ${models.length} available models for autocomplete.` });
}
} catch (err) {
console.error(err);
toast({ title: "Error", description: "Failed to refresh models", type: "error" });
toast({ title: "Discovery Error", description: "Failed to fetch model list for autocomplete", type: "error" });
} finally {
setIsFetchingModels(false);
}
}
const handleInputFocus = () => {
setIsSearchFocused(true);
if (discoveredModels.length === 0) {
fetchModels();
}
}
@ -230,7 +242,8 @@ function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provid
...provider,
models: [...provider.models, model]
};
onUpdate(updatedProvider);
// Pass false to suppress generic toast since we show a specific one here
onUpdate(updatedProvider, false);
setSearchQuery("");
setIsSearchFocused(false);
toast({ title: "Success", description: `Added ${model.name || model.model_id}` });
@ -261,7 +274,7 @@ function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provid
...provider,
models: []
};
onUpdate(updatedProvider);
onUpdate(updatedProvider, false);
toast({ title: "Success", description: "已清空所有模型" });
}
}
@ -315,19 +328,9 @@ function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provid
className="h-6 text-xs px-2 text-destructive hover:text-destructive hover:bg-destructive/10"
onClick={handleClearModels}
>
<Trash2 className="mr-1 h-3 w-3" />
<Trash2 className="mr-1 h-3 w-3" />
</Button>
)}
<Button
variant="ghost"
size="sm"
className="h-6 text-xs px-2"
onClick={handleRefresh}
disabled={discoverModels.isPending}
>
<RefreshCw className={`mr-1 h-3 w-3 ${discoverModels.isPending ? 'animate-spin' : ''}`} />
{discoverModels.isPending ? "刷新中..." : "刷新列表"}
</Button>
</div>
</div>
@ -336,13 +339,18 @@ function ProviderCard({ id, provider, onDelete, onUpdate }: { id: string, provid
<div className="relative">
<Search className="absolute left-2 top-2.5 h-4 w-4 text-muted-foreground" />
<Input
placeholder="Search discovered models or type new ID..."
placeholder="Search or enter model ID (auto-completes from provider)..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
onFocus={() => setIsSearchFocused(true)}
onFocus={handleInputFocus}
className="pl-8 h-9 text-sm"
/>
{searchQuery && (
{isFetchingModels && (
<div className="absolute right-8 top-2.5">
<Loader2 className="h-4 w-4 animate-spin text-muted-foreground" />
</div>
)}
{searchQuery && !isFetchingModels && (
<Button
size="sm"
variant="ghost"

View File

@ -1,13 +1,14 @@
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
import { Badge } from "@/components/ui/badge"
import { Activity, CheckCircle2, AlertTriangle, XCircle } from "lucide-react"
import { schemas, ServiceStatus } from "@/api/schema.gen"
export function SystemTab() {
const services = [
{ name: "API Gateway", status: "healthy", version: "0.1.0", uptime: "2d 4h" },
{ name: "Workflow Orchestrator", status: "healthy", version: "0.1.0", uptime: "2d 4h" },
{ name: "Data Persistence", status: "healthy", version: "0.1.0", uptime: "15d 2h" },
{ name: "Report Generator", status: "degraded", version: "0.1.0", uptime: "1h 30m", issue: "High Latency" },
{ name: "API Gateway", status: schemas.ServiceStatus.enum.Ok, version: "0.1.0", uptime: "2d 4h" },
{ name: "Workflow Orchestrator", status: schemas.ServiceStatus.enum.Ok, version: "0.1.0", uptime: "2d 4h" },
{ name: "Data Persistence", status: schemas.ServiceStatus.enum.Ok, version: "0.1.0", uptime: "15d 2h" },
{ name: "Report Generator", status: schemas.ServiceStatus.enum.Degraded, version: "0.1.0", uptime: "1h 30m", issue: "High Latency" },
];
return (
@ -63,20 +64,20 @@ export function SystemTab() {
)
}
function StatusIcon({ status }: { status: string }) {
function StatusIcon({ status }: { status: ServiceStatus }) {
switch (status) {
case "healthy": return <CheckCircle2 className="h-5 w-5 text-green-500" />;
case "degraded": return <AlertTriangle className="h-5 w-5 text-yellow-500" />;
case "down": return <XCircle className="h-5 w-5 text-red-500" />;
case schemas.ServiceStatus.enum.Ok: return <CheckCircle2 className="h-5 w-5 text-green-500" />;
case schemas.ServiceStatus.enum.Degraded: return <AlertTriangle className="h-5 w-5 text-yellow-500" />;
case schemas.ServiceStatus.enum.Unhealthy: return <XCircle className="h-5 w-5 text-red-500" />;
default: return <Activity className="h-5 w-5 text-muted-foreground" />;
}
}
function StatusBadge({ status }: { status: string }) {
function StatusBadge({ status }: { status: ServiceStatus }) {
switch (status) {
case "healthy": return <Badge variant="default" className="bg-green-600 hover:bg-green-600">Healthy</Badge>;
case "degraded": return <Badge variant="secondary" className="bg-yellow-100 text-yellow-800 hover:bg-yellow-100">Degraded</Badge>;
case "down": return <Badge variant="destructive">Down</Badge>;
case schemas.ServiceStatus.enum.Ok: return <Badge variant="default" className="bg-green-600 hover:bg-green-600">Healthy</Badge>;
case schemas.ServiceStatus.enum.Degraded: return <Badge variant="secondary" className="bg-yellow-100 text-yellow-800 hover:bg-yellow-100">Degraded</Badge>;
case schemas.ServiceStatus.enum.Unhealthy: return <Badge variant="destructive">Unhealthy</Badge>;
default: return <Badge variant="outline">Unknown</Badge>;
}
}

View File

@ -1,10 +1,10 @@
import { create } from 'zustand';
import { schemas } from '../api/schema.gen';
import { WorkflowDag, TaskState, TaskStatus, WorkflowEvent } from '../types/workflow';
import { WorkflowDag, TaskState, TaskStatus, WorkflowEvent, WorkflowStatus, ConnectionStatus } from '../types/workflow';
interface WorkflowStoreState {
requestId: string | null;
status: 'IDLE' | 'CONNECTING' | 'RUNNING' | 'COMPLETED' | 'ERROR';
status: WorkflowStatus;
dag: WorkflowDag | null;
tasks: Record<string, TaskState>;
error: string | null;
@ -18,7 +18,7 @@ interface WorkflowStoreState {
setTaskContent: (taskId: string, content: string) => void; // Set full content
appendTaskLog: (taskId: string, log: string) => void;
setActiveTab: (tabId: string) => void;
completeWorkflow: (result: any) => void;
completeWorkflow: (result: unknown) => void;
failWorkflow: (reason: string) => void;
handleEvent: (event: WorkflowEvent) => void;
reset: () => void;
@ -26,7 +26,7 @@ interface WorkflowStoreState {
export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
requestId: null,
status: 'IDLE',
status: ConnectionStatus.Idle,
dag: null,
tasks: {},
error: null,
@ -34,7 +34,7 @@ export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
initialize: (requestId) => set({
requestId,
status: 'CONNECTING',
status: ConnectionStatus.Connecting,
error: null,
tasks: {},
activeTab: 'overview'
@ -51,7 +51,7 @@ export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
content: ''
};
});
set({ dag, tasks: initialTasks, status: 'RUNNING' });
set({ dag, tasks: initialTasks, status: schemas.TaskStatus.enum.Running });
},
updateTaskStatus: (taskId, status, message, progress) => {
@ -148,8 +148,8 @@ export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
setActiveTab: (tabId) => set({ activeTab: tabId }),
completeWorkflow: (_result) => set({ status: 'COMPLETED' }),
failWorkflow: (reason) => set({ status: 'ERROR', error: reason }),
completeWorkflow: (_result) => set({ status: schemas.TaskStatus.enum.Completed }),
failWorkflow: (reason) => set({ status: schemas.TaskStatus.enum.Failed, error: reason }),
handleEvent: (event: WorkflowEvent) => {
const state = get();
@ -171,17 +171,27 @@ export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
break;
}
case 'TaskStreamUpdate': {
const p = event.payload as any;
const p = event.payload;
state.updateTaskContent(p.task_id, p.content_delta);
break;
}
case 'WorkflowCompleted':
// @ts-ignore - TaskLog is manually added to schema.gen.ts
case 'TaskLog': {
const p = event.payload;
const time = new Date(p.timestamp).toLocaleTimeString();
const log = `[${time}] [${p.level}] ${p.message}`;
state.appendTaskLog(p.task_id, log);
break;
}
case 'WorkflowCompleted': {
state.completeWorkflow(event.payload.result_summary);
break;
case 'WorkflowFailed':
}
case 'WorkflowFailed': {
state.failWorkflow(event.payload.reason);
break;
case 'WorkflowStateSnapshot':
}
case 'WorkflowStateSnapshot': {
// Re-hydrate state
if (event.payload.task_graph) {
state.setDag(event.payload.task_graph);
@ -201,7 +211,7 @@ export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
if (event.payload.tasks_output) {
Object.entries(event.payload.tasks_output).forEach(([taskId, content]) => {
if (newTasks[taskId] && content) {
newTasks[taskId] = { ...newTasks[taskId], content };
newTasks[taskId] = { ...newTasks[taskId], content: content || undefined };
}
});
}
@ -209,11 +219,12 @@ export const useWorkflowStore = create<WorkflowStoreState>((set, get) => ({
set({ tasks: newTasks });
break;
}
}
},
reset: () => set({
requestId: null,
status: 'IDLE',
status: ConnectionStatus.Idle,
dag: null,
tasks: {},
error: null,

View File

@ -7,12 +7,13 @@ export type {
LlmProvider,
DataSourceConfig,
AnalysisModuleConfig,
AnalysisTemplateSet,
DataSourceProvider,
TestConfigRequest,
TestLlmConfigRequest,
} from '../api/schema.gen';
export type TestConfigRequest = z.infer<typeof schemas.TestConfigRequest>;
export type TestLlmConfigRequest = z.infer<typeof schemas.TestLlmConfigRequest>;
export type AnalysisTemplateSet = z.infer<typeof schemas.AnalysisTemplateSet>;
// Infer map types from Zod schemas to ensure Record<string, T>
export type LlmProvidersConfig = z.infer<typeof schemas.LlmProvidersConfig>;
export type DataSourcesConfig = z.infer<typeof schemas.DataSourcesConfig>;

View File

@ -1,3 +1,6 @@
import { schemas } from '../api/schema.gen';
import { z } from 'zod';
// Re-export backend types from generated schema
export type {
TaskStatus,
@ -5,11 +8,31 @@ export type {
TaskNode,
TaskDependency,
WorkflowDag,
WorkflowEvent,
} from '../api/schema.gen';
export type WorkflowEvent = z.infer<typeof schemas.WorkflowEvent>;
import { TaskStatus } from '../api/schema.gen';
// Frontend connection states that precede the actual workflow
// Defined as an object to emulate enum behavior for consistency with schema.gen.ts
export const ConnectionStatus = {
Idle: 'Idle',
Connecting: 'Connecting'
} as const;
export type ConnectionStatus = typeof ConnectionStatus[keyof typeof ConnectionStatus];
/**
* Combined status type:
* - ConnectionStatus (Frontend only)
* - TaskStatus (Backend Single Source of Truth via schema.gen.ts)
*
* We reuse TaskStatus here because WorkflowStatus is not currently exported in schema.gen.ts,
* and TaskStatus shares the same semantic values (Running, Completed, Failed) required for the UI.
*/
export type WorkflowStatus = ConnectionStatus | TaskStatus;
// Frontend-only state wrapper
export interface TaskState {
status: TaskStatus;
@ -17,5 +40,5 @@ export interface TaskState {
progress?: number; // 0-100
logs: string[]; // Full log history
content?: string; // Streaming content (Markdown)
result?: any; // Structured result
result?: unknown; // Structured result
}

View File

@ -1178,7 +1178,45 @@
"payload": {
"type": "object",
"required": [
"result_summary",
"task_id",
"level",
"message",
"timestamp"
],
"properties": {
"level": {
"type": "string"
},
"message": {
"type": "string"
},
"task_id": {
"type": "string"
},
"timestamp": {
"type": "integer",
"format": "int64"
}
}
},
"type": {
"type": "string",
"enum": [
"TaskLog"
]
}
}
},
{
"type": "object",
"required": [
"payload",
"type"
],
"properties": {
"payload": {
"type": "object",
"required": [
"end_timestamp"
],
"properties": {

81
package-lock.json generated
View File

@ -7,6 +7,7 @@
"dependencies": {
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"cmdk": "^1.1.1",
"elkjs": "^0.11.0",
"immer": "^10.2.0",
@ -371,6 +372,86 @@
}
}
},
"node_modules/@radix-ui/react-progress": {
"version": "1.1.8",
"resolved": "http://npm.repo.lan/@radix-ui/react-progress/-/react-progress-1.1.8.tgz",
"integrity": "sha512-+gISHcSPUJ7ktBy9RnTqbdKW78bcGke3t6taawyZ71pio1JewwGSJizycs7rLhGTvMJYCQB1DBK4KQsxs7U8dA==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-context": "1.1.3",
"@radix-ui/react-primitive": "2.1.4"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-context": {
"version": "1.1.3",
"resolved": "http://npm.repo.lan/@radix-ui/react-context/-/react-context-1.1.3.tgz",
"integrity": "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==",
"license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-primitive": {
"version": "2.1.4",
"resolved": "http://npm.repo.lan/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz",
"integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-slot": "1.2.4"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-slot": {
"version": "1.2.4",
"resolved": "http://npm.repo.lan/@radix-ui/react-slot/-/react-slot-1.2.4.tgz",
"integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.2"
},
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-slot": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",

View File

@ -2,6 +2,7 @@
"dependencies": {
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-progress": "^1.1.8",
"cmdk": "^1.1.1",
"elkjs": "^0.11.0",
"immer": "^10.2.0",

View File

@ -11,6 +11,14 @@ cleanup() {
docker logs api-gateway || true
echo "[E2E] Dumping logs for report-generator-service..."
docker logs report-generator-service || true
echo "[E2E] Dumping logs for tushare-provider-service..."
docker logs tushare-provider-service || true
echo "[E2E] Dumping logs for yfinance-provider-service..."
docker logs yfinance-provider-service || true
echo "[E2E] Dumping logs for alphavantage-provider-service..."
docker logs alphavantage-provider-service || true
echo "[E2E] Dumping logs for finnhub-provider-service..."
docker logs finnhub-provider-service || true
echo "[E2E] Tearing down environment..."
cd "$ROOT_DIR"

View File

@ -1,9 +1,10 @@
# 1. Build Stage
FROM rust:1.90 as builder
FROM rust:1.90-bookworm as builder
WORKDIR /usr/src/app
# Copy full sources (simple and correct; avoids shipping stub binaries)
COPY ./services/common-contracts /usr/src/app/services/common-contracts
COPY ./crates/workflow-context /usr/src/app/crates/workflow-context
COPY ./services/alphavantage-provider-service /usr/src/app/services/alphavantage-provider-service
WORKDIR /usr/src/app/services/alphavantage-provider-service
RUN cargo build --bin alphavantage-provider-service
@ -15,7 +16,7 @@ FROM debian:bookworm-slim
ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Minimal runtime deps for health checks (curl) and TLS roots if needed
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl libssl3 && rm -rf /var/lib/apt/lists/*
# Copy the built binary from the builder stage
COPY --from=builder /usr/src/app/services/alphavantage-provider-service/target/debug/alphavantage-provider-service /usr/local/bin/

View File

@ -13,10 +13,10 @@ pub async fn run(state: AppState) -> Result<()> {
let status = state.status.read().await.clone();
if let ServiceOperationalStatus::Degraded { reason } = status {
warn!(
"Service is in degraded state (reason: {}). Pausing message consumption for 30s.",
"Service is in degraded state (reason: {}). Pausing message consumption for 5s.",
reason
);
tokio::time::sleep(Duration::from_secs(30)).await;
tokio::time::sleep(Duration::from_secs(5)).await;
continue;
}

View File

@ -363,7 +363,6 @@ mod integration_tests {
use super::*;
use crate::config::AppConfig;
use crate::state::AppState;
use secrecy::SecretString;
use std::time::Duration;
use common_contracts::symbol_utils::{CanonicalSymbol, Market};
@ -393,7 +392,7 @@ mod integration_tests {
// 2. Manual Init Provider (Skip Config Poller)
state.update_provider(
Some(SecretString::new(api_key.into())),
Some(api_key),
Some(mcp_url)
).await;

View File

@ -93,15 +93,45 @@ async fn mock_chat_completion() -> impl IntoResponse {
(StatusCode::OK, [(header::CONTENT_TYPE, "text/event-stream")], body)
}
async fn mock_models() -> impl IntoResponse {
use axum::http::header;
let body = serde_json::json!({
"data": [
{
"id": "google/gemini-flash-1.5",
"name": "Gemini Flash 1.5",
"pricing": {
"prompt": "0",
"completion": "0"
},
"context_length": 32000,
"architecture": {
"modality": "text+image->text",
"tokenizer": "Gemini",
"instruct_type": null
},
"top_provider": {
"max_completion_tokens": null,
"is_moderated": false
},
"per_request_limits": null
}
]
});
(StatusCode::OK, [(header::CONTENT_TYPE, "application/json")], Json(body))
}
use common_contracts::messages::{StartWorkflowCommand, SyncStateCommand, WorkflowEvent};
fn create_v1_router() -> Router<AppState> {
Router::new()
// Mock LLM for E2E
.route("/mock/chat/completions", post(mock_chat_completion))
.route("/mock/models", get(mock_models))
// New Workflow API
.route("/workflow/start", post(start_workflow))
.route("/workflow/events/{request_id}", get(workflow_events_stream))
.route("/workflow/{request_id}/graph", get(get_workflow_graph_proxy))
// Tools
.route("/tools/resolve-symbol", post(resolve_symbol))
// Legacy routes (marked for removal or compatibility)
@ -464,6 +494,8 @@ async fn trigger_analysis_generation(
request_id,
symbol: normalized_symbol.clone(),
template_id: payload.template_id,
task_id: None,
module_id: None,
};
info!(request_id = %request_id, "Publishing analysis generation command");
@ -993,3 +1025,26 @@ async fn discover_models_preview(
let models_json: serde_json::Value = response.json().await?;
Ok((StatusCode::OK, Json(models_json)).into_response())
}
/// [GET /v1/workflow/:request_id/graph]
async fn get_workflow_graph_proxy(
State(state): State<AppState>,
Path(request_id): Path<Uuid>,
) -> Result<impl IntoResponse> {
let url = format!(
"{}/workflows/{}/graph",
state.config.workflow_orchestrator_service_url.trim_end_matches('/'),
request_id
);
let client = reqwest::Client::new();
let resp = client.get(&url).send().await?;
let status = resp.status();
let body = resp.bytes().await?;
Ok((
StatusCode::from_u16(status.as_u16()).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR),
axum::body::Body::from(body),
))
}

View File

@ -7,6 +7,7 @@ pub struct AppConfig {
pub nats_addr: String,
pub data_persistence_service_url: String,
pub report_generator_service_url: String,
pub workflow_orchestrator_service_url: String,
}
impl AppConfig {
@ -22,12 +23,16 @@ impl AppConfig {
let report_generator_service_url: String = cfg
.get::<String>("report_generator_service_url")
.unwrap_or_else(|_| "http://report-generator-service:8004".to_string());
let workflow_orchestrator_service_url: String = cfg
.get::<String>("workflow_orchestrator_service_url")
.unwrap_or_else(|_| "http://workflow-orchestrator-service:8005".to_string());
Ok(Self {
server_port,
nats_addr,
data_persistence_service_url,
report_generator_service_url,
workflow_orchestrator_service_url,
})
}
}

View File

@ -25,6 +25,7 @@ pub struct LlmProvider {
#[api_dto]
#[serde(transparent)]
#[derive(Default)]
#[schema(value_type = HashMap<String, LlmProvider>)]
pub struct LlmProvidersConfig(pub HashMap<String, LlmProvider>);
impl LlmProvidersConfig {
@ -53,6 +54,7 @@ impl DerefMut for LlmProvidersConfig {
#[api_dto]
#[serde(transparent)]
#[derive(Default)]
#[schema(value_type = HashMap<String, AnalysisTemplateSet>)]
pub struct AnalysisTemplateSets(pub HashMap<String, AnalysisTemplateSet>);
impl AnalysisTemplateSets {
@ -143,6 +145,7 @@ pub struct DataSourceConfig {
#[api_dto]
#[serde(transparent)]
#[derive(Default)]
#[schema(value_type = HashMap<String, DataSourceConfig>)]
pub struct DataSourcesConfig(pub HashMap<String, DataSourceConfig>);
impl DataSourcesConfig {

View File

@ -64,6 +64,10 @@ pub struct GenerateReportCommand {
pub request_id: Uuid,
pub symbol: CanonicalSymbol,
pub template_id: String,
/// The task_id in the workflow DAG that triggered this.
/// Used for reporting progress/content back to the specific node.
pub task_id: Option<String>,
pub module_id: Option<String>,
}
impl SubjectMessage for GenerateReportCommand {
@ -103,9 +107,17 @@ pub enum WorkflowEvent {
index: u32
},
// 3.5. 任务日志 (用于实时展示详细执行过程)
TaskLog {
task_id: String,
level: String, // INFO, WARN, ERROR
message: String,
timestamp: i64,
},
// 4. 流程整体结束
WorkflowCompleted {
result_summary: serde_json::Value,
result_summary: Option<serde_json::Value>,
end_timestamp: i64
},

View File

@ -1,6 +1,5 @@
use uuid::Uuid;
use service_kit::api_dto;
use serde::{Serialize, Deserialize};
use crate::subjects::{NatsSubject, SubjectMessage};
// --- Enums ---

View File

@ -1,9 +1,10 @@
# 1. Build Stage
FROM rust:1.90 as builder
FROM rust:1.90-bookworm as builder
WORKDIR /usr/src/app
# Copy full sources (simple and correct; avoids shipping stub binaries)
COPY ./services/common-contracts /usr/src/app/services/common-contracts
COPY ./crates/workflow-context /usr/src/app/crates/workflow-context
COPY ./services/finnhub-provider-service /usr/src/app/services/finnhub-provider-service
WORKDIR /usr/src/app/services/finnhub-provider-service
RUN cargo build --bin finnhub-provider-service
@ -15,7 +16,7 @@ FROM debian:bookworm-slim
ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Minimal runtime deps for health checks (curl) and TLS roots if needed
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl libssl3 && rm -rf /var/lib/apt/lists/*
# Copy the built binary from the builder stage
COPY --from=builder /usr/src/app/services/finnhub-provider-service/target/debug/finnhub-provider-service /usr/local/bin/

View File

@ -15,10 +15,10 @@ pub async fn run(state: AppState) -> Result<()> {
let status = state.status.read().await.clone();
if let ServiceOperationalStatus::Degraded { reason } = status {
warn!(
"Service is in degraded state (reason: {}). Pausing message consumption for 30s.",
"Service is in degraded state (reason: {}). Pausing message consumption for 5s.",
reason
);
tokio::time::sleep(Duration::from_secs(30)).await;
tokio::time::sleep(Duration::from_secs(5)).await;
continue;
}

View File

@ -211,7 +211,6 @@ mod integration_tests {
use super::*;
use crate::config::AppConfig;
use crate::state::AppState;
use secrecy::SecretString;
use common_contracts::symbol_utils::{CanonicalSymbol, Market};
use uuid::Uuid;
@ -234,7 +233,7 @@ mod integration_tests {
// 2. Manual Init Provider
state.update_provider(
Some(SecretString::new(api_key.into())),
Some(api_key),
Some(api_url)
).await;

3431
services/mock-provider-service/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,27 @@
[package]
name = "mock-provider-service"
version = "0.1.0"
edition = "2021"
[dependencies]
async-trait = "0.1.89"
secrecy = { version = "0.8", features = ["serde"] }
common-contracts = { path = "../common-contracts", default-features = false }
workflow-context = { path = "../../crates/workflow-context" }
anyhow = "1.0"
async-nats = "0.45.0"
axum = "0.8"
config = "0.15.19"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1.0", features = ["full"] }
tower-http = { version = "0.6.6", features = ["cors"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
uuid = { version = "1.6", features = ["v4", "serde"] }
reqwest = { version = "0.12.24", features = ["json"] }
thiserror = "2.0.17"
chrono = "0.4.38"
dashmap = "6.1.0"
futures-util = "0.3.31"

View File

@ -0,0 +1,26 @@
# 1. Build Stage
FROM rust:1.90-bookworm as builder
WORKDIR /usr/src/app
# Copy full sources
COPY ./services/common-contracts /usr/src/app/services/common-contracts
COPY ./crates/workflow-context /usr/src/app/crates/workflow-context
COPY ./services/mock-provider-service /usr/src/app/services/mock-provider-service
WORKDIR /usr/src/app/services/mock-provider-service
# NOTE(review): debug build keeps image builds fast for this test-only service;
# switch to `cargo build --release` (and the release path in the COPY below)
# if this image ever ships beyond E2E testing.
RUN cargo build --bin mock-provider-service
# 2. Runtime Stage
FROM debian:bookworm-slim
# Set timezone
ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Minimal runtime deps for health checks (curl) and TLS roots if needed
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl libssl3 && rm -rf /var/lib/apt/lists/*
# Copy the built binary from the builder stage
COPY --from=builder /usr/src/app/services/mock-provider-service/target/debug/mock-provider-service /usr/local/bin/
# Set the binary as the entrypoint
ENTRYPOINT ["/usr/local/bin/mock-provider-service"]

View File

@ -0,0 +1,26 @@
use crate::state::AppState;
use axum::{
extract::State,
http::StatusCode,
response::{IntoResponse, Json},
routing::get,
Router,
};
use common_contracts::observability::TaskProgress;
/// Builds the HTTP router for the mock provider, attaching shared state.
pub fn create_router(state: AppState) -> Router {
    let routes = Router::new()
        .route("/health", get(health_check))
        .route("/tasks", get(get_tasks));
    routes.with_state(state)
}
/// Liveness probe endpoint; always answers 200 with a plain "OK" body.
async fn health_check() -> impl IntoResponse {
    let body = "OK";
    (StatusCode::OK, body)
}
/// Returns a JSON snapshot of every tracked task's progress record.
async fn get_tasks(State(state): State<AppState>) -> impl IntoResponse {
    let mut snapshot = Vec::with_capacity(state.tasks.len());
    for entry in state.tasks.iter() {
        snapshot.push(entry.value().clone());
    }
    Json(snapshot)
}

View File

@ -0,0 +1,25 @@
use serde::Deserialize;
use config::Config;
use std::env;
/// Runtime configuration for the mock provider service.
///
/// Loaded via [`AppConfig::load`]: built-in defaults overlaid with
/// environment variables (e.g. SERVER_PORT, NATS_ADDR — see docker-compose).
#[derive(Debug, Deserialize, Clone)]
pub struct AppConfig {
    /// Port the HTTP server binds to.
    pub server_port: u16,
    /// NATS server address, e.g. "nats://nats:4222".
    pub nats_addr: String,
    /// Base URL of the API gateway used for service registration.
    pub api_gateway_url: String,
    /// Hostname other services use to reach this instance (advertised in registration).
    pub service_host: String,
}
impl AppConfig {
    /// Assembles the configuration: hard-coded defaults first, then any
    /// environment-variable overrides, then deserializes into `AppConfig`.
    pub fn load() -> Result<Self, config::ConfigError> {
        let defaults = Config::builder()
            .set_default("server_port", 8005)?
            .set_default("nats_addr", "nats://nats:4222")?
            .set_default("api_gateway_url", "http://api-gateway:4000")?
            .set_default("service_host", "mock-provider-service")?;

        let merged = defaults
            .add_source(config::Environment::default())
            .build()?;
        merged.try_deserialize()
    }
}

View File

@ -0,0 +1,18 @@
use thiserror::Error;
/// Top-level error type for the mock provider service.
#[derive(Error, Debug)]
pub enum AppError {
    /// Failure while loading or parsing service configuration.
    #[error("Configuration error: {0}")]
    Configuration(String),
    /// HTTP request to another service failed (via reqwest).
    #[error("Service error: {0}")]
    ServiceRequest(#[from] reqwest::Error),
    /// NATS connection or publish/subscribe failure.
    #[error("NATS error: {0}")]
    Nats(#[from] async_nats::Error),
    /// Payload (de)serialization or other data-shaping failure.
    #[error("Data parsing error: {0}")]
    DataParsing(#[from] anyhow::Error),
    /// Catch-all for errors without a dedicated variant.
    #[error("Unknown error: {0}")]
    Unknown(String),
}

/// Convenience alias used throughout this crate.
pub type Result<T> = std::result::Result<T, AppError>;

View File

@ -0,0 +1,103 @@
mod worker;
mod config;
mod error;
mod state;
mod api;
use crate::config::AppConfig;
use crate::error::Result;
use crate::state::AppState;
use tracing::info;
use common_contracts::lifecycle::ServiceRegistrar;
use common_contracts::registry::{ServiceRegistration, ProviderMetadata};
use std::sync::Arc;
#[tokio::main]
async fn main() -> Result<()> {
// Initialize logging
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.init();
info!("Starting mock-provider-service...");
// Load configuration
let config = AppConfig::load().map_err(|e| error::AppError::Configuration(e.to_string()))?;
let port = config.server_port;
// Initialize application state
let app_state = AppState::new(config.clone());
// Create the Axum router
let app = api::create_router(app_state.clone());
// --- Start the message consumer ---
tokio::spawn(worker::run_consumer(app_state));
// --- Service Registration ---
let registrar = ServiceRegistrar::new(
config.api_gateway_url.clone(),
ServiceRegistration {
service_id: format!("{}-{}", "mock-provider", uuid::Uuid::new_v4()),
service_name: "mock".to_string(),
role: common_contracts::registry::ServiceRole::DataProvider,
base_url: format!("http://{}:{}", config.service_host, port),
health_check_url: format!("http://{}:{}/health", config.service_host, port),
metadata: Some(ProviderMetadata {
id: "mock".to_string(),
name_en: "Mock Provider".to_string(),
name_cn: "Mock Provider (Test)".to_string(),
description: "Mock Data Provider for E2E Testing".to_string(),
icon_url: None,
config_schema: vec![],
supports_test_connection: true,
}),
}
);
let _ = registrar.register().await;
let registrar = Arc::new(registrar);
tokio::spawn(registrar.clone().start_heartbeat_loop());
// Start the HTTP server
let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{}", port))
.await
.unwrap();
info!("HTTP server listening on port {}", port);
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal(registrar))
.await
.unwrap();
Ok(())
}
/// Resolves when a shutdown signal arrives (Ctrl+C anywhere; SIGTERM on Unix),
/// then best-effort deregisters this instance from the API gateway.
async fn shutdown_signal(registrar: Arc<ServiceRegistrar>) {
    #[cfg(unix)]
    let terminate = async {
        let mut sigterm =
            tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
                .expect("failed to install signal handler");
        sigterm.recv().await;
    };
    #[cfg(not(unix))]
    let terminate = std::future::pending::<()>();

    let ctrl_c = async {
        tokio::signal::ctrl_c()
            .await
            .expect("failed to install Ctrl+C handler");
    };

    tokio::select! {
        _ = ctrl_c => {},
        _ = terminate => {},
    }

    info!("Shutdown signal received, deregistering service...");
    // Ignore deregistration errors: we are shutting down regardless.
    let _ = registrar.deregister().await;
}

View File

@ -0,0 +1,61 @@
use crate::config::AppConfig;
use common_contracts::workflow_harness::TaskState;
use common_contracts::observability::{TaskProgress, ObservabilityTaskStatus};
use dashmap::DashMap;
use uuid::Uuid;
use std::sync::Arc;
use chrono::Utc;
/// Shared application state: configuration plus an in-memory task registry.
/// Cheap to clone (all fields are `Arc`s).
#[derive(Clone)]
pub struct AppState {
    pub config: Arc<AppConfig>,
    pub tasks: Arc<DashMap<Uuid, TaskProgress>>,
}

impl AppState {
    /// Wraps the given configuration and starts with an empty task map.
    pub fn new(config: AppConfig) -> Self {
        let config = Arc::new(config);
        let tasks = Arc::new(DashMap::new());
        Self { config, tasks }
    }
}
#[async_trait::async_trait]
impl TaskState for AppState {
    /// Upserts the progress record for `task_id` via the DashMap entry API
    /// (single atomic lookup for both the update and insert paths).
    fn update_status(&self, task_id: Uuid, status: ObservabilityTaskStatus, progress: u8, details: String) {
        let slot = self.tasks.entry(task_id);
        slot.and_modify(|record| {
            record.status = status;
            record.progress_percent = progress;
            record.details = details.clone();
        })
        .or_insert_with(|| TaskProgress {
            request_id: task_id,
            task_name: "mock_task".to_string(),
            status,
            progress_percent: progress,
            details,
            started_at: Utc::now(),
        });
    }

    /// Marks the task failed, storing the error text and resetting progress to 0.
    fn fail_task(&self, task_id: Uuid, error: String) {
        self.update_status(task_id, ObservabilityTaskStatus::Failed, 0, error);
    }

    /// Marks the task completed at 100% with a final details message.
    fn complete_task(&self, task_id: Uuid, details: String) {
        self.update_status(task_id, ObservabilityTaskStatus::Completed, 100, details);
    }

    fn get_nats_addr(&self) -> String {
        self.config.nats_addr.clone()
    }

    fn get_persistence_url(&self) -> String {
        // Mock provider doesn't use persistence client in this simplified version,
        // but if we need it, we can add it to config.
        // For now return dummy or config value.
        "http://data-persistence-service:3000/api/v1".to_string()
    }
}

View File

@ -0,0 +1,212 @@
use anyhow::{Result, anyhow, Context};
use tracing::{info, error};
use common_contracts::workflow_types::{WorkflowTaskCommand, WorkflowTaskEvent, TaskStatus, TaskResult};
use common_contracts::subjects::{NatsSubject, SubjectMessage};
use common_contracts::dtos::{CompanyProfileDto, TimeSeriesFinancialDto};
use workflow_context::WorkerContext;
use crate::state::AppState;
use serde_json::json;
use futures_util::StreamExt;
use chrono::NaiveDate;
pub async fn run_consumer(state: AppState) -> Result<()> {
info!("Starting Mock Provider message consumer...");
let client = async_nats::connect(&state.config.nats_addr).await?;
let routing_key = "provider.mock".to_string();
let subject = NatsSubject::WorkflowCommand(routing_key).to_string();
let mut subscriber = client.subscribe(subject.clone()).await?;
info!("Workflow Consumer started on '{}'", subject);
while let Some(message) = subscriber.next().await {
info!("Received Workflow NATS message.");
let state_clone = state.clone();
let client_clone = client.clone();
tokio::spawn(async move {
match serde_json::from_slice::<WorkflowTaskCommand>(&message.payload) {
Ok(cmd) => {
if let Err(e) = handle_workflow_command(state_clone, client_clone, cmd).await {
error!("Error handling workflow command: {:?}", e);
}
}
Err(e) => {
error!("Failed to deserialize workflow message: {}", e);
}
}
});
}
Ok(())
}
async fn handle_workflow_command(_state: AppState, nats: async_nats::Client, cmd: WorkflowTaskCommand) -> Result<()> {
info!("Processing mock workflow command: task_id={}", cmd.task_id);
let symbol_code = cmd.config.get("symbol").and_then(|s| s.as_str()).unwrap_or("MOCK").to_string();
// Generate Dummy Data
let profile = CompanyProfileDto {
symbol: symbol_code.clone(),
name: format!("Mock Company {}", symbol_code),
industry: Some("Testing".to_string()),
list_date: Some(NaiveDate::from_ymd_opt(2000, 1, 1).unwrap()),
additional_info: Some(json!({
"description": "This is a mock company for testing purposes.",
"market_cap": 1000000000.0,
"currency": "USD",
"country": "US"
})),
updated_at: Some(chrono::Utc::now()),
};
// Create Normalized Financials
let date = NaiveDate::from_ymd_opt(2023, 12, 31).unwrap();
let financials = vec![
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "revenue".to_string(),
period_date: date,
value: 1000000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "net_income".to_string(),
period_date: date,
value: 500000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "gross_profit".to_string(),
period_date: date,
value: 800000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "operating_income".to_string(),
period_date: date,
value: 600000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "eps".to_string(),
period_date: date,
value: 5.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "total_assets".to_string(),
period_date: date,
value: 2000000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "total_liabilities".to_string(),
period_date: date,
value: 1000000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "total_equity".to_string(),
period_date: date,
value: 1000000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "operating_cash_flow".to_string(),
period_date: date,
value: 550000.0,
source: Some("mock".to_string()),
},
TimeSeriesFinancialDto {
symbol: symbol_code.clone(),
metric_name: "free_cash_flow".to_string(),
period_date: date,
value: 450000.0,
source: Some("mock".to_string()),
}
];
// Write to VGCS
let root_path = cmd.storage.root_path.clone();
let req_id = cmd.request_id.to_string();
let base_commit = cmd.context.base_commit.clone().unwrap_or_default();
let symbol_code_clone = symbol_code.clone();
// Clone data for the blocking task
let profile_clone = profile.clone();
let financials_clone = financials.clone();
let commit_result = tokio::task::spawn_blocking(move || -> Result<String> {
let mut ctx = WorkerContext::new(&root_path, &req_id, &base_commit);
let base_dir = format!("raw/mock/{}", symbol_code_clone);
let profile_json = serde_json::to_string_pretty(&profile_clone)
.context("Failed to serialize profile")?;
ctx.write_file(&format!("{}/profile.json", base_dir), &profile_json)?;
let financials_json = serde_json::to_string_pretty(&financials_clone)
.context("Failed to serialize financials")?;
ctx.write_file(&format!("{}/financials.json", base_dir), &financials_json)?;
ctx.commit(&format!("Fetched Mock data for {}", symbol_code_clone))
}).await;
let new_commit = match commit_result {
Ok(res) => match res {
Ok(c) => c,
Err(e) => return send_failure(&nats, &cmd, &format!("VGCS failed: {}", e)).await,
},
Err(e) => return send_failure(&nats, &cmd, &format!("Task join error: {}", e)).await,
};
info!("Task {} completed. New commit: {}", cmd.task_id, new_commit);
// Send Success Event
let event = WorkflowTaskEvent {
request_id: cmd.request_id,
task_id: cmd.task_id,
status: TaskStatus::Completed,
result: Some(TaskResult {
new_commit: Some(new_commit),
error: None,
summary: Some(json!({
"symbol": symbol_code,
"records": financials.len()
})),
}),
};
publish_event(&nats, event).await
}
/// Logs the failure and publishes a Failed task event carrying `error_msg`.
/// Returns the publish outcome (Ok means the event was sent, not that the
/// task succeeded).
async fn send_failure(nats: &async_nats::Client, cmd: &WorkflowTaskCommand, error_msg: &str) -> Result<()> {
    error!("Task {} failed: {}", cmd.task_id, error_msg);

    let failure_result = TaskResult {
        new_commit: None,
        error: Some(error_msg.to_string()),
        summary: None,
    };
    let event = WorkflowTaskEvent {
        request_id: cmd.request_id,
        task_id: cmd.task_id.clone(),
        status: TaskStatus::Failed,
        result: Some(failure_result),
    };
    publish_event(nats, event).await
}
/// Serializes the event to JSON and publishes it on its designated NATS subject.
async fn publish_event(nats: &async_nats::Client, event: WorkflowTaskEvent) -> Result<()> {
    let topic = event.subject().to_string();
    let bytes = serde_json::to_vec(&event)?;
    nats.publish(topic, bytes.into()).await?;
    Ok(())
}

View File

@ -1,9 +1,10 @@
# 1. Build Stage
FROM rust:1.90 as builder
FROM rust:1.90-bookworm as builder
WORKDIR /usr/src/app
# Copy full sources (simple and correct; avoids shipping stub binaries)
COPY ./services/common-contracts /usr/src/app/services/common-contracts
COPY ./crates/workflow-context /usr/src/app/crates/workflow-context
COPY ./services/report-generator-service /usr/src/app/services/report-generator-service
WORKDIR /usr/src/app/services/report-generator-service
RUN cargo build --bin report-generator-service
@ -15,7 +16,7 @@ FROM debian:bookworm-slim
ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Minimal runtime deps for health checks (curl) and TLS roots if needed
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl libssl3 && rm -rf /var/lib/apt/lists/*
# Copy the built binary from the builder stage
COPY --from=builder /usr/src/app/services/report-generator-service/target/debug/report-generator-service /usr/local/bin/

View File

@ -1,26 +1,35 @@
use std::sync::Arc;
use common_contracts::messages::GenerateReportCommand;
use common_contracts::workflow_types::{WorkflowTaskCommand, WorkflowTaskEvent, TaskStatus};
use common_contracts::symbol_utils::{CanonicalSymbol, Market};
// use common_contracts::subjects::SubjectMessage; // Removed unused import
use futures::StreamExt;
use tracing::{error, info};
use crate::{state::AppState, worker::run_report_generation_workflow};
const SUBJECT_NAME: &str = "analysis.commands.generate_report";
const OLD_SUBJECT_NAME: &str = "analysis.commands.generate_report";
const WORKFLOW_CMD_SUBJECT: &str = "workflow.cmd.analysis.report";
pub async fn subscribe_to_commands(
app_state: AppState,
nats_client: async_nats::Client,
) -> Result<(), anyhow::Error> {
let mut subscriber = nats_client.subscribe(SUBJECT_NAME.to_string()).await?;
let mut old_subscriber = nats_client.subscribe(OLD_SUBJECT_NAME.to_string()).await?;
let mut workflow_subscriber = nats_client.subscribe(WORKFLOW_CMD_SUBJECT.to_string()).await?;
info!(
"Consumer started, waiting for commands on subject '{}'",
SUBJECT_NAME
"Consumer started, waiting for commands on subjects '{}' and '{}'",
OLD_SUBJECT_NAME, WORKFLOW_CMD_SUBJECT
);
while let Some(message) = subscriber.next().await {
info!("Received NATS command for report generation. Payload size: {} bytes", message.payload.len());
let state_clone = app_state.clone();
// Spawn task for old command style
let state_clone1 = app_state.clone();
tokio::spawn(async move {
while let Some(message) = old_subscriber.next().await {
info!("Received legacy NATS command. Payload size: {} bytes", message.payload.len());
let state = state_clone1.clone();
tokio::spawn(async move {
match serde_json::from_slice::<GenerateReportCommand>(&message.payload) {
Ok(command) => {
@ -28,8 +37,7 @@ pub async fn subscribe_to_commands(
"Command triggered analysis for symbol: {}, template: {}, request_id: {}",
command.symbol, command.template_id, command.request_id
);
if let Err(e) = run_report_generation_workflow(Arc::new(state_clone), command).await
{
if let Err(e) = run_report_generation_workflow(Arc::new(state), command).await {
error!("Error running report generation workflow: {:?}", e);
}
}
@ -39,6 +47,120 @@ pub async fn subscribe_to_commands(
}
});
}
});
// Spawn task for new WorkflowTaskCommand
let state_clone2 = app_state.clone();
let nats_client2 = nats_client.clone();
tokio::spawn(async move {
while let Some(message) = workflow_subscriber.next().await {
info!("Received WorkflowTaskCommand. Payload size: {} bytes", message.payload.len());
let state = state_clone2.clone();
let nats = nats_client2.clone();
tokio::spawn(async move {
match serde_json::from_slice::<WorkflowTaskCommand>(&message.payload) {
Ok(task_cmd) => {
info!("Received WorkflowTaskCommand for task_id: {}", task_cmd.task_id);
// 1. Extract params from config
let symbol_str = task_cmd.config.get("symbol").and_then(|v| v.as_str());
let market_str = task_cmd.config.get("market").and_then(|v| v.as_str());
let template_id = task_cmd.config.get("template_id").and_then(|v| v.as_str());
let module_id = task_cmd.config.get("module_id").and_then(|v| v.as_str());
if let (Some(s), Some(m), Some(t)) = (symbol_str, market_str, template_id) {
let market = Market::from(m);
let symbol = CanonicalSymbol::new(s, &market);
let report_cmd = GenerateReportCommand {
request_id: task_cmd.request_id,
symbol: symbol.clone(),
template_id: t.to_string(),
task_id: Some(task_cmd.task_id.clone()),
module_id: module_id.map(|v| v.to_string()),
};
// 2. Send TaskStatus::Running
let running_evt = WorkflowTaskEvent {
request_id: task_cmd.request_id,
task_id: task_cmd.task_id.clone(),
status: TaskStatus::Running,
result: None,
};
if let Ok(payload) = serde_json::to_vec(&running_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowEventTaskCompleted.to_string();
if let Err(e) = nats.publish(subject, payload.into()).await {
error!("Failed to publish TaskRunning event: {}", e);
}
}
// 3. Run Logic
match run_report_generation_workflow(Arc::new(state), report_cmd).await {
Ok(_) => {
// 4. Send TaskStatus::Completed
let completed_evt = WorkflowTaskEvent {
request_id: task_cmd.request_id,
task_id: task_cmd.task_id.clone(),
status: TaskStatus::Completed,
result: None, // Future: Add commit hash here if we used VGCS
};
if let Ok(payload) = serde_json::to_vec(&completed_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowEventTaskCompleted.to_string();
if let Err(e) = nats.publish(subject, payload.into()).await {
error!("Failed to publish TaskCompleted event: {}", e);
}
}
}
Err(e) => {
error!("Workflow execution failed: {}", e);
// 5. Send TaskStatus::Failed
let failed_evt = WorkflowTaskEvent {
request_id: task_cmd.request_id,
task_id: task_cmd.task_id.clone(),
status: TaskStatus::Failed,
result: Some(common_contracts::workflow_types::TaskResult {
new_commit: None,
error: Some(e.to_string()),
summary: None,
}),
};
if let Ok(payload) = serde_json::to_vec(&failed_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowEventTaskCompleted.to_string();
if let Err(e) = nats.publish(subject, payload.into()).await {
error!("Failed to publish TaskFailed event: {}", e);
}
}
}
}
} else {
error!("Missing required config params in WorkflowTaskCommand: {:?}", task_cmd.config);
// Send Failed Event immediately
let failed_evt = WorkflowTaskEvent {
request_id: task_cmd.request_id,
task_id: task_cmd.task_id.clone(),
status: TaskStatus::Failed,
result: Some(common_contracts::workflow_types::TaskResult {
new_commit: None,
error: Some("Missing required config params (symbol, market, template_id)".to_string()),
summary: None,
}),
};
if let Ok(payload) = serde_json::to_vec(&failed_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowEventTaskCompleted.to_string();
let _ = nats.publish(subject, payload.into()).await;
}
}
}
Err(e) => {
error!("Failed to deserialize WorkflowTaskCommand: {}. Payload: {:?}", e, message.payload);
}
}
});
}
});
Ok(())
}

View File

@ -8,7 +8,7 @@ use crate::error::Result;
use common_contracts::{
config_models::{AnalysisTemplateSets, LlmProvidersConfig},
dtos::{
CompanyProfileDto, NewAnalysisResult, RealtimeQuoteDto, SessionDataDto,
AnalysisResultDto, CompanyProfileDto, NewAnalysisResult, RealtimeQuoteDto, SessionDataDto,
TimeSeriesFinancialBatchDto, TimeSeriesFinancialDto,
},
};
@ -156,6 +156,22 @@ impl PersistenceClient {
Ok(())
}
pub async fn get_analysis_results(&self, symbol: &str) -> Result<Vec<AnalysisResultDto>> {
let url = format!("{}/analysis-results", self.base_url);
let params = [("symbol", symbol)];
info!("Fetching analysis results for {} from {}", symbol, url);
let dtos = self
.client
.get(&url)
.query(&params)
.send()
.await?
.error_for_status()?
.json::<Vec<AnalysisResultDto>>()
.await?;
Ok(dtos)
}
pub async fn upsert_company_profile(&self, profile: CompanyProfileDto) -> Result<()> {
let url = format!("{}/companies", self.base_url);
info!("Upserting company profile for {} to {}", profile.symbol, url);

View File

@ -5,13 +5,13 @@ use common_contracts::config_models::{
AnalysisModuleConfig, AnalysisTemplateSets, LlmProvidersConfig,
};
use common_contracts::dtos::{CompanyProfileDto, NewAnalysisResult, TimeSeriesFinancialDto};
use common_contracts::messages::{GenerateReportCommand, ReportGeneratedEvent, ReportFailedEvent};
use common_contracts::messages::{GenerateReportCommand, ReportGeneratedEvent, ReportFailedEvent, WorkflowEvent};
use common_contracts::subjects::SubjectMessage;
use futures_util::StreamExt;
use petgraph::algo::toposort;
use petgraph::graph::DiGraph;
use tera::{Context, Tera};
use tracing::{info, instrument, error};
use tracing::{info, instrument, error, warn};
use crate::error::{ProviderError, Result};
use crate::llm_client::LlmClient;
@ -50,23 +50,68 @@ pub async fn run_report_generation_workflow(
ProviderError::Configuration(err_msg)
})?;
// 3. Topologically sort modules to get execution order
let sorted_modules = sort_modules_by_dependency(&template_set.modules)
// 3. Determine Execution Plan (Single Module vs Full Workflow)
let sorted_modules = if let Some(target_module) = &command.module_id {
info!("Targeting single module execution: {}", target_module);
if !template_set.modules.contains_key(target_module) {
let err_msg = format!("Module '{}' not found in template '{}'", target_module, command.template_id);
error!("{}", err_msg);
return Err(ProviderError::Configuration(err_msg));
}
vec![target_module.clone()]
} else {
info!("Targeting full workflow execution.");
sort_modules_by_dependency(&template_set.modules)
.map_err(|e| {
error!("Failed to sort analysis modules: {}", e);
e
})?;
})?
};
info!(execution_order = ?sorted_modules, "Successfully determined module execution order.");
// 4. Execute modules in order
let mut generated_results: HashMap<String, String> = HashMap::new();
// If single module, preload context from persistence (previous steps)
if command.module_id.is_some() {
info!("Preloading context from previous analysis results...");
match persistence_client.get_analysis_results(command.symbol.as_str()).await {
Ok(results) => {
for r in results {
if r.request_id == command.request_id {
generated_results.insert(r.module_id, r.content);
}
}
info!("Preloaded {} context items.", generated_results.len());
},
Err(e) => {
warn!("Failed to preload analysis results: {}", e);
// Non-fatal, but might cause dependency error later
}
}
}
for module_id in sorted_modules {
let module_config = template_set.modules.get(&module_id).unwrap();
info!(module_id = %module_id, "All dependencies met. Generating report for module.");
// Publish TaskLog
if let Some(task_id) = &command.task_id {
let log_evt = WorkflowEvent::TaskLog {
task_id: task_id.clone(),
level: "INFO".to_string(),
message: format!("Starting module: {}", module_id),
timestamp: chrono::Utc::now().timestamp_millis(),
};
if let Ok(payload) = serde_json::to_vec(&log_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowProgress(command.request_id).to_string();
let nats = state.nats.clone();
tokio::spawn(async move { let _ = nats.publish(subject, payload.into()).await; });
}
}
// Broadcast Module Start
let _ = stream_tx.send(serde_json::json!({
"type": "module_start",
@ -76,7 +121,7 @@ pub async fn run_report_generation_workflow(
let llm_client = match create_llm_client_for_module(&llm_providers, module_config) {
Ok(client) => client,
Err(e) => {
error!(module_id = %module_id, "Failed to create LLM client: {}. Skipping module.", e);
error!(module_id = %module_id, "Failed to create LLM client: {}. Aborting workflow.", e);
let err_msg = format!("Error: Failed to create LLM client: {}", e);
generated_results.insert(module_id.clone(), err_msg.clone());
@ -100,7 +145,7 @@ pub async fn run_report_generation_workflow(
}
}
continue;
return Err(e);
}
};
@ -118,6 +163,20 @@ pub async fn run_report_generation_workflow(
let formatted_financials = format_financials_to_markdown(&financials);
context.insert("financial_data", &formatted_financials);
if let Some(task_id) = &command.task_id {
let log_evt = WorkflowEvent::TaskLog {
task_id: task_id.clone(),
level: "INFO".to_string(),
message: format!("Rendering prompt template for module: {}", module_id),
timestamp: chrono::Utc::now().timestamp_millis(),
};
if let Ok(payload) = serde_json::to_vec(&log_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowProgress(command.request_id).to_string();
let nats = state.nats.clone();
tokio::spawn(async move { let _ = nats.publish(subject, payload.into()).await; });
}
}
info!(module_id = %module_id, "Rendering prompt template...");
let prompt = match Tera::one_off(&module_config.prompt_template, &context, true) {
Ok(p) => {
@ -164,12 +223,25 @@ pub async fn run_report_generation_workflow(
let _ = state.nats.publish(fail_event.subject().to_string(), payload.into()).await;
}
continue;
return Err(ProviderError::Configuration(err_msg));
}
};
// Streaming Generation
info!(module_id = %module_id, "Initiating LLM stream...");
if let Some(task_id) = &command.task_id {
let log_evt = WorkflowEvent::TaskLog {
task_id: task_id.clone(),
level: "INFO".to_string(),
message: format!("Initiating LLM stream with model: {}", module_config.model_id),
timestamp: chrono::Utc::now().timestamp_millis(),
};
if let Ok(payload) = serde_json::to_vec(&log_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowProgress(command.request_id).to_string();
let nats = state.nats.clone();
tokio::spawn(async move { let _ = nats.publish(subject, payload.into()).await; });
}
}
let mut stream = match llm_client.stream_text(prompt).await {
Ok(s) => s,
Err(e) => {
@ -194,7 +266,7 @@ pub async fn run_report_generation_workflow(
let _ = state.nats.publish(fail_event.subject().to_string(), payload.into()).await;
}
continue;
return Err(ProviderError::LlmApi(err_msg));
}
};
@ -217,6 +289,23 @@ pub async fn run_report_generation_workflow(
"module_id": module_id,
"payload": chunk
}).to_string());
// Publish TaskStreamUpdate (NATS)
if let Some(task_id) = &command.task_id {
let stream_evt = WorkflowEvent::TaskStreamUpdate {
task_id: task_id.clone(),
content_delta: chunk.clone(),
index: 0, // Index tracking might be hard with current stream logic, frontend usually appends
};
if let Ok(payload) = serde_json::to_vec(&stream_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowProgress(command.request_id).to_string();
// Fire and forget via NATS too
let nats = state.nats.clone();
tokio::spawn(async move {
let _ = nats.publish(subject, payload.into()).await;
});
}
}
}
},
Err(e) => {
@ -234,6 +323,20 @@ pub async fn run_report_generation_workflow(
info!(module_id = %module_id, "Successfully generated content (Length: {}).", full_content.len());
if let Some(task_id) = &command.task_id {
let log_evt = WorkflowEvent::TaskLog {
task_id: task_id.clone(),
level: "INFO".to_string(),
message: format!("Module completed: {}. Content length: {}", module_id, full_content.len()),
timestamp: chrono::Utc::now().timestamp_millis(),
};
if let Ok(payload) = serde_json::to_vec(&log_evt) {
let subject = common_contracts::subjects::NatsSubject::WorkflowProgress(command.request_id).to_string();
let nats = state.nats.clone();
tokio::spawn(async move { let _ = nats.publish(subject, payload.into()).await; });
}
}
// Broadcast Module Done
let _ = stream_tx.send(serde_json::json!({
"type": "module_done",
@ -286,6 +389,9 @@ pub async fn run_report_generation_workflow(
"module_id": "workflow",
"payload": format!("Analysis workflow failed: {}", e)
}).to_string());
// Ensure we propagate the error so the message consumer sends TaskFailed
return Err(e);
}
// Broadcast Workflow Done
@ -548,6 +654,8 @@ mod integration_tests {
request_id,
symbol,
template_id: "test_template".to_string(),
task_id: None,
module_id: None,
};
// 6. Run Workflow

View File

@ -1,9 +1,10 @@
# 1. Build Stage
FROM rust:1.90 as builder
FROM rust:1.90-bookworm as builder
WORKDIR /usr/src/app
# Copy full sources (simple and correct; avoids shipping stub binaries)
COPY ./services/common-contracts /usr/src/app/services/common-contracts
COPY ./crates/workflow-context /usr/src/app/crates/workflow-context
COPY ./services/tushare-provider-service /usr/src/app/services/tushare-provider-service
WORKDIR /usr/src/app/services/tushare-provider-service
RUN cargo build --bin tushare-provider-service
@ -15,7 +16,7 @@ FROM debian:bookworm-slim
ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Minimal runtime deps for health checks (curl) and TLS roots if needed
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl libssl3 && rm -rf /var/lib/apt/lists/*
# Copy the built binary from the builder stage
COPY --from=builder /usr/src/app/services/tushare-provider-service/target/debug/tushare-provider-service /usr/local/bin/

View File

@ -1,13 +1,11 @@
use anyhow::{Result, anyhow, Context};
use tracing::{info, error, warn};
use common_contracts::workflow_types::{WorkflowTaskCommand, WorkflowTaskEvent, TaskStatus, TaskResult};
use common_contracts::subjects::{NatsSubject, SubjectMessage};
use common_contracts::subjects::SubjectMessage;
use common_contracts::dtos::{CompanyProfileDto, TimeSeriesFinancialDto};
use workflow_context::{WorkerContext, OutputFormat};
use workflow_context::WorkerContext;
use crate::state::AppState;
use crate::tushare::TushareDataProvider;
use serde_json::json;
use std::sync::Arc;
pub async fn handle_workflow_command(state: AppState, nats: async_nats::Client, cmd: WorkflowTaskCommand) -> Result<()> {
info!("Processing generic workflow command: task_id={}", cmd.task_id);
@ -20,16 +18,6 @@ pub async fn handle_workflow_command(state: AppState, nats: async_nats::Client,
return send_failure(&nats, &cmd, "Missing symbol in config").await;
}
// 2. Initialize Worker Context
// Note: We use the provided base_commit. If it's empty, it means start from scratch (or empty repo).
// We need to mount the volume.
let root_path = cmd.storage.root_path.clone();
let mut ctx = match WorkerContext::init(&cmd.request_id.to_string(), &root_path, cmd.context.base_commit.as_deref()) {
Ok(c) => c,
Err(e) => return send_failure(&nats, &cmd, &format!("Failed to init context: {}", e)).await,
};
// 3. Fetch Data (with Cache)
let fetch_result = fetch_and_cache(&state, &symbol_code, &market).await;
@ -38,22 +26,39 @@ pub async fn handle_workflow_command(state: AppState, nats: async_nats::Client,
Err(e) => return send_failure(&nats, &cmd, &format!("Fetch failed: {}", e)).await,
};
// 4. Write to VGCS
// Organize data in a structured way
let base_dir = format!("raw/tushare/{}", symbol_code);
// 4. Write to VGCS (Spawn blocking task for Git operations)
let root_path = cmd.storage.root_path.clone();
let req_id = cmd.request_id.to_string();
let base_commit = cmd.context.base_commit.clone().unwrap_or_default();
let task_id = cmd.task_id.clone();
if let Err(e) = ctx.write_file(&format!("{}/profile.json", base_dir), &profile, OutputFormat::Json) {
return send_failure(&nats, &cmd, &format!("Failed to write profile: {}", e)).await;
}
// Clone data needed for closure
let profile_clone = profile.clone();
let financials_clone = financials.clone();
let symbol_code_clone = symbol_code.clone();
if let Err(e) = ctx.write_file(&format!("{}/financials.json", base_dir), &financials, OutputFormat::Json) {
return send_failure(&nats, &cmd, &format!("Failed to write financials: {}", e)).await;
}
let commit_result = tokio::task::spawn_blocking(move || -> Result<String> {
let mut ctx = WorkerContext::new(&root_path, &req_id, &base_commit);
// 5. Commit
let new_commit = match ctx.commit(&format!("Fetched Tushare data for {}", symbol_code)) {
let base_dir = format!("raw/tushare/{}", symbol_code_clone);
let profile_json = serde_json::to_string_pretty(&profile_clone)
.context("Failed to serialize profile")?;
ctx.write_file(&format!("{}/profile.json", base_dir), &profile_json)?;
let financials_json = serde_json::to_string_pretty(&financials_clone)
.context("Failed to serialize financials")?;
ctx.write_file(&format!("{}/financials.json", base_dir), &financials_json)?;
ctx.commit(&format!("Fetched Tushare data for {}", symbol_code_clone))
}).await;
let new_commit = match commit_result {
Ok(res) => match res {
Ok(c) => c,
Err(e) => return send_failure(&nats, &cmd, &format!("Commit failed: {}", e)).await,
Err(e) => return send_failure(&nats, &cmd, &format!("VGCS failed: {}", e)).await,
},
Err(e) => return send_failure(&nats, &cmd, &format!("Task join error: {}", e)).await,
};
info!("Task {} completed. New commit: {}", cmd.task_id, new_commit);
@ -97,7 +102,7 @@ async fn fetch_and_cache(state: &AppState, symbol: &str, _market: &str) -> Resul
let persistence_url = state.get_persistence_url();
let p_client = PersistenceClient::new(persistence_url);
if let Err(e) = p_client.save_company_profile(&profile).await {
if let Err(e) = p_client.upsert_company_profile(profile.clone()).await {
warn!("Failed to cache company profile: {}", e);
}

View File

@ -8,6 +8,7 @@ mod state;
mod ts_client;
mod tushare;
mod worker;
mod generic_worker;
mod config_poller;
use crate::config::AppConfig;

View File

@ -19,10 +19,10 @@ pub async fn run(state: AppState) -> Result<()> {
let status = state.status.read().await.clone();
if let ServiceOperationalStatus::Degraded { reason } = status {
warn!(
"Service is in degraded state (reason: {}). Pausing message consumption for 30s.",
"Service is in degraded state (reason: {}). Pausing message consumption for 5s.",
reason
);
tokio::time::sleep(Duration::from_secs(30)).await;
tokio::time::sleep(Duration::from_secs(5)).await;
continue;
}

View File

@ -67,7 +67,17 @@ impl TushareClient {
.await?;
let text = res.text().await?;
let response: TushareResponse<T> = serde_json::from_str(&text)?;
// Try to parse as TushareResponse
let response: TushareResponse<T> = match serde_json::from_str(&text) {
Ok(r) => r,
Err(e) => {
return Err(AppError::DataParsing(anyhow::anyhow!(format!(
"Failed to parse Tushare response: {}. Body: {}",
e, text
))));
}
};
if response.code != 0 {
return Err(AppError::DataParsing(anyhow::anyhow!(format!(

View File

@ -226,6 +226,16 @@ dependencies = [
"syn 2.0.110",
]
[[package]]
name = "bstr"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab"
dependencies = [
"memchr",
"serde",
]
[[package]]
name = "bumpalo"
version = "3.19.0"
@ -270,6 +280,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97463e1064cb1b1c1384ad0a0b9c8abd0988e2a91f52606c80ef14aadb63e36"
dependencies = [
"find-msvc-tools",
"jobserver",
"libc",
"shlex",
]
@ -766,6 +778,34 @@ dependencies = [
"wasip2",
]
[[package]]
name = "git2"
version = "0.18.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70"
dependencies = [
"bitflags",
"libc",
"libgit2-sys",
"log",
"openssl-probe",
"openssl-sys",
"url",
]
[[package]]
name = "globset"
version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3"
dependencies = [
"aho-corasick",
"bstr",
"log",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "h2"
version = "0.4.12"
@ -830,6 +870,12 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "http"
version = "1.3.1"
@ -1126,6 +1172,16 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom 0.3.4",
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.82"
@ -1159,6 +1215,46 @@ version = "0.2.177"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
[[package]]
name = "libgit2-sys"
version = "0.16.2+1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8"
dependencies = [
"cc",
"libc",
"libssh2-sys",
"libz-sys",
"openssl-sys",
"pkg-config",
]
[[package]]
name = "libssh2-sys"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "220e4f05ad4a218192533b300327f5150e809b54c4ec83b5a1d91833601811b9"
dependencies = [
"cc",
"libc",
"libz-sys",
"openssl-sys",
"pkg-config",
"vcpkg",
]
[[package]]
name = "libz-sys"
version = "1.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
@ -1333,6 +1429,15 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
[[package]]
name = "openssl-src"
version = "300.5.4+3.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507b3792995dae9b0df8a1c1e3771e8418b7c2d9f0baeba32e6fe8b06c7cb72"
dependencies = [
"cc",
]
[[package]]
name = "openssl-sys"
version = "0.9.111"
@ -1341,6 +1446,7 @@ checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321"
dependencies = [
"cc",
"libc",
"openssl-src",
"pkg-config",
"vcpkg",
]
@ -1917,6 +2023,15 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "schannel"
version = "0.1.28"
@ -2827,6 +2942,16 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "walkdir"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
"same-file",
"winapi-util",
]
[[package]]
name = "want"
version = "0.3.1"
@ -2937,6 +3062,15 @@ dependencies = [
"rustls-pki-types",
]
[[package]]
name = "winapi-util"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "windows-core"
version = "0.62.2"
@ -3178,6 +3312,22 @@ version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "workflow-context"
version = "0.1.0"
dependencies = [
"anyhow",
"git2",
"globset",
"hex",
"regex",
"serde",
"serde_json",
"sha2",
"thiserror 1.0.69",
"walkdir",
]
[[package]]
name = "writeable"
version = "0.6.2"
@ -3226,6 +3376,7 @@ dependencies = [
"tracing",
"tracing-subscriber",
"uuid",
"workflow-context",
]
[[package]]

View File

@ -13,6 +13,7 @@ tower-http = { version = "0.6.6", features = ["cors"] }
# Shared Contracts
# Disable default features to avoid pulling in sqlx
common-contracts = { path = "../common-contracts", default-features = false }
workflow-context = { path = "../../crates/workflow-context" }
# Message Queue (NATS)
async-nats = "0.45.0"

View File

@ -1,9 +1,10 @@
# 1. Build Stage
FROM rust:1.90 as builder
FROM rust:1.90-bookworm as builder
WORKDIR /usr/src/app
# Copy full sources (simple and correct; avoids shipping stub binaries)
COPY ./services/common-contracts /usr/src/app/services/common-contracts
COPY ./crates/workflow-context /usr/src/app/crates/workflow-context
COPY ./services/yfinance-provider-service /usr/src/app/services/yfinance-provider-service
WORKDIR /usr/src/app/services/yfinance-provider-service
RUN cargo build --bin yfinance-provider-service
@ -15,7 +16,7 @@ FROM debian:bookworm-slim
ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Minimal runtime deps for health checks (curl) and TLS roots if needed
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl libssl3 && rm -rf /var/lib/apt/lists/*
# Copy the built binary from the builder stage
COPY --from=builder /usr/src/app/services/yfinance-provider-service/target/debug/yfinance-provider-service /usr/local/bin/

View File

@ -0,0 +1,132 @@
use anyhow::{Result, Context};
use tracing::{info, error, warn};
use common_contracts::workflow_types::{WorkflowTaskCommand, WorkflowTaskEvent, TaskStatus, TaskResult};
use common_contracts::subjects::SubjectMessage;
use common_contracts::dtos::{CompanyProfileDto, TimeSeriesFinancialDto};
use workflow_context::WorkerContext;
use crate::state::AppState;
use serde_json::json;
/// Handles a generic workflow task command for the YFinance provider.
///
/// Steps: parse the task config, fetch (and best-effort cache) company data,
/// write the results into the VGCS workspace on a blocking thread, then
/// publish a terminal `WorkflowTaskEvent` (Completed or Failed) over NATS.
///
/// Returns `Ok(())` once a terminal event has been published; `Err` only if
/// the publish itself fails.
pub async fn handle_workflow_command(state: AppState, nats: async_nats::Client, cmd: WorkflowTaskCommand) -> Result<()> {
    info!("Processing generic workflow command: task_id={}", cmd.task_id);

    // 1. Parse config. A missing symbol is a terminal task failure (reported
    // via a Failed event), not a transport error.
    let symbol_code = cmd.config.get("symbol").and_then(|s| s.as_str()).unwrap_or("").to_string();
    let market = cmd.config.get("market").and_then(|s| s.as_str()).unwrap_or("US").to_string();

    if symbol_code.is_empty() {
        return send_failure(&nats, &cmd, "Missing symbol in config").await;
    }

    // 2. Fetch data (with DB cache write-through).
    let (profile, financials) = match fetch_and_cache(&state, &symbol_code, &market).await {
        Ok(data) => data,
        Err(e) => return send_failure(&nats, &cmd, &format!("Fetch failed: {}", e)).await,
    };

    // 3. Write to VGCS. Git operations are synchronous, so run them on the
    // blocking thread pool to avoid stalling the async runtime. The closure
    // must be 'static, hence the owned clones below.
    let root_path = cmd.storage.root_path.clone();
    let req_id = cmd.request_id.to_string();
    let base_commit = cmd.context.base_commit.clone().unwrap_or_default();
    let profile_clone = profile.clone();
    let financials_clone = financials.clone();
    let symbol_code_clone = symbol_code.clone();

    let commit_result = tokio::task::spawn_blocking(move || -> Result<String> {
        let mut ctx = WorkerContext::new(&root_path, &req_id, &base_commit);

        let base_dir = format!("raw/yfinance/{}", symbol_code_clone);

        let profile_json = serde_json::to_string_pretty(&profile_clone)
            .context("Failed to serialize profile")?;
        ctx.write_file(&format!("{}/profile.json", base_dir), &profile_json)?;

        let financials_json = serde_json::to_string_pretty(&financials_clone)
            .context("Failed to serialize financials")?;
        ctx.write_file(&format!("{}/financials.json", base_dir), &financials_json)?;

        ctx.commit(&format!("Fetched YFinance data for {}", symbol_code_clone))
    }).await;

    // Distinguish a join error (panicked/cancelled blocking task) from a
    // VGCS-level failure; both are terminal for this task.
    let new_commit = match commit_result {
        Ok(Ok(c)) => c,
        Ok(Err(e)) => return send_failure(&nats, &cmd, &format!("VGCS failed: {}", e)).await,
        Err(e) => return send_failure(&nats, &cmd, &format!("Task join error: {}", e)).await,
    };

    info!("Task {} completed. New commit: {}", cmd.task_id, new_commit);

    // 4. Send Success Event with the new commit hash and a short summary.
    let event = WorkflowTaskEvent {
        request_id: cmd.request_id,
        task_id: cmd.task_id,
        status: TaskStatus::Completed,
        result: Some(TaskResult {
            new_commit: Some(new_commit),
            error: None,
            summary: Some(json!({
                "symbol": symbol_code,
                "records": financials.len()
            })),
        }),
    };

    publish_event(&nats, event).await
}
/// Fetches the company profile and time-series financials from YFinance,
/// then best-effort caches the profile in the persistence service.
///
/// A cache write failure is logged and swallowed on purpose: the freshly
/// fetched data is still returned to the caller.
async fn fetch_and_cache(state: &AppState, symbol: &str, _market: &str) -> Result<(CompanyProfileDto, Vec<TimeSeriesFinancialDto>)> {
    // The provider handle is cheaply cloneable and safe to share.
    let data_provider = state.yfinance_provider.clone();

    let (company_profile, financial_rows) = data_provider
        .fetch_all_data(symbol)
        .await
        .context("Failed to fetch data from YFinance")?;

    // Best-effort write-through to the DB cache.
    use common_contracts::persistence_client::PersistenceClient;
    use common_contracts::workflow_harness::TaskState; // brings get_persistence_url into scope — TODO confirm trait source
    let cache_client = PersistenceClient::new(state.get_persistence_url());
    if let Err(cache_err) = cache_client.upsert_company_profile(company_profile.clone()).await {
        warn!("Failed to cache company profile: {}", cache_err);
    }

    Ok((company_profile, financial_rows))
}
/// Logs the task failure and publishes a terminal `Failed` event carrying
/// `error_msg`, so the orchestrator can mark the task as failed.
async fn send_failure(nats: &async_nats::Client, cmd: &WorkflowTaskCommand, error_msg: &str) -> Result<()> {
    error!("Task {} failed: {}", cmd.task_id, error_msg);

    let failure_event = WorkflowTaskEvent {
        request_id: cmd.request_id,
        task_id: cmd.task_id.clone(),
        status: TaskStatus::Failed,
        result: Some(TaskResult {
            new_commit: None,
            error: Some(error_msg.to_owned()),
            summary: None,
        }),
    };

    publish_event(nats, failure_event).await
}
/// Serializes `event` as JSON and publishes it on its own NATS subject.
async fn publish_event(nats: &async_nats::Client, event: WorkflowTaskEvent) -> Result<()> {
    let topic = event.subject().to_string();
    let body = serde_json::to_vec(&event)?;
    nats.publish(topic, body.into()).await?;
    Ok(())
}

View File

@ -6,6 +6,7 @@ mod message_consumer;
// mod persistence; // Removed
mod state;
mod worker;
mod generic_worker;
mod yfinance;
use crate::config::AppConfig;

View File

@ -1,6 +1,7 @@
use crate::error::Result;
use crate::state::AppState;
use common_contracts::messages::FetchCompanyDataCommand;
use common_contracts::workflow_types::WorkflowTaskCommand;
use common_contracts::subjects::NatsSubject;
use futures_util::StreamExt;
use tracing::{error, info};
@ -11,17 +12,22 @@ pub async fn run(state: AppState) -> Result<()> {
let client = async_nats::connect(&state.config.nats_addr).await?;
info!("Connected to NATS.");
// This is a simple subscriber. For production, consider JetStream for durability.
let legacy = subscribe_legacy(state.clone(), client.clone());
let workflow = subscribe_workflow(state.clone(), client.clone());
tokio::try_join!(legacy, workflow)?;
Ok(())
}
async fn subscribe_legacy(state: AppState, client: async_nats::Client) -> Result<()> {
let subject = NatsSubject::DataFetchCommands.to_string();
let mut subscriber = client.subscribe(subject.clone()).await?;
info!(
"Consumer started, waiting for messages on subject '{}'",
subject
);
info!("Legacy Consumer started, waiting for messages on subject '{}'", subject);
while let Some(message) = subscriber.next().await {
info!("Received NATS message.");
info!("Received Legacy NATS message.");
let state_clone = state.clone();
let publisher_clone = client.clone();
@ -47,6 +53,33 @@ pub async fn run(state: AppState) -> Result<()> {
}
});
}
Ok(())
}
async fn subscribe_workflow(state: AppState, client: async_nats::Client) -> Result<()> {
let routing_key = "provider.yfinance".to_string();
let subject = NatsSubject::WorkflowCommand(routing_key).to_string();
let mut subscriber = client.subscribe(subject.clone()).await?;
info!("Workflow Consumer started on '{}'", subject);
while let Some(message) = subscriber.next().await {
info!("Received Workflow NATS message.");
let state_clone = state.clone();
let client_clone = client.clone();
tokio::spawn(async move {
match serde_json::from_slice::<WorkflowTaskCommand>(&message.payload) {
Ok(cmd) => {
if let Err(e) = crate::generic_worker::handle_workflow_command(state_clone, client_clone, cmd).await {
error!("Error handling workflow command: {:?}", e);
}
}
Err(e) => {
error!("Failed to deserialize workflow message: {}", e);
}
}
});
}
Ok(())
}

View File

@ -9,7 +9,6 @@ use common_contracts::{
};
use crate::error::{Result, AppError};
use crate::state::AppState;
use tracing::info;
pub struct YFinanceFetcher {
state: Arc<AppState>,