brachnha-insight/src/store/use-chat.ts

import { create } from 'zustand';
import { persist, createJSONStorage } from 'zustand/middleware';
import { v4 as uuidv4 } from 'uuid';
import { LLMService } from '@/services/llm-service';
// --- Types ---
export type MessageRole = 'user' | 'assistant' | 'system';
export type MessageType = 'text' | 'thought' | 'draft';
export interface Message {
  id: string;
  role: MessageRole;
  content: string;
  type?: MessageType;
  createdAt: string;
}
export type ChatPhase = 'idle' | 'input' | 'elicitation' | 'drafting' | 'review';
export interface DraftArtifact {
  title: string;
  insight: string;
  lesson: string;
}
interface ChatState {
  // State
  messages: Message[];
  phase: ChatPhase;
  isTyping: boolean;
  currentDraft: DraftArtifact | null;

  // Actions
  addMessage: (role: MessageRole, content: string, type?: MessageType) => void;
  setPhase: (phase: ChatPhase) => void;
  resetSession: () => void;
  generateDraft: () => Promise<void>;
  sendMessage: (content: string) => Promise<void>;
  updateDraft: (draft: DraftArtifact) => void;
}
// --- Store ---
export const useChatStore = create<ChatState>()(
  persist(
    (set, get) => ({
      // Initial State
      messages: [],
      phase: 'idle',
      isTyping: false,
      currentDraft: null,

      // Actions
      addMessage: (role, content, type = 'text') => {
        const newMessage: Message = {
          id: uuidv4(),
          role,
          content,
          type,
          createdAt: new Date().toISOString(),
        };
        set((state) => ({ messages: [...state.messages, newMessage] }));
      },

      setPhase: (phase) => set({ phase }),

      resetSession: () =>
        set({
          messages: [],
          phase: 'idle',
          isTyping: false,
          currentDraft: null,
        }),

      updateDraft: (draft) => set({ currentDraft: draft }),
      sendMessage: async (content) => {
        // `messages` is read before the new user message is appended, so it
        // holds the conversation history *prior* to this turn.
        const { addMessage, messages } = get();

        // 1. Add the user message
        addMessage('user', content);
        set({ isTyping: true, phase: 'elicitation' });

        try {
          // 2. Call the Teacher Agent via the LLM service.
          // We pass the prior history only; getTeacherResponseStream appends the
          // new user turn itself ([...history, { role: 'user', content }]).
          // Note: a real streaming implementation would update the assistant
          // message incrementally from onToken (see the sketch after this
          // action); for now we wait for the full response in onComplete.
          await LLMService.getTeacherResponseStream(
            content,
            messages.map((m) => ({ role: m.role, content: m.content })),
            {
              onToken: () => {},
              onComplete: (fullText) => {
                addMessage('assistant', fullText);
                set({ isTyping: false });
              },
              onError: (error) => {
                console.error('Teacher Agent Error:', error);
                addMessage(
                  'assistant',
                  "I'm having trouble connecting to my brain right now. Please check your settings."
                );
                set({ isTyping: false });
              },
            }
          );
        } catch (error) {
          // Fallback for errors thrown outside the streaming callbacks
          console.error('Teacher Agent Error:', error);
          set({ isTyping: false });
        }
      },
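      // Illustrative sketch only (not wired up): if incremental streaming is
      // wanted, onToken could patch a placeholder assistant message in place.
      // This assumes onToken is called with each new token string.
      //
      //   const id = uuidv4();
      //   set((s) => ({
      //     messages: [
      //       ...s.messages,
      //       { id, role: 'assistant', content: '', createdAt: new Date().toISOString() },
      //     ],
      //   }));
      //   const onToken = (token: string) =>
      //     set((s) => ({
      //       messages: s.messages.map((m) =>
      //         m.id === id ? { ...m, content: m.content + token } : m
      //       ),
      //     }));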
      generateDraft: async () => {
        const { messages, setPhase, updateDraft } = get();
        setPhase('drafting');
        set({ isTyping: true });

        try {
          // Call the Ghostwriter Agent via the LLM service
          const draft = await LLMService.generateDraft(
            messages.map((m) => ({ role: m.role, content: m.content }))
          );
          updateDraft(draft);
          setPhase('review');
          set({ isTyping: false });
        } catch (error) {
          console.error('Ghostwriter Error:', error);
          // On failure, fall back to idle so the user can retry
          set({ isTyping: false, phase: 'idle' });
        }
      },
    }),
    {
      name: 'test01-chat-storage',
      storage: createJSONStorage(() => localStorage),
      // Persist messages, phase, and the current draft so a page refresh
      // restores the session. isTyping is transient and is not persisted.
      // (If reloads ever land in a stuck state, phase could be dropped here.)
      partialize: (state) => ({
        messages: state.messages,
        phase: state.phase,
        currentDraft: state.currentDraft,
      }),
    }
  )
);
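
// Example usage (illustrative sketch, not part of this module). The store can
// be consumed as a hook inside React components, or imperatively via
// useChatStore.getState() outside React. The message text below is made up.
//
//   const { messages, isTyping, sendMessage, generateDraft } = useChatStore();
//   await sendMessage('Here is what I learned from today...');
//   await generateDraft();
//
//   // Imperative access, e.g. from a non-React helper:
//   useChatStore.getState().resetSession();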