// init: initial commit
package handler

import (
	"context"
	"fmt"
	"log"
	"strings"
	"sync"

	"AI-Expert-Sidebar/internal/database"
	"AI-Expert-Sidebar/internal/service"

	"github.com/wailsapp/wails/v2/pkg/runtime"
)
// Expert handles search and AI streaming for the active library.
|
||||
type Expert struct {
|
||||
ctx context.Context
|
||||
stopMu sync.Mutex
|
||||
stopCancel context.CancelFunc
|
||||
}
|
||||
|
||||
func NewExpert() *Expert { return &Expert{} }
|
||||
func (e *Expert) SetContext(ctx context.Context) { e.ctx = ctx }
|
||||
|
||||
// SearchExpert fuzzy-searches the active knowledge library.
|
||||
func (e *Expert) SearchExpert(query string) []service.SearchResult {
|
||||
results, err := service.SearchKnowledge(query)
|
||||
if err != nil {
|
||||
log.Printf("[SearchExpert] %v", err)
|
||||
return nil
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
// AskDeepSeek performs RAG + streaming AI call.
|
||||
func (e *Expert) AskDeepSeek(query, rawAnswer string) string {
|
||||
aiCfg := service.ResolveAIConfig()
|
||||
knowledgeCtx := e.buildKnowledgeContext(query)
|
||||
|
||||
var userMsg string
|
||||
if rawAnswer != "" {
|
||||
userMsg = fmt.Sprintf("用户问题:%s\n\n原始参考答案:%s", query, rawAnswer)
|
||||
} else {
|
||||
userMsg = fmt.Sprintf("用户问题:%s\n\n请直接回答上述问题。", query)
|
||||
}
|
||||
messages := service.BuildRAGMessages(knowledgeCtx, userMsg, aiCfg.SystemPrompt)
|
||||
|
||||
streamCh := make(chan string, 64)
|
||||
var sb strings.Builder
|
||||
|
||||
streamCtx, cancel := context.WithCancel(e.ctx)
|
||||
e.setStopCancel(cancel)
|
||||
|
||||
go func() {
|
||||
defer func() { cancel(); close(streamCh) }()
|
||||
if err := service.CallDeepSeekStream(streamCtx, aiCfg, messages, streamCh); err != nil {
|
||||
if streamCtx.Err() == context.Canceled {
|
||||
streamCh <- "__STOPPED__"
|
||||
} else {
|
||||
log.Printf("[AskDeepSeek] %v", err)
|
||||
streamCh <- "__ERROR__"
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
for chunk := range streamCh {
|
||||
switch chunk {
|
||||
case "__ERROR__":
|
||||
runtime.EventsEmit(e.ctx, "ai:fallback", rawAnswer)
|
||||
return rawAnswer
|
||||
case "__STOPPED__":
|
||||
runtime.EventsEmit(e.ctx, "ai:done", sb.String())
|
||||
return sb.String()
|
||||
default:
|
||||
sb.WriteString(chunk)
|
||||
runtime.EventsEmit(e.ctx, "ai:chunk", chunk)
|
||||
}
|
||||
}
|
||||
runtime.EventsEmit(e.ctx, "ai:done", sb.String())
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (e *Expert) StopGeneration() {
|
||||
e.stopMu.Lock()
|
||||
defer e.stopMu.Unlock()
|
||||
if e.stopCancel != nil {
|
||||
e.stopCancel()
|
||||
e.stopCancel = nil
|
||||
}
|
||||
}
|
||||
|
||||
func (e *Expert) GetDBStatus() bool { return database.IsReady() }
|
||||
func (e *Expert) ToggleTopmost(enabled bool) {
|
||||
runtime.WindowSetAlwaysOnTop(e.ctx, enabled)
|
||||
}
|
||||
|
||||
func (e *Expert) buildKnowledgeContext(query string) string {
|
||||
results, err := service.SearchKnowledge(query)
|
||||
if err != nil || len(results) == 0 {
|
||||
return "(无相关本地知识)"
|
||||
}
|
||||
limit := 3
|
||||
if len(results) < limit {
|
||||
limit = len(results)
|
||||
}
|
||||
var sb strings.Builder
|
||||
for i := 0; i < limit; i++ {
|
||||
r := results[i]
|
||||
sb.WriteString(fmt.Sprintf("%d. Q: %s\n A: %s\n 分类: %s\n", i+1, r.Question, r.Answer, r.Category))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (e *Expert) setStopCancel(fn context.CancelFunc) {
|
||||
e.stopMu.Lock()
|
||||
defer e.stopMu.Unlock()
|
||||
if e.stopCancel != nil {
|
||||
e.stopCancel()
|
||||
}
|
||||
e.stopCancel = fn
|
||||
}
|
||||