init: initial commit

This commit is contained in:
Blizzard
2026-04-07 17:35:09 +08:00
commit 680ecc320f
129 changed files with 10562 additions and 0 deletions
+120
View File
@@ -0,0 +1,120 @@
package vector
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"time"
)
// EmbeddingService calls Ollama or OpenAI-compatible APIs for embeddings.
type EmbeddingService struct {
client *http.Client
}
// NewEmbeddingService creates an embedding service.
func NewEmbeddingService() *EmbeddingService {
return &EmbeddingService{
client: &http.Client{Timeout: 60 * time.Second},
}
}
// ollamaEmbedReq is the request body for the Ollama embedding API
// (POST /api/embeddings).
type ollamaEmbedReq struct {
	Model  string `json:"model"`
	Prompt string `json:"prompt"`
}

// ollamaEmbedResp is the response body: a single embedding vector.
type ollamaEmbedResp struct {
	Embedding []float32 `json:"embedding"`
}
// openAIEmbedReq is the request body for OpenAI-compatible embedding APIs
// (POST /v1/embeddings).
type openAIEmbedReq struct {
	Model string `json:"model"`
	Input string `json:"input"`
}

// openAIEmbedResp is the response body; only the embedding vectors inside
// the "data" array are decoded, all other fields are ignored.
type openAIEmbedResp struct {
	Data []struct {
		Embedding []float32 `json:"embedding"`
	} `json:"data"`
}
// GetEmbedding generates an embedding vector for the given text.
// provider: "ollama" (or legacy "Ollama") selects the Ollama API; any other
// value falls through to the OpenAI-compatible API.
//
// Fix: the switch previously matched only "Ollama", so the documented
// lowercase "ollama" silently routed to the OpenAI path.
func (s *EmbeddingService) GetEmbedding(text, baseURL, model, apiKey, provider string) ([]float32, error) {
	switch provider {
	case "ollama", "Ollama":
		return s.ollamaEmbed(text, baseURL, model)
	default:
		return s.openAIEmbed(text, baseURL, model, apiKey)
	}
}
// ollamaEmbed requests an embedding from an Ollama server's /api/embeddings
// endpoint. baseURL is the server root (e.g. "http://localhost:11434").
// Returns an error for transport failures, non-200 responses, malformed
// JSON, or an empty embedding.
func (s *EmbeddingService) ollamaEmbed(text, baseURL, model string) ([]float32, error) {
	body, err := json.Marshal(ollamaEmbedReq{Model: model, Prompt: text})
	if err != nil {
		return nil, fmt.Errorf("encode ollama request: %w", err)
	}
	resp, err := s.client.Post(baseURL+"/api/embeddings", "application/json", bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("ollama embed request: %w", err)
	}
	defer resp.Body.Close()
	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("read ollama response: %w", err)
	}
	// Surface API errors explicitly instead of trying (and failing) to parse
	// an error body as an embedding response.
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("ollama embed: status %d: %s", resp.StatusCode, data)
	}
	var result ollamaEmbedResp
	if err := json.Unmarshal(data, &result); err != nil {
		return nil, fmt.Errorf("parse ollama response: %w", err)
	}
	if len(result.Embedding) == 0 {
		return nil, fmt.Errorf("empty embedding returned")
	}
	return result.Embedding, nil
}
// openAIEmbed requests an embedding from an OpenAI-compatible /v1/embeddings
// endpoint. apiKey, when non-empty, is sent as a Bearer token.
// Returns an error for transport failures, non-200 responses, malformed
// JSON, or an empty result set.
func (s *EmbeddingService) openAIEmbed(text, baseURL, model, apiKey string) ([]float32, error) {
	body, err := json.Marshal(openAIEmbedReq{Model: model, Input: text})
	if err != nil {
		return nil, fmt.Errorf("encode openai request: %w", err)
	}
	req, err := http.NewRequest("POST", baseURL+"/v1/embeddings", bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("build openai request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	if apiKey != "" {
		req.Header.Set("Authorization", "Bearer "+apiKey)
	}
	resp, err := s.client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("openai embed request: %w", err)
	}
	defer resp.Body.Close()
	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("read openai response: %w", err)
	}
	// Report API-level failures (auth, unknown model, rate limit) explicitly.
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("openai embed: status %d: %s", resp.StatusCode, data)
	}
	var result openAIEmbedResp
	if err := json.Unmarshal(data, &result); err != nil {
		return nil, fmt.Errorf("parse openai response: %w", err)
	}
	if len(result.Data) == 0 || len(result.Data[0].Embedding) == 0 {
		return nil, fmt.Errorf("empty embedding returned")
	}
	return result.Data[0].Embedding, nil
}
// ChunkText splits text into overlapping chunks for vectorization.
// chunkSize: target characters (runes) per chunk; overlap: characters shared
// between consecutive chunks. Text that fits in chunkSize is returned as a
// single chunk.
//
// Fixes: overlap >= chunkSize previously made the loop never advance
// (infinite loop); chunkSize <= 0 could loop forever or panic; and a final
// chunk fully contained in its predecessor was sometimes emitted. The step
// is now clamped to at least 1, degenerate sizes fall back to 1, and the
// loop stops once the remainder has been emitted.
func ChunkText(text string, chunkSize, overlap int) []string {
	if chunkSize < 1 {
		chunkSize = 1 // degenerate size: fall back to single-rune chunks
	}
	runes := []rune(text)
	if len(runes) <= chunkSize {
		return []string{text}
	}
	step := chunkSize - overlap
	if step < 1 {
		step = 1 // overlap >= chunkSize would otherwise never advance
	}
	var chunks []string
	for start := 0; start < len(runes); start += step {
		end := start + chunkSize
		if end >= len(runes) {
			// Final chunk: take the remainder and stop, rather than emit
			// further chunks fully contained in this one.
			chunks = append(chunks, string(runes[start:]))
			break
		}
		chunks = append(chunks, string(runes[start:end]))
	}
	return chunks
}
+93
View File
@@ -0,0 +1,93 @@
package vector
import (
"context"
"fmt"
"engimind/internal/models"
)
// ContextChunk is a search result with source metadata, serialized for
// API consumers.
type ContextChunk struct {
	Text     string `json:"text"`     // chunk text retrieved from the vector store
	SourceID string `json:"sourceId"` // ID of the source document the chunk came from
	// NOTE(review): Score is declared but SearchContext never fills it in —
	// results always carry Score == 0. Confirm whether callers depend on it.
	Score float32 `json:"score"`
}
// RAGService orchestrates embedding + vector search for retrieval.
type RAGService struct {
	embedding *EmbeddingService // produces vectors for documents and queries
	store     *QdrantStore      // persists and searches those vectors
}

// NewRAGService creates a RAG service from its two collaborators.
func NewRAGService(embedding *EmbeddingService, store *QdrantStore) *RAGService {
	return &RAGService{embedding: embedding, store: store}
}
// CollectionName returns the Qdrant collection name for a project.
// Each project gets its own collection, namespaced with an "engimind_"
// prefix to avoid clashing with unrelated collections on the same server.
func CollectionName(projectID string) string {
	const prefix = "engimind_"
	return fmt.Sprintf("%s%s", prefix, projectID)
}
// IndexDocument chunks and indexes a parsed document.
// The content is split into ~500-character chunks with 50 characters of
// overlap; each chunk is embedded via the configured provider and upserted
// into the project's collection (created on demand).
func (s *RAGService) IndexDocument(ctx context.Context, projectID string, source models.SourceFile, content string, embeddingCfg EmbeddingConfig) error {
	collection := CollectionName(projectID)
	if err := s.store.EnsureCollection(ctx, collection); err != nil {
		return err
	}
	pieces := ChunkText(content, 500, 50)
	chunks := make([]Chunk, 0, len(pieces))
	for i, piece := range pieces {
		vec, err := s.embedding.GetEmbedding(
			piece, embeddingCfg.BaseURL, embeddingCfg.Model,
			embeddingCfg.APIKey, embeddingCfg.Provider,
		)
		if err != nil {
			return fmt.Errorf("embed chunk %d: %w", i, err)
		}
		chunks = append(chunks, Chunk{
			ID:       fmt.Sprintf("%s-chunk-%d", source.ID, i),
			SourceID: source.ID,
			Text:     piece,
			Vector:   vec,
		})
	}
	return s.store.Insert(ctx, collection, chunks)
}
// SearchContext retrieves relevant text chunks for a query.
// The question is embedded with the same provider/model used at indexing
// time, then matched against the project's collection.
func (s *RAGService) SearchContext(ctx context.Context, projectID, question string, topK int, embeddingCfg EmbeddingConfig) ([]ContextChunk, error) {
	queryVec, err := s.embedding.GetEmbedding(
		question, embeddingCfg.BaseURL, embeddingCfg.Model,
		embeddingCfg.APIKey, embeddingCfg.Provider,
	)
	if err != nil {
		return nil, fmt.Errorf("embed query: %w", err)
	}
	hits, err := s.store.Search(ctx, CollectionName(projectID), queryVec, uint64(topK))
	if err != nil {
		return nil, err
	}
	out := make([]ContextChunk, len(hits))
	for i, hit := range hits {
		// NOTE(review): Score stays at its zero value — store.Search does not
		// return one. Confirm whether callers expect a real similarity score.
		out[i] = ContextChunk{Text: hit.Text, SourceID: hit.SourceID}
	}
	return out, nil
}
// EmbeddingConfig holds the config needed to call an embedding API.
type EmbeddingConfig struct {
	BaseURL  string // API root URL, e.g. an Ollama server or OpenAI-compatible base
	Model    string // embedding model name passed through to the API
	APIKey   string // bearer token; empty means no Authorization header is sent
	Provider string // provider name; routing is decided by EmbeddingService.GetEmbedding
}
+146
View File
@@ -0,0 +1,146 @@
package vector
import (
	"context"
	"crypto/md5"
	"fmt"
	"log/slog"

	pb "github.com/qdrant/go-client/qdrant"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)
// Chunk is a text segment with its vector.
type Chunk struct {
	ID       string    // caller-assigned identifier, e.g. "sourceID-chunk-N"
	SourceID string    // ID of the originating source document
	Text     string    // raw chunk text (also stored in the point payload)
	Vector   []float32 // embedding vector for Text
}
// QdrantStore implements vector storage via remote Qdrant gRPC.
type QdrantStore struct {
	conn       *grpc.ClientConn     // shared gRPC connection; released by Close
	points     pb.PointsClient      // point-level operations: upsert, search
	collection pb.CollectionsClient // collection-level operations: get, create, delete, list
	dimension  uint64               // vector size used when creating collections
}
// NewQdrantStore connects to a Qdrant instance.
// endpoint is a host:port gRPC address; dimension is the vector size used
// when creating collections. The connection is plaintext (no TLS).
func NewQdrantStore(endpoint string, dimension uint64) (*QdrantStore, error) {
	conn, err := grpc.NewClient(endpoint, grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		return nil, fmt.Errorf("connect qdrant: %w", err)
	}
	store := &QdrantStore{
		conn:       conn,
		points:     pb.NewPointsClient(conn),
		collection: pb.NewCollectionsClient(conn),
		dimension:  dimension,
	}
	return store, nil
}
// EnsureCollection creates a collection if it doesn't exist.
// New collections use cosine distance and the store's configured vector
// dimension.
func (s *QdrantStore) EnsureCollection(ctx context.Context, name string) error {
	_, err := s.collection.Get(ctx, &pb.GetCollectionInfoRequest{CollectionName: name})
	if err == nil {
		return nil // already exists
	}
	// NOTE(review): any Get error — including transient network failures, not
	// just "collection not found" — falls through to a Create attempt here.
	// Checking the gRPC NotFound status code would avoid spurious creates;
	// confirm whether this best-effort behavior is intended.
	_, err = s.collection.Create(ctx, &pb.CreateCollection{
		CollectionName: name,
		VectorsConfig: &pb.VectorsConfig{
			Config: &pb.VectorsConfig_Params{
				Params: &pb.VectorParams{
					Size:     s.dimension,
					Distance: pb.Distance_Cosine,
				},
			},
		},
	})
	if err != nil {
		return fmt.Errorf("create collection %s: %w", name, err)
	}
	slog.Info("created qdrant collection", "name", name, "dim", s.dimension)
	return nil
}
// deterministicUUID hashes an arbitrary string into a UUID-formatted string.
// Qdrant point IDs must be either an unsigned integer or a valid UUID;
// free-form strings such as "sourceID-chunk-0" are rejected by the server.
// md5 is used for a stable mapping (not for security), so the same chunk ID
// always maps to the same point and re-indexing overwrites rather than
// duplicates.
func deterministicUUID(id string) string {
	sum := md5.Sum([]byte(id))
	return fmt.Sprintf("%x-%x-%x-%x-%x", sum[0:4], sum[4:6], sum[6:8], sum[8:10], sum[10:16])
}

// Insert upserts chunks into the specified collection.
// Chunk IDs are hashed into UUID form (see deterministicUUID) because the
// previous free-form "%s-chunk-%d" IDs are not valid Qdrant point IDs; the
// original ID is preserved in the "chunk_id" payload field. An empty batch
// is a no-op.
func (s *QdrantStore) Insert(ctx context.Context, collectionName string, chunks []Chunk) error {
	if len(chunks) == 0 {
		return nil // nothing to upsert
	}
	points := make([]*pb.PointStruct, len(chunks))
	for i, c := range chunks {
		points[i] = &pb.PointStruct{
			Id: &pb.PointId{
				PointIdOptions: &pb.PointId_Uuid{Uuid: deterministicUUID(c.ID)},
			},
			Vectors: &pb.Vectors{
				VectorsOptions: &pb.Vectors_Vector{
					Vector: &pb.Vector{Data: c.Vector},
				},
			},
			Payload: map[string]*pb.Value{
				"text":      {Kind: &pb.Value_StringValue{StringValue: c.Text}},
				"source_id": {Kind: &pb.Value_StringValue{StringValue: c.SourceID}},
				"chunk_id":  {Kind: &pb.Value_StringValue{StringValue: c.ID}},
			},
		}
	}
	_, err := s.points.Upsert(ctx, &pb.UpsertPoints{
		CollectionName: collectionName,
		Points:         points,
	})
	if err != nil {
		return fmt.Errorf("upsert %d points into %s: %w", len(points), collectionName, err)
	}
	return nil
}
// Search performs KNN search and returns up to topK results with their
// payload text and source IDs restored from the stored payload fields.
func (s *QdrantStore) Search(ctx context.Context, collectionName string, queryVec []float32, topK uint64) ([]Chunk, error) {
	req := &pb.SearchPoints{
		CollectionName: collectionName,
		Vector:         queryVec,
		Limit:          topK,
		WithPayload:    &pb.WithPayloadSelector{SelectorOptions: &pb.WithPayloadSelector_Enable{Enable: true}},
	}
	resp, err := s.points.Search(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("qdrant search: %w", err)
	}
	chunks := make([]Chunk, 0, len(resp.Result))
	for _, hit := range resp.Result {
		var text, sourceID string
		if v, ok := hit.Payload["text"]; ok {
			text = v.GetStringValue()
		}
		if v, ok := hit.Payload["source_id"]; ok {
			sourceID = v.GetStringValue()
		}
		// NOTE(review): the hit's similarity score is available here but is
		// discarded — Chunk has no Score field. Consider surfacing it.
		chunks = append(chunks, Chunk{
			ID:       hit.Id.GetUuid(),
			SourceID: sourceID,
			Text:     text,
		})
	}
	return chunks, nil
}
// DeleteCollection removes a collection and all points stored in it.
// The error is wrapped with the collection name for consistency with
// EnsureCollection's error reporting (errors.Is/As still see the cause).
func (s *QdrantStore) DeleteCollection(ctx context.Context, name string) error {
	if _, err := s.collection.Delete(ctx, &pb.DeleteCollection{CollectionName: name}); err != nil {
		return fmt.Errorf("delete collection %s: %w", name, err)
	}
	return nil
}
// Close releases the underlying gRPC connection, if one was established.
// Safe to call on a store whose connection is nil.
func (s *QdrantStore) Close() {
	if s.conn == nil {
		return
	}
	// Close error deliberately dropped: there is no recovery on shutdown.
	_ = s.conn.Close()
}
// TestConnection verifies the Qdrant server is reachable by issuing a
// cheap list-collections call. Returns (true, nil) on success and
// (false, err) when the server cannot be reached.
func (s *QdrantStore) TestConnection(ctx context.Context) (bool, error) {
	if _, err := s.collection.List(ctx, &pb.ListCollectionsRequest{}); err != nil {
		return false, err
	}
	return true, nil
}