// Package main demonstrates semantic search over a small in-memory
// document corpus using the embedding engine.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"sort"

	"github.com/traylinx/switchAILocal/internal/intelligence/embedding"
)

// Document is a corpus entry together with its cached embedding vector.
type Document struct {
	ID        string
	Text      string
	Embedding []float32
}

// SearchResult pairs a document with its cosine similarity to the query.
type SearchResult struct {
	Document   *Document
	Similarity float64
}

func main() {
	// Delegate to run so deferred cleanup executes on error paths;
	// calling log.Fatal inline would skip defers (os.Exit runs no defers).
	if err := run(); err != nil {
		log.Fatal(err)
	}
}

// run initializes the engine, embeds the corpus and the query, and prints
// the documents ranked by similarity. It returns the first error hit.
func run() error {
	// Go does not expand "~" in file paths; resolve the user's home
	// directory explicitly and build absolute model paths.
	home, err := os.UserHomeDir()
	if err != nil {
		return fmt.Errorf("resolving home directory: %w", err)
	}
	modelDir := filepath.Join(home, ".switchailocal", "models")

	cfg := embedding.Config{
		ModelPath: filepath.Join(modelDir, "model.onnx"),
		VocabPath: filepath.Join(modelDir, "vocab.txt"),
	}
	engine, err := embedding.NewEngine(cfg)
	if err != nil {
		return fmt.Errorf("creating engine: %w", err)
	}
	if err := engine.Initialize(""); err != nil {
		return fmt.Errorf("initializing engine: %w", err)
	}
	defer engine.Shutdown()

	// Build the document corpus.
	documents := []*Document{
		{ID: "doc1", Text: "Python is a programming language"},
		{ID: "doc2", Text: "Machine learning uses neural networks"},
		{ID: "doc3", Text: "The weather is sunny today"},
		{ID: "doc4", Text: "Deep learning is a subset of AI"},
	}

	// Generate and cache an embedding for every document.
	for _, doc := range documents {
		vec, err := engine.Embed(doc.Text)
		if err != nil {
			return fmt.Errorf("embedding %s: %w", doc.ID, err)
		}
		doc.Embedding = vec
	}

	// Embed the query once, then score every document against it.
	query := "artificial intelligence and neural networks"
	queryVec, err := engine.Embed(query)
	if err != nil {
		return fmt.Errorf("embedding query: %w", err)
	}

	results := make([]SearchResult, 0, len(documents))
	for _, doc := range documents {
		results = append(results, SearchResult{
			Document:   doc,
			Similarity: engine.CosineSimilarity(queryVec, doc.Embedding),
		})
	}

	// Rank by similarity, best match first.
	sort.Slice(results, func(i, j int) bool {
		return results[i].Similarity > results[j].Similarity
	})

	fmt.Printf("Search results for: %s\n\n", query)
	for i, result := range results {
		fmt.Printf("%d. [%.3f] %s\n", i+1, result.Similarity, result.Document.Text)
	}
	return nil
}
Output:
Search results for: artificial intelligence and neural networks

1. [0.876] Machine learning uses neural networks
2. [0.823] Deep learning is a subset of AI
3. [0.234] Python is a programming language
4. [0.098] The weather is sunny today
// Config holds the file-system locations the embedding engine needs at
// startup. All paths should be absolute; "~" is not expanded by Go.
type Config struct {
	// ModelPath is the path to the ONNX model file.
	ModelPath string
	// VocabPath is the path to the vocabulary file used for tokenization.
	VocabPath string
	// SharedLibraryPath is the path to the ONNX Runtime shared library.
	// Optional — presumably a built-in default is used when empty; confirm
	// against the engine's initialization code.
	SharedLibraryPath string
}
// Defaults for the bundled sentence-embedding model.
const (
	// DefaultModelName is the default embedding model.
	DefaultModelName = "all-MiniLM-L6-v2"
	// EmbeddingDimension is the length of each output vector (384 is the
	// published output size of all-MiniLM-L6-v2).
	EmbeddingDimension = 384
	// MaxSequenceLength is the maximum number of input tokens; longer
	// inputs are presumably truncated by the tokenizer — confirm.
	MaxSequenceLength = 256
)