/**
 * Embedding service for semantic memory similarity.
 * Uses OpenAI-compatible embeddings API (can use OpenRouter or OpenAI directly).
 */
import OpenAI from "openai";
import { config } from "../../core/config";
import { createLogger } from "../../core/logger";
const logger = createLogger("AI:Embeddings");
|
|
|
|
// Embedding model to use (OpenRouter supports several embedding models)
|
|
const EMBEDDING_MODEL = "openai/text-embedding-3-small";
|
|
const EMBEDDING_DIMENSIONS = 1536;
|
|
|
|
/**
|
|
* OpenRouter-based embedding provider
|
|
*/
|
|
class EmbeddingService {
|
|
private client: OpenAI;
|
|
private enabled: boolean;
|
|
|
|
constructor() {
|
|
this.client = new OpenAI({
|
|
baseURL: "https://openrouter.ai/api/v1",
|
|
apiKey: config.ai.openRouterApiKey,
|
|
defaultHeaders: {
|
|
"HTTP-Referer": "https://github.com/crunk-bun",
|
|
"X-Title": "Joel Discord Bot",
|
|
},
|
|
});
|
|
this.enabled = true;
|
|
}
|
|
|
|
/**
|
|
* Generate an embedding for a piece of text
|
|
*/
|
|
async embed(text: string): Promise<number[] | null> {
|
|
if (!this.enabled) {
|
|
return null;
|
|
}
|
|
|
|
try {
|
|
const response = await this.client.embeddings.create({
|
|
model: EMBEDDING_MODEL,
|
|
input: text.slice(0, 8000), // Limit input length
|
|
});
|
|
|
|
const embedding = response.data[0]?.embedding;
|
|
|
|
if (!embedding) {
|
|
logger.warn("No embedding returned from API");
|
|
return null;
|
|
}
|
|
|
|
logger.debug("Generated embedding", {
|
|
textLength: text.length,
|
|
dimensions: embedding.length
|
|
});
|
|
|
|
return embedding;
|
|
} catch (error) {
|
|
// If embeddings fail, disable and log - don't crash
|
|
logger.error("Failed to generate embedding", error);
|
|
|
|
// Check if it's a model not available error
|
|
const err = error as Error & { status?: number };
|
|
if (err.status === 404 || err.message?.includes("not available")) {
|
|
logger.warn("Embedding model not available, disabling embeddings");
|
|
this.enabled = false;
|
|
}
|
|
|
|
return null;
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Generate embeddings for multiple texts in batch
|
|
*/
|
|
async embedBatch(texts: string[]): Promise<(number[] | null)[]> {
|
|
if (!this.enabled || texts.length === 0) {
|
|
return texts.map(() => null);
|
|
}
|
|
|
|
try {
|
|
const response = await this.client.embeddings.create({
|
|
model: EMBEDDING_MODEL,
|
|
input: texts.map(t => t.slice(0, 8000)),
|
|
});
|
|
|
|
return response.data.map(d => d.embedding);
|
|
} catch (error) {
|
|
logger.error("Failed to generate batch embeddings", error);
|
|
return texts.map(() => null);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Calculate cosine similarity between two embedding vectors
|
|
*/
|
|
cosineSimilarity(a: number[], b: number[]): number {
|
|
if (a.length !== b.length) {
|
|
throw new Error("Embeddings must have the same dimensions");
|
|
}
|
|
|
|
let dotProduct = 0;
|
|
let normA = 0;
|
|
let normB = 0;
|
|
|
|
for (let i = 0; i < a.length; i++) {
|
|
dotProduct += a[i] * b[i];
|
|
normA += a[i] * a[i];
|
|
normB += b[i] * b[i];
|
|
}
|
|
|
|
normA = Math.sqrt(normA);
|
|
normB = Math.sqrt(normB);
|
|
|
|
if (normA === 0 || normB === 0) {
|
|
return 0;
|
|
}
|
|
|
|
return dotProduct / (normA * normB);
|
|
}
|
|
|
|
/**
|
|
* Check if embeddings are enabled and working
|
|
*/
|
|
isEnabled(): boolean {
|
|
return this.enabled;
|
|
}
|
|
}
|
|
|
|
// Singleton instance
|
|
let embeddingService: EmbeddingService | null = null;
|
|
|
|
export function getEmbeddingService(): EmbeddingService {
|
|
if (!embeddingService) {
|
|
embeddingService = new EmbeddingService();
|
|
}
|
|
return embeddingService;
|
|
}
|
|
|
|
export { EmbeddingService, EMBEDDING_DIMENSIONS };
|