use klipy instead of tenor

This commit is contained in:
2026-02-01 18:31:53 +01:00
parent 0c0efa645a
commit 79efc479f4
17 changed files with 1062 additions and 27 deletions

View File

@@ -16,6 +16,9 @@ interface BotConfig {
maxTokens: number;
temperature: number;
};
klipy: {
apiKey: string;
};
bot: {
/** Chance of Joel responding without being mentioned (0-1) */
freeWillChance: number;
@@ -55,15 +58,18 @@ export const config: BotConfig = {
openRouterApiKey: getEnvOrThrow("OPENROUTER_API_KEY"),
model: getEnvOrDefault(
"AI_MODEL",
"meta-llama/llama-3.1-70b-instruct"
"x-ai/grok-4.1-fast"
),
classificationModel: getEnvOrDefault(
"AI_CLASSIFICATION_MODEL",
"meta-llama/llama-3.1-8b-instruct:free"
"google/gemma-3-12b-it:free" // Free model, good for simple classification
),
maxTokens: parseInt(getEnvOrDefault("AI_MAX_TOKENS", "500")),
temperature: parseFloat(getEnvOrDefault("AI_TEMPERATURE", "1.2")),
},
klipy: {
apiKey: getEnvOrDefault("KLIPY_API_KEY", ""),
},
bot: {
freeWillChance: 0.02,
memoryChance: 0.3,

Binary file not shown.

View File

@@ -0,0 +1,2 @@
-- Add gif_search_enabled column to bot_options
-- Integer boolean (0 = disabled, 1 = enabled); default 0 matches the Drizzle
-- schema default so existing guilds keep GIF search off until opted in.
ALTER TABLE `bot_options` ADD `gif_search_enabled` integer DEFAULT 0;

View File

@@ -0,0 +1,519 @@
{
"version": "6",
"dialect": "sqlite",
"id": "f3938d6d-aa4e-461e-bc5f-6e583e0eacf0",
"prevId": "e2827c5c-cc3c-451c-bc4f-5d472d09d7df",
"tables": {
"bot_options": {
"name": "bot_options",
"columns": {
"guild_id": {
"name": "guild_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"active_personality_id": {
"name": "active_personality_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"free_will_chance": {
"name": "free_will_chance",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 2
},
"memory_chance": {
"name": "memory_chance",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 30
},
"mention_probability": {
"name": "mention_probability",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"gif_search_enabled": {
"name": "gif_search_enabled",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"updated_at": {
"name": "updated_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(current_timestamp)"
}
},
"indexes": {},
"foreignKeys": {
"bot_options_guild_id_guilds_id_fk": {
"name": "bot_options_guild_id_guilds_id_fk",
"tableFrom": "bot_options",
"tableTo": "guilds",
"columnsFrom": ["guild_id"],
"columnsTo": ["id"],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"guilds": {
"name": "guilds",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"membership": {
"name": "membership",
"columns": {
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"guild_id": {
"name": "guild_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"user_guild_idx": {
"name": "user_guild_idx",
"columns": ["user_id", "guild_id"],
"isUnique": false
},
"user_guild_unique": {
"name": "user_guild_unique",
"columns": ["user_id", "guild_id"],
"isUnique": true
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"memories": {
"name": "memories",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"content": {
"name": "content",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"category": {
"name": "category",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'general'"
},
"importance": {
"name": "importance",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 5
},
"source_message_id": {
"name": "source_message_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"guild_id": {
"name": "guild_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(current_timestamp)"
},
"last_accessed_at": {
"name": "last_accessed_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"access_count": {
"name": "access_count",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"embedding": {
"name": "embedding",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"memory_user_idx": {
"name": "memory_user_idx",
"columns": ["user_id"],
"isUnique": false
},
"memory_guild_idx": {
"name": "memory_guild_idx",
"columns": ["guild_id"],
"isUnique": false
},
"memory_user_importance_idx": {
"name": "memory_user_importance_idx",
"columns": ["user_id", "importance"],
"isUnique": false
},
"memory_category_idx": {
"name": "memory_category_idx",
"columns": ["category"],
"isUnique": false
},
"memory_user_category_idx": {
"name": "memory_user_category_idx",
"columns": ["user_id", "category"],
"isUnique": false
}
},
"foreignKeys": {
"memories_user_id_users_id_fk": {
"name": "memories_user_id_users_id_fk",
"tableFrom": "memories",
"tableTo": "users",
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "no action",
"onUpdate": "no action"
},
"memories_guild_id_guilds_id_fk": {
"name": "memories_guild_id_guilds_id_fk",
"tableFrom": "memories",
"tableTo": "guilds",
"columnsFrom": ["guild_id"],
"columnsTo": ["id"],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"messages": {
"name": "messages",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"content": {
"name": "content",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"timestamp": {
"name": "timestamp",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(current_timestamp)"
},
"channel_id": {
"name": "channel_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"guild_id": {
"name": "guild_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {
"channel_timestamp_idx": {
"name": "channel_timestamp_idx",
"columns": ["channel_id", "timestamp"],
"isUnique": false
}
},
"foreignKeys": {
"messages_user_id_users_id_fk": {
"name": "messages_user_id_users_id_fk",
"tableFrom": "messages",
"tableTo": "users",
"columnsFrom": ["user_id"],
"columnsTo": ["id"],
"onDelete": "no action",
"onUpdate": "no action"
},
"messages_guild_id_guilds_id_fk": {
"name": "messages_guild_id_guilds_id_fk",
"tableFrom": "messages",
"tableTo": "guilds",
"columnsFrom": ["guild_id"],
"columnsTo": ["id"],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"personalities": {
"name": "personalities",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"guild_id": {
"name": "guild_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"system_prompt": {
"name": "system_prompt",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(current_timestamp)"
},
"updated_at": {
"name": "updated_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(current_timestamp)"
}
},
"indexes": {
"personality_guild_idx": {
"name": "personality_guild_idx",
"columns": ["guild_id"],
"isUnique": false
}
},
"foreignKeys": {
"personalities_guild_id_guilds_id_fk": {
"name": "personalities_guild_id_guilds_id_fk",
"tableFrom": "personalities",
"tableTo": "guilds",
"columnsFrom": ["guild_id"],
"columnsTo": ["id"],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"users": {
"name": "users",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"opt_out": {
"name": "opt_out",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"web_sessions": {
"name": "web_sessions",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"access_token": {
"name": "access_token",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"refresh_token": {
"name": "refresh_token",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"expires_at": {
"name": "expires_at",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "(current_timestamp)"
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View File

@@ -29,6 +29,13 @@
"when": 1769964737832,
"tag": "0003_silky_sauron",
"breakpoints": true
},
{
"idx": 4,
"version": "6",
"when": 1770048000000,
"tag": "0004_add_gif_search",
"breakpoints": true
}
]
}
}

View File

@@ -6,6 +6,7 @@
import { desc, eq, and, like, sql, asc } from "drizzle-orm";
import { db } from "../connection";
import { memories, type InsertMemory, type Memory } from "../schema";
import { getEmbeddingService } from "../../services/ai/embeddings";
export type MemoryCategory =
| "personal" // Personal info: name, age, location
@@ -38,9 +39,21 @@ export interface MemorySearchOptions {
export const memoryRepository = {
/**
* Create a new memory with full options
* Automatically generates embeddings for semantic search
*/
async create(options: CreateMemoryOptions): Promise<Memory> {
const id = crypto.randomUUID();
// Generate embedding if not provided
let embedding = options.embedding;
if (!embedding) {
const embeddingService = getEmbeddingService();
const generated = await embeddingService.embed(options.content);
if (generated) {
embedding = generated;
}
}
const memory: InsertMemory = {
id,
user_id: options.userId,
@@ -49,7 +62,7 @@ export const memoryRepository = {
category: options.category || "general",
importance: options.importance || 5,
source_message_id: options.sourceMessageId,
embedding: options.embedding ? JSON.stringify(options.embedding) : null,
embedding: embedding ? JSON.stringify(embedding) : null,
access_count: 0,
};
@@ -168,11 +181,53 @@ export const memoryRepository = {
},
/**
* Check for duplicate or similar memories
* Check for duplicate or similar memories using embedding similarity
* Falls back to substring matching if embeddings are unavailable
*/
async findSimilar(userId: string, content: string): Promise<Memory[]> {
// Simple substring match for now
// TODO: Use embedding similarity when embeddings are implemented
async findSimilar(userId: string, content: string, threshold = 0.85): Promise<Memory[]> {
const embeddingService = getEmbeddingService();
// Try embedding-based similarity first
if (embeddingService.isEnabled()) {
const contentEmbedding = await embeddingService.embed(content);
if (contentEmbedding) {
// Get all memories with embeddings for this user
const userMemories = await db
.select()
.from(memories)
.where(
and(
eq(memories.user_id, userId),
sql`${memories.embedding} IS NOT NULL`
)
);
// Calculate similarity scores
const similarities: Array<{ memory: Memory; similarity: number }> = [];
for (const memory of userMemories) {
if (memory.embedding) {
try {
const memoryEmbedding = JSON.parse(memory.embedding) as number[];
const similarity = embeddingService.cosineSimilarity(contentEmbedding, memoryEmbedding);
if (similarity >= threshold) {
similarities.push({ memory, similarity });
}
} catch {
// Skip memories with invalid embeddings
}
}
}
// Sort by similarity and return top matches
similarities.sort((a, b) => b.similarity - a.similarity);
return similarities.slice(0, 5).map(s => s.memory);
}
}
// Fallback to substring matching
const searchTerm = content.toLowerCase().slice(0, 100);
return db
@@ -197,6 +252,65 @@ export const memoryRepository = {
.where(eq(memories.id, memoryId));
},
/**
* Semantic search - find memories by meaning using embeddings.
* Falls back to keyword search (memoryRepository.search) if embeddings are
* unavailable or the query embedding cannot be generated.
*
* @param query - free-text query to match against stored memory embeddings
* @param options.userId - restrict results to one user's memories
* @param options.guildId - restrict results to one guild's memories
* @param options.limit - max results returned (default 10)
* @param options.minSimilarity - cosine-similarity cutoff in [0,1] (default 0.7);
*   only applied on the embedding path, not the keyword fallback
* @returns memories sorted by descending similarity; `similarity` is present
*   only on the embedding path
*/
async semanticSearch(
query: string,
options: {
userId?: string;
guildId?: string;
limit?: number;
minSimilarity?: number;
} = {}
): Promise<Array<Memory & { similarity?: number }>> {
const { userId, guildId, limit = 10, minSimilarity = 0.7 } = options;
const embeddingService = getEmbeddingService();
if (embeddingService.isEnabled()) {
const queryEmbedding = await embeddingService.embed(query);
if (queryEmbedding) {
// Build conditions; embedding IS NOT NULL keeps un-embedded rows out of the scan
const conditions = [sql`${memories.embedding} IS NOT NULL`];
if (userId) conditions.push(eq(memories.user_id, userId));
if (guildId) conditions.push(eq(memories.guild_id, guildId));
// Get all relevant memories with embeddings.
// NOTE(review): this loads every matching row and scores it in JS — fine at
// small scale, but revisit (e.g. a vector index) if the memories table grows.
const allMemories = await db
.select()
.from(memories)
.where(and(...conditions));
// Calculate cosine-similarity scores against the query embedding
const results: Array<Memory & { similarity: number }> = [];
for (const memory of allMemories) {
if (memory.embedding) {
try {
const memoryEmbedding = JSON.parse(memory.embedding) as number[];
const similarity = embeddingService.cosineSimilarity(queryEmbedding, memoryEmbedding);
if (similarity >= minSimilarity) {
results.push({ ...memory, similarity });
}
} catch {
// Skip rows whose stored embedding is not valid JSON
}
}
}
// Sort by similarity (best first) and cap at `limit`
results.sort((a, b) => b.similarity - a.similarity);
return results.slice(0, limit);
}
}
// Fallback to keyword search when embeddings are disabled or failed
return this.search({ userId, guildId, query, limit });
},
/**
* Update access statistics for memories
*/
@@ -282,4 +396,58 @@ export const memoryRepository = {
: 0,
};
},
/**
 * Generate embeddings for stored memories that are still missing one.
 * Intended for migrating pre-embedding memories onto semantic search.
 *
 * @param batchSize - maximum number of rows to process in this call (default 10)
 * @returns counts of rows successfully embedded and rows that failed
 */
async backfillEmbeddings(batchSize = 10): Promise<{ processed: number; failed: number }> {
  const embeddingService = getEmbeddingService();
  // Nothing to do when the embedding backend is unavailable.
  if (!embeddingService.isEnabled()) {
    return { processed: 0, failed: 0 };
  }

  // Pull the next batch of rows that still lack an embedding.
  const pending = await db
    .select()
    .from(memories)
    .where(sql`${memories.embedding} IS NULL`)
    .limit(batchSize);

  let processed = 0;
  let failed = 0;

  // One row at a time: a single bad row must not abort the whole batch.
  for (const row of pending) {
    try {
      const vector = await embeddingService.embed(row.content);
      if (!vector) {
        failed++;
        continue;
      }
      await db
        .update(memories)
        .set({ embedding: JSON.stringify(vector) })
        .where(eq(memories.id, row.id));
      processed++;
    } catch {
      failed++;
    }
  }

  return { processed, failed };
},
/**
 * Count how many memories still lack an embedding (i.e. how much work
 * backfillEmbeddings has left to do).
 */
async countWithoutEmbeddings(): Promise<number> {
  const [row] = await db
    .select({ count: sql<number>`count(*)` })
    .from(memories)
    .where(sql`${memories.embedding} IS NULL`);
  // An empty result set means zero rows without embeddings.
  return row?.count || 0;
},
};

View File

@@ -159,6 +159,7 @@ export const botOptions = sqliteTable("bot_options", {
free_will_chance: integer("free_will_chance").default(2), // stored as percentage 0-100
memory_chance: integer("memory_chance").default(30),
mention_probability: integer("mention_probability").default(0),
gif_search_enabled: integer("gif_search_enabled").default(0), // 0 = disabled, 1 = enabled
updated_at: text("updated_at").default(sql`(current_timestamp)`),
});

View File

@@ -122,12 +122,22 @@ export const joelResponder = {
const userId = message.author.id;
const guildId = message.guildId;
// Fetch guild options to check for enabled features
const guildOptions = await db
.select()
.from(botOptions)
.where(eq(botOptions.guild_id, guildId))
.limit(1);
const gifSearchEnabled = guildOptions.length > 0 && guildOptions[0].gif_search_enabled === 1;
// Create tool context for this conversation
const toolContext: ToolContext = {
userId,
guildId,
channelId: message.channelId,
authorName: author,
gifSearchEnabled,
};
// Classify the message to determine response style
@@ -157,7 +167,7 @@ export const joelResponder = {
}, style);
// Add tool instructions to the system prompt
const systemPromptWithTools = `${systemPrompt}
let systemPromptWithTools = `${systemPrompt}
=== MEMORY TOOLS ===
You have access to tools for managing memories about users:
@@ -168,6 +178,19 @@ You have access to tools for managing memories about users:
Feel free to look up memories when you want to make personalized insults.
The current user's ID is: ${userId}`;
// Add GIF tool instructions if enabled
if (gifSearchEnabled) {
systemPromptWithTools += `
=== GIF TOOL ===
You can search for and send funny GIFs! Use search_gif when you want to:
- React to something with a perfect GIF
- Express your emotions visually
- Be chaotic and random
- Make fun of someone with a relevant GIF
The GIF URL will appear in your response for the user to see.`;
}
// Get reply context if this is a reply
let prompt = message.cleanContent;
if (message.reference) {

View File

@@ -0,0 +1,144 @@
/**
* Embedding service for semantic memory similarity
* Uses OpenAI-compatible embeddings API (can use OpenRouter or OpenAI directly)
*/
import OpenAI from "openai";
import { config } from "../../core/config";
import { createLogger } from "../../core/logger";
const logger = createLogger("AI:Embeddings");
// Embedding model to use (OpenRouter supports several embedding models)
const EMBEDDING_MODEL = "openai/text-embedding-3-small";
const EMBEDDING_DIMENSIONS = 1536;
/**
 * OpenRouter-based embedding provider.
 *
 * Generates text embeddings and computes cosine similarity for semantic
 * memory search. All API failures are swallowed and reported as null so
 * callers degrade gracefully to keyword search.
 *
 * NOTE(review): this targets OpenRouter's OpenAI-compatible base URL, but an
 * /embeddings endpoint may not be available for every account/model — the
 * 404 handling below self-disables the service in that case. Confirm
 * availability against OpenRouter's current API docs.
 */
class EmbeddingService {
  private client: OpenAI;
  // Flips to false permanently once the API reports the model/endpoint is
  // unavailable, so we stop making doomed requests.
  private enabled: boolean;

  constructor() {
    this.client = new OpenAI({
      baseURL: "https://openrouter.ai/api/v1",
      apiKey: config.ai.openRouterApiKey,
      defaultHeaders: {
        "HTTP-Referer": "https://github.com/crunk-bun",
        "X-Title": "Joel Discord Bot",
      },
    });
    this.enabled = true;
  }

  /**
   * Generate an embedding for a piece of text.
   * Never throws: returns null when disabled or on any API failure.
   */
  async embed(text: string): Promise<number[] | null> {
    if (!this.enabled) {
      return null;
    }

    try {
      const response = await this.client.embeddings.create({
        model: EMBEDDING_MODEL,
        input: text.slice(0, 8000), // Limit input length
      });

      const embedding = response.data[0]?.embedding;
      if (!embedding) {
        logger.warn("No embedding returned from API");
        return null;
      }

      logger.debug("Generated embedding", {
        textLength: text.length,
        dimensions: embedding.length
      });

      return embedding;
    } catch (error) {
      // If embeddings fail, disable and log - don't crash
      logger.error("Failed to generate embedding", error);
      this.disableIfUnavailable(error);
      return null;
    }
  }

  /**
   * Generate embeddings for multiple texts in one API call.
   * The returned array is index-aligned with `texts`; on failure every
   * entry is null.
   */
  async embedBatch(texts: string[]): Promise<(number[] | null)[]> {
    if (!this.enabled || texts.length === 0) {
      return texts.map(() => null);
    }

    try {
      const response = await this.client.embeddings.create({
        model: EMBEDDING_MODEL,
        input: texts.map(t => t.slice(0, 8000)),
      });

      // Place results by the API-reported `index` field rather than array
      // position, so outputs stay aligned with inputs even if the API
      // returns items out of order.
      const results: (number[] | null)[] = texts.map(() => null);
      for (const item of response.data) {
        results[item.index] = item.embedding;
      }
      return results;
    } catch (error) {
      logger.error("Failed to generate batch embeddings", error);
      // Mirror embed(): self-disable when the endpoint/model is unavailable,
      // instead of silently retrying forever (previous inconsistency).
      this.disableIfUnavailable(error);
      return texts.map(() => null);
    }
  }

  /**
   * Cosine similarity of two equal-length vectors, in [-1, 1].
   * Returns 0 when either vector has zero magnitude.
   * @throws Error when the vectors have different lengths.
   */
  cosineSimilarity(a: number[], b: number[]): number {
    if (a.length !== b.length) {
      throw new Error("Embeddings must have the same dimensions");
    }

    let dotProduct = 0;
    let normA = 0;
    let normB = 0;

    for (let i = 0; i < a.length; i++) {
      dotProduct += a[i] * b[i];
      normA += a[i] * a[i];
      normB += b[i] * b[i];
    }

    normA = Math.sqrt(normA);
    normB = Math.sqrt(normB);

    if (normA === 0 || normB === 0) {
      return 0;
    }

    return dotProduct / (normA * normB);
  }

  /** Whether the service is still enabled (not self-disabled by an API error). */
  isEnabled(): boolean {
    return this.enabled;
  }

  /**
   * Shared error policy for embed/embedBatch: permanently disable the
   * service when the API reports the embedding model is unavailable.
   */
  private disableIfUnavailable(error: unknown): void {
    const err = error as Error & { status?: number };
    if (err.status === 404 || err.message?.includes("not available")) {
      logger.warn("Embedding model not available, disabling embeddings");
      this.enabled = false;
    }
  }
}
// Singleton instance
let embeddingService: EmbeddingService | null = null;
/**
 * Lazily create and return the process-wide EmbeddingService singleton.
 * The instance is constructed on first use and reused thereafter.
 */
export function getEmbeddingService(): EmbeddingService {
  embeddingService ??= new EmbeddingService();
  return embeddingService;
}
export { EmbeddingService, EMBEDDING_DIMENSIONS };

View File

@@ -83,3 +83,4 @@ export function getAiService(): AiService {
export type { AiProvider, AiResponse, MessageStyle } from "./types";
export type { ToolContext, ToolCall, ToolResult } from "./tools";
export { JOEL_TOOLS, MEMORY_EXTRACTION_TOOLS } from "./tools";
export { getEmbeddingService, EmbeddingService } from "./embeddings";

View File

@@ -7,7 +7,7 @@ import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/reso
import { config } from "../../core/config";
import { createLogger } from "../../core/logger";
import type { AiProvider, AiResponse, AskOptions, AskWithToolsOptions, MessageStyle } from "./types";
import { JOEL_TOOLS, MEMORY_EXTRACTION_TOOLS, type ToolCall, type ToolContext } from "./tools";
import { JOEL_TOOLS, MEMORY_EXTRACTION_TOOLS, getToolsForContext, type ToolCall, type ToolContext } from "./tools";
import { executeTools } from "./tool-handlers";
const logger = createLogger("AI:OpenRouter");
@@ -79,6 +79,9 @@ export class OpenRouterProvider implements AiProvider {
{ role: "user", content: prompt },
];
// Get the appropriate tools for this context (includes optional tools like GIF search)
const tools = getToolsForContext(context);
let iterations = 0;
while (iterations < MAX_TOOL_ITERATIONS) {
@@ -88,7 +91,7 @@ export class OpenRouterProvider implements AiProvider {
const completion = await this.client.chat.completions.create({
model: config.ai.model,
messages,
tools: JOEL_TOOLS,
tools,
tool_choice: "auto",
max_tokens: maxTokens ?? config.ai.maxTokens,
temperature: temperature ?? config.ai.temperature,
@@ -177,7 +180,7 @@ The user's Discord ID is: ${context.userId}`;
try {
const completion = await this.client.chat.completions.create({
model: config.ai.classificationModel,
model: config.ai.model, // Use main model - needs tool support
messages: [
{ role: "system", content: systemPrompt },
{ role: "user", content: `Analyze this message for memorable content:\n\n"${message}"` },

View File

@@ -4,6 +4,7 @@
*/
import { createLogger } from "../../core/logger";
import { config } from "../../core/config";
import { memoryRepository, type MemoryCategory } from "../../database";
import type { ToolHandler, ToolContext, ToolCall, ToolResult } from "./tools";
@@ -83,7 +84,7 @@ const toolHandlers: Record<string, ToolHandler> = {
},
/**
* Search memories by keyword/topic
* Search memories by keyword/topic - uses semantic search when available
*/
async search_memories(args, context): Promise<string> {
const query = args.query as string;
@@ -95,16 +96,23 @@ const toolHandlers: Record<string, ToolHandler> = {
return "Error: No search query provided.";
}
logger.debug("Searching memories", { query, guildId, category, minImportance });
logger.debug("Searching memories (semantic)", { query, guildId, category, minImportance });
const results = await memoryRepository.search({
query,
// Try semantic search first for better results
let results = await memoryRepository.semanticSearch(query, {
guildId,
category,
minImportance,
limit: 15,
minSimilarity: 0.6,
});
// Filter by category and importance if specified
if (category) {
results = results.filter(m => m.category === category);
}
if (minImportance) {
results = results.filter(m => (m.importance || 0) >= minImportance);
}
if (results.length === 0) {
return `No memories found matching "${query}".`;
}
@@ -113,7 +121,8 @@ const toolHandlers: Record<string, ToolHandler> = {
.map((m, i) => {
const cat = m.category || "general";
const imp = m.importance || 5;
return `${i + 1}. [User ${m.user_id?.slice(0, 8)}...] [${cat}|★${imp}] ${m.content}`;
const sim = 'similarity' in m ? ` (${Math.round((m.similarity as number) * 100)}% match)` : '';
return `${i + 1}. [User ${m.user_id?.slice(0, 8)}...] [${cat}|★${imp}]${sim} ${m.content}`;
})
.join("\n");
@@ -190,6 +199,71 @@ const toolHandlers: Record<string, ToolHandler> = {
`Average importance: ${stats.avgImportance.toFixed(1)}/10\n` +
`By category:\n${categoryBreakdown}`;
},
/**
 * Search for a GIF using the Klipy API and hand the model a URL to embed.
 *
 * Fix: the previous implementation reused Tenor's v2 request/response shape
 * (`key`/`limit`/`media_filter`/`contentfilter` query params and a
 * `results[].media_formats` response) against the Klipy host, which Klipy
 * does not serve — every search would fail. Klipy's search endpoint puts
 * the app key in the path (`/api/v1/{app_key}/gifs/search`), takes
 * `q`/`per_page`, and returns items under `data.data[].file`.
 * NOTE(review): verify field names against Klipy's current API docs.
 */
async search_gif(args, context): Promise<string> {
  const query = args.query as string;
  // Clamp the requested result count to a sane 1-10 range (default 5).
  const limit = Math.min(Math.max((args.limit as number) || 5, 1), 10);

  if (!query || query.trim().length === 0) {
    return "Error: No search query provided.";
  }

  if (!config.klipy.apiKey) {
    return "Error: GIF search is not configured (missing Klipy API key).";
  }

  logger.debug("Searching for GIF", { query, limit });

  try {
    // Klipy authenticates via the app key in the URL path, not a query param.
    const url = new URL(
      `https://api.klipy.com/api/v1/${encodeURIComponent(config.klipy.apiKey)}/gifs/search`
    );
    url.searchParams.set("q", query);
    url.searchParams.set("per_page", limit.toString());
    url.searchParams.set("content_filter", "off"); // Joel doesn't care about content filters
    // Stable per-user id, used by Klipy for result personalization.
    url.searchParams.set("customer_id", context.userId);

    const response = await fetch(url.toString());

    if (!response.ok) {
      logger.error("Klipy API error", { status: response.status });
      return `Error: Failed to search for GIFs (HTTP ${response.status})`;
    }

    const data = await response.json() as {
      result: boolean;
      data?: {
        data?: Array<{
          id: number | string;
          title: string;
          file: {
            hd?: { gif?: { url: string } };
            md?: { gif?: { url: string } };
            sm?: { gif?: { url: string } };
          };
        }>;
      };
    };

    const results = data.data?.data ?? [];
    if (!data.result || results.length === 0) {
      return `No GIFs found for "${query}". Try a different search term.`;
    }

    // Pick a random GIF from the results so Joel stays unpredictable.
    const gif = results[Math.floor(Math.random() * results.length)];
    // Prefer the highest-quality rendition that actually has a URL.
    const gifUrl =
      gif.file.hd?.gif?.url || gif.file.md?.gif?.url || gif.file.sm?.gif?.url;

    if (!gifUrl) {
      return `Found GIFs but couldn't get URL. Try again.`;
    }

    logger.info("Found GIF", { query, gifUrl, title: gif.title });

    return `GIF found! Include this URL in your response to show it: ${gifUrl}`;
  } catch (error) {
    logger.error("GIF search failed", error);
    return `Error searching for GIFs: ${(error as Error).message}`;
  }
},
};
/**

View File

@@ -30,6 +30,8 @@ export interface ToolContext {
guildId: string;
channelId: string;
authorName: string;
/** Optional: enable GIF search for this context */
gifSearchEnabled?: boolean;
}
/**
@@ -166,6 +168,46 @@ export const JOEL_TOOLS: ChatCompletionTool[] = [
},
];
/**
* GIF search tool - only enabled when gif_search_enabled is true for the guild
* (it is appended by getToolsForContext, never part of the base JOEL_TOOLS).
* Invocations are executed by the search_gif handler in tool-handlers.
*/
export const GIF_SEARCH_TOOL: ChatCompletionTool = {
type: "function",
function: {
name: "search_gif",
description: "Search for a funny GIF to send in the chat. Use this when you want to express yourself with a GIF, react to something funny, or just be chaotic. The GIF URL will be included in your response.",
parameters: {
type: "object",
properties: {
query: {
type: "string",
description: "Search query for the GIF. Be creative and funny with your searches!",
},
limit: {
type: "number",
description: "Number of GIFs to get back (1-10). Default is 5, then a random one is picked.",
},
},
// Only "query" is mandatory; "limit" falls back to the handler's default.
required: ["query"],
},
},
};
/**
 * Assemble the tool list for a conversation context.
 * Always includes the base JOEL_TOOLS; optional tools (currently only GIF
 * search) are appended when the corresponding context flag is set.
 */
export function getToolsForContext(context: ToolContext): ChatCompletionTool[] {
  return context.gifSearchEnabled
    ? [...JOEL_TOOLS, GIF_SEARCH_TOOL]
    : [...JOEL_TOOLS];
}
/**
* Subset of tools for memory extraction (lightweight)
*/

View File

@@ -244,6 +244,7 @@ export function createApiRoutes(client: BotClient) {
free_will_chance: 2,
memory_chance: 30,
mention_probability: 0,
gif_search_enabled: 0,
});
}
@@ -260,12 +261,30 @@ export function createApiRoutes(client: BotClient) {
return c.json({ error: "Access denied" }, 403);
}
const body = await c.req.json<{
const contentType = c.req.header("content-type");
let body: {
active_personality_id?: string | null;
free_will_chance?: number;
memory_chance?: number;
mention_probability?: number;
}>();
gif_search_enabled?: boolean | string;
};
if (contentType?.includes("application/x-www-form-urlencoded")) {
const form = await c.req.parseBody();
body = {
active_personality_id: form.active_personality_id as string || null,
free_will_chance: form.free_will_chance ? parseInt(form.free_will_chance as string) : undefined,
memory_chance: form.memory_chance ? parseInt(form.memory_chance as string) : undefined,
mention_probability: form.mention_probability ? parseInt(form.mention_probability as string) : undefined,
gif_search_enabled: form.gif_search_enabled === "on" || form.gif_search_enabled === "true",
};
} else {
body = await c.req.json();
}
// Convert gif_search_enabled to integer for SQLite
const gifSearchEnabled = body.gif_search_enabled ? 1 : 0;
// Upsert options
const existing = await db
@@ -277,13 +296,21 @@ export function createApiRoutes(client: BotClient) {
if (existing.length === 0) {
await db.insert(botOptions).values({
guild_id: guildId,
...body,
active_personality_id: body.active_personality_id,
free_will_chance: body.free_will_chance,
memory_chance: body.memory_chance,
mention_probability: body.mention_probability,
gif_search_enabled: gifSearchEnabled,
});
} else {
await db
.update(botOptions)
.set({
...body,
active_personality_id: body.active_personality_id,
free_will_chance: body.free_will_chance,
memory_chance: body.memory_chance,
mention_probability: body.mention_probability,
gif_search_enabled: gifSearchEnabled,
updated_at: new Date().toISOString(),
})
.where(eq(botOptions.guild_id, guildId));

View File

@@ -197,6 +197,7 @@ export function createWebServer(client: BotClient) {
free_will_chance: 2,
memory_chance: 30,
mention_probability: 0,
gif_search_enabled: 0,
};
return c.html(guildDetailPage(guildId, guild.name, options, guildPersonalities));

View File

@@ -26,6 +26,7 @@ interface BotOptions {
free_will_chance: number | null;
memory_chance: number | null;
mention_probability: number | null;
gif_search_enabled: number | null;
}
export function dashboardPage(user: User, guilds: Guild[]): string {
@@ -246,6 +247,16 @@ The user's name is {author}. Insult {author} by name.
<p style="color: #666; font-size: 12px; margin-top: 4px;">Probability that Joel mentions someone in his response.</p>
</div>
<div class="form-group">
<label style="display: flex; align-items: center; gap: 10px; cursor: pointer;">
<input type="checkbox" id="gif_search_enabled" name="gif_search_enabled"
${options.gif_search_enabled ? 'checked' : ''}
style="width: 20px; height: 20px; cursor: pointer;">
<span>Enable GIF Search</span>
</label>
<p style="color: #666; font-size: 12px; margin-top: 4px;">Allow Joel to search for and send funny GIFs in his responses. Powered by Klipy.</p>
</div>
<button type="submit" class="btn">Save Options</button>
</form>
</div>