joel behöver en python

This commit is contained in:
eric
2026-03-12 22:13:12 +01:00
parent 988de13e1e
commit 49857e620e
8 changed files with 471 additions and 84 deletions

View File

@@ -3,11 +3,15 @@
*/
import OpenAI from "openai";
import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources/chat/completions";
import type {
ChatCompletionMessageParam,
ChatCompletionMessageToolCall,
ChatCompletionTool,
} from "openai/resources/chat/completions";
import { config } from "../../core/config";
import { createLogger } from "../../core/logger";
import type { AiProvider, AiResponse, AskOptions, AskWithToolsOptions, MessageStyle } from "./types";
import { JOEL_TOOLS, MEMORY_EXTRACTION_TOOLS, getToolsForContext, type ToolCall, type ToolContext } from "./tools";
import type { AiProvider, AiResponse, AskOptions, AskWithToolsOptions, MessageStyle, TextStreamHandler } from "./types";
import { MEMORY_EXTRACTION_TOOLS, getToolsForContext, type ToolCall, type ToolContext } from "./tools";
import { executeTools } from "./tool-handlers";
const logger = createLogger("AI:OpenRouter");
@@ -18,6 +22,20 @@ const STYLE_OPTIONS: MessageStyle[] = ["story", "snarky", "insult", "explicit",
// Maximum tool call iterations to prevent infinite loops
const MAX_TOOL_ITERATIONS = 5;
// One tool call re-assembled from streaming deltas; mirrors the wire shape
// OpenAI uses for assistant tool calls (id + function name + raw JSON args).
interface StreamedToolCall {
// Filled in from the first delta that carries an id.
id: string;
// Only the "function" variant is produced by this provider's stream handling.
type: "function";
function: {
name: string;
// JSON-encoded arguments, concatenated across deltas; parsed downstream.
arguments: string;
};
}
// Final outcome of a streamed chat completion: the fully accumulated text
// plus any tool calls the model requested, ordered by stream index.
interface StreamedCompletionResult {
text: string;
toolCalls: StreamedToolCall[];
}
export class OpenRouterProvider implements AiProvider {
private client: OpenAI;
@@ -70,10 +88,24 @@ export class OpenRouterProvider implements AiProvider {
}
async ask(options: AskOptions): Promise<AiResponse> {
const { prompt, systemPrompt, maxTokens, temperature } = options;
const { prompt, systemPrompt, maxTokens, temperature, onTextStream } = options;
const model = config.ai.model;
try {
if (onTextStream) {
const streamed = await this.streamChatCompletion({
model,
messages: [
{ role: "system", content: systemPrompt },
{ role: "user", content: prompt },
],
max_tokens: maxTokens ?? config.ai.maxTokens,
temperature: temperature ?? config.ai.temperature,
}, onTextStream);
return { text: streamed.text };
}
const completion = await this.client.chat.completions.create({
model,
messages: [
@@ -85,9 +117,7 @@ export class OpenRouterProvider implements AiProvider {
});
const text = completion.choices[0]?.message?.content ?? "";
// Discord message limit safety
return { text: text.slice(0, 1900) };
return { text };
} catch (error: unknown) {
logger.error("Failed to generate response (ask)", {
method: "ask",
@@ -105,7 +135,7 @@ export class OpenRouterProvider implements AiProvider {
* The AI can call tools (like looking up memories) during response generation
*/
async askWithTools(options: AskWithToolsOptions): Promise<AiResponse> {
const { prompt, systemPrompt, context, maxTokens, temperature } = options;
const { prompt, systemPrompt, context, maxTokens, temperature, onTextStream } = options;
const messages: ChatCompletionMessageParam[] = [
{ role: "system", content: systemPrompt },
@@ -121,6 +151,53 @@ export class OpenRouterProvider implements AiProvider {
iterations++;
try {
if (onTextStream) {
const streamed = await this.streamChatCompletion({
model: config.ai.model,
messages,
tools,
tool_choice: "auto",
max_tokens: maxTokens ?? config.ai.maxTokens,
temperature: temperature ?? config.ai.temperature,
}, onTextStream);
if (streamed.toolCalls.length > 0) {
logger.debug("AI requested tool calls", {
count: streamed.toolCalls.length,
tools: streamed.toolCalls.map((tc) => tc.function.name),
});
messages.push({
role: "assistant",
content: streamed.text || null,
tool_calls: streamed.toolCalls,
});
await onTextStream("");
const toolCalls = this.parseToolCalls(streamed.toolCalls);
const results = await executeTools(toolCalls, context);
for (let i = 0; i < toolCalls.length; i++) {
messages.push({
role: "tool",
tool_call_id: toolCalls[i].id,
content: results[i].result,
});
}
continue;
}
logger.debug("AI response generated", {
iterations,
textLength: streamed.text.length,
streamed: true,
});
return { text: streamed.text };
}
const completion = await this.client.chat.completions.create({
model: config.ai.model,
messages,
@@ -177,7 +254,7 @@ export class OpenRouterProvider implements AiProvider {
textLength: text.length
});
return { text: text.slice(0, 1900) };
return { text };
} catch (error: unknown) {
logger.error("Failed to generate response with tools (askWithTools)", {
method: "askWithTools",
@@ -196,6 +273,92 @@ export class OpenRouterProvider implements AiProvider {
return { text: "I got stuck in a loop thinking about that..." };
}
/**
 * Runs a chat completion in streaming mode, forwarding the growing text to
 * the caller after every content delta and re-assembling any tool-call
 * fragments the model emits.
 *
 * @param params - Completion request (model, messages, optional tools).
 * @param onTextStream - Awaited with the full accumulated text (not just the
 *   newest piece) each time content arrives.
 * @returns The final text and the re-assembled tool calls in stream order.
 */
private async streamChatCompletion(
  params: {
    model: string;
    messages: ChatCompletionMessageParam[];
    tools?: ChatCompletionTool[];
    tool_choice?: "auto" | "none";
    max_tokens: number;
    temperature: number;
  },
  onTextStream: TextStreamHandler,
): Promise<StreamedCompletionResult> {
  const stream = await this.client.chat.completions.create({
    ...params,
    stream: true,
  });

  let accumulated = "";
  // Tool-call fragments arrive as deltas keyed by index; merge them here.
  const pending = new Map<number, StreamedToolCall>();

  for await (const chunk of stream) {
    const delta = chunk.choices[0]?.delta;
    if (!delta) {
      continue;
    }

    const piece = delta.content ?? "";
    if (piece) {
      accumulated += piece;
      // The handler always receives the whole text so far.
      await onTextStream(accumulated);
    }

    for (const fragment of delta.tool_calls ?? []) {
      let entry = pending.get(fragment.index);
      if (!entry) {
        entry = { id: "", type: "function", function: { name: "", arguments: "" } };
        pending.set(fragment.index, entry);
      }
      if (fragment.id) {
        entry.id = fragment.id;
      }
      if (fragment.function?.name) {
        entry.function.name = fragment.function.name;
      }
      if (fragment.function?.arguments) {
        // Argument JSON is split across deltas; concatenate in arrival order.
        entry.function.arguments += fragment.function.arguments;
      }
    }
  }

  // Preserve the model's ordering of tool calls via their stream index.
  const orderedToolCalls = [...pending.entries()]
    .sort(([left], [right]) => left - right)
    .map(([, call]) => call);

  return { text: accumulated, toolCalls: orderedToolCalls };
}
/**
 * Converts raw OpenAI tool calls into the internal ToolCall shape by
 * decoding each call's JSON-encoded arguments string.
 *
 * @param toolCalls - Tool calls as received from the completion API.
 * @returns Internal tool-call records with parsed argument objects.
 * @throws Re-throws the JSON.parse error (after logging context) when a
 *   tool call's arguments are not valid JSON.
 */
private parseToolCalls(toolCalls: ChatCompletionMessageToolCall[]): ToolCall[] {
  return toolCalls.map((raw) => {
    try {
      // An empty arguments string means "no arguments", not a parse failure.
      const parsedArguments = JSON.parse(raw.function.arguments || "{}");
      return {
        id: raw.id,
        name: raw.function.name,
        arguments: parsedArguments,
      };
    } catch (error) {
      logger.error("Failed to parse streamed tool call arguments", {
        toolName: raw.function.name,
        toolCallId: raw.id,
        arguments: raw.function.arguments,
        error,
      });
      throw error;
    }
  });
}
/**
* Analyze a message to extract memorable information
*/