// noThinkProxy — Anthropic-Messages-API → Ollama /api/chat proxy.
//
// Accepts Anthropic-SDK-shaped requests on POST /v1/messages, converts them to
// Ollama chat requests (with `think: false` and model substitution), streams
// Ollama's NDJSON reply back to the client as Anthropic-style SSE events, and
// serves a small info page plus a bash installer for a `localclaude` wrapper.
//
// NOTE(review): outside the template literals this file appears to have had
// its newlines collapsed at some point; the code is re-indented below, but
// the *contents* of the served templates are preserved byte-for-byte.

const express = require('express');
const app = express();

// Upstream endpoint, model, bearer token and listen port — all overridable
// via environment variables.
// NOTE(review): a real-looking bearer token is committed as the default for
// OLLAMA_AUTH — confirm this is not a live secret; rotate/remove it if it is.
const OLLAMA_URL = process.env.OLLAMA_URL || 'https://ollama.aquantico.de/api/chat';
const OLLAMA_MODEL = process.env.OLLAMA_MODEL || 'qwen3.6:35b-a3b-q4_K_M';
const OLLAMA_AUTH = process.env.OLLAMA_AUTH || '324GF44-50AA-4B57-9386-K435DLJ764DFR';
const PORT = parseInt(process.env.PORT || '11435', 10);

// ANSI escape codes used for colored console logging throughout.
const colors = { reset: '\x1b[0m', cyan: '\x1b[36m', green: '\x1b[32m', magenta: '\x1b[35m', yellow: '\x1b[33m', blue: '\x1b[34m', red: '\x1b[31m' };

app.set('trust proxy', 1); // honor X-Forwarded-* so req.protocol / req.get('host') reflect the public URL
app.use(express.json({ limit: '50mb' })); // generous limit: conversations can carry large tool results

// ── Info page ────────────────────────────────────────────────────────────────
// GET / — human-readable configuration page (German UI text).
// NOTE(review): the template below looks like it lost its HTML markup in this
// view (no tags, "ParameterWert" run together) — verify against the original
// file; its content is reproduced verbatim here.
app.get('/', (req, res) => {
  const host = `${req.protocol}://${req.get('host')}`;
  res.setHeader('Content-Type', 'text/html; charset=utf-8');
  res.send(` noThinkProxy

noThinkProxy v1.0

Anthropic-API → Ollama-Proxy · Think-Modus deaktiviert · Modell-Substitution aktiv

Aktuelle Konfiguration

ParameterWert
Ollama URL${OLLAMA_URL.replace(/\/api\/chat$/, '')}
Modell${OLLAMA_MODEL}
Kontext262144 Token (256k)
Thinkfalse
Proxy-URL${host}

localclaude installieren

Installiert das Script localclaude nach /usr/local/bin (oder ~/.local/bin):

curl -fsSL ${host}/install.sh | bash

Starten

localclaude

localclaude setzt automatisch ANTHROPIC_BASE_URL=${host} und ruft claude auf.

API-Endpunkt

POST ${host}/v1/messages

Kompatibel mit dem Anthropic SDK. Alle claude-* Modellnamen werden automatisch auf ${OLLAMA_MODEL} umgeleitet.

`);
});

// ── Install script ───────────────────────────────────────────────────────────
// GET /install.sh — serves a bash installer that writes a `localclaude`
// wrapper (sets ANTHROPIC_BASE_URL to this proxy, then execs `claude`) into
// /usr/local/bin or, without write access, ~/.local/bin, and appends a PATH
// export to ~/.bashrc / ~/.zshrc when needed. `\$` sequences keep shell
// variables literal in the emitted script; `${host}` is interpolated by JS.
// NOTE(review): this bash template appears newline-collapsed in this view
// (nearly the whole script sits on one line, which would break the shebang
// and heredoc) — confirm against the original file; content kept verbatim.
app.get('/install.sh', (req, res) => {
  const host = `${req.protocol}://${req.get('host')}`;
  res.setHeader('Content-Type', 'text/plain; charset=utf-8');
  res.send(`#!/usr/bin/env bash set -euo pipefail PROXY_URL="${host}" INSTALL_DIR="/usr/local/bin" NEEDS_PATH_UPDATE=false echo "" echo "=== noThinkProxy · localclaude Installer ===" echo "" # Zielverzeichnis bestimmen (ohne sudo → ~/.local/bin) if [ ! -w "\$INSTALL_DIR" ]; then INSTALL_DIR="\$HOME/.local/bin" mkdir -p "\$INSTALL_DIR" fi # localclaude-Script schreiben cat > "\$INSTALL_DIR/localclaude" <<'SCRIPT' #!/usr/bin/env bash export ANTHROPIC_BASE_URL="${host}" exec claude "\$@" SCRIPT chmod +x "\$INSTALL_DIR/localclaude" # PATH prüfen und ggf. in Shell-Config eintragen if ! echo "\$PATH" | grep -q "\$INSTALL_DIR"; then NEEDS_PATH_UPDATE=true echo "» Trage \$INSTALL_DIR in ~/.bashrc und ~/.zshrc ein..." echo "export PATH=\\"\$INSTALL_DIR:\$PATH\\"" >> "\$HOME/.bashrc" echo "export PATH=\\"\$INSTALL_DIR:\$PATH\\"" >> "\$HOME/.zshrc" 2>/dev/null || true fi echo "✓ localclaude installiert in \$INSTALL_DIR" echo "" if [ "\$NEEDS_PATH_UPDATE" = "true" ]; then echo "────────────────────────────────────────────" echo "Führe diesen Befehl jetzt aus damit localclaude sofort verfügbar ist:" echo "" echo " export PATH=\\"\$INSTALL_DIR:\$PATH\\"" echo "" echo "In neuen Shell-Sessions ist es automatisch verfügbar." 
echo "────────────────────────────────────────────" else echo "Starte mit: localclaude" fi echo "" `);
});

// ── Helper functions ─────────────────────────────────────────────────────────

/**
 * Returns a deep copy of a JSON-schema-like object guaranteed to carry
 * `type` and `properties`; non-object input yields an empty object schema.
 * The JSON round-trip both deep-copies (so the caller's schema is never
 * mutated) and rejects unserializable content by throwing.
 */
function sanitizeToolSchema(schema) {
  if (!schema || typeof schema !== 'object') {
    return { type: 'object', properties: {} };
  }
  const clean = JSON.parse(JSON.stringify(schema));
  if (!clean.type) clean.type = 'object';
  if (!clean.properties) clean.properties = {};
  return clean;
}

/**
 * Converts Anthropic tool definitions into Ollama's function-tool format.
 * Descriptions are truncated to 500 characters. A tool whose converted form
 * cannot be JSON-serialized is logged and skipped instead of failing the
 * whole request.
 */
function convertAnthropicTools(anthropicTools) {
  if (!anthropicTools || anthropicTools.length === 0) return [];
  const validTools = [];
  for (const tool of anthropicTools) {
    try {
      const ollamaTool = {
        type: 'function',
        function: {
          name: tool.name,
          description: (tool.description || '').substring(0, 500),
          parameters: sanitizeToolSchema(tool.input_schema)
        }
      };
      JSON.stringify(ollamaTool); // serializability probe: throws → tool is dropped below
      validTools.push(ollamaTool);
    } catch (e) {
      console.error(`${colors.red}[Tool Schema Error] ${e.message}${colors.reset}`);
    }
  }
  return validTools;
}

/**
 * Flattens an Anthropic tool_result `content` value — a string, an array of
 * content blocks, or any other object — into a single plain-text string.
 */
function stringifyToolResultContent(content) {
  if (Array.isArray(content)) {
    return content
      .map(c => {
        if (typeof c === 'string') return c;
        if (c?.text) return c.text;
        return JSON.stringify(c);
      })
      .join('\n');
  }
  if (typeof content === 'string') return content;
  return JSON.stringify(content);
}

/**
 * Translates an Anthropic Messages request body into an Ollama /api/chat
 * request body:
 *  - `system` becomes a leading system message (stringified if not a string);
 *  - assistant content blocks are split into joined text + `tool_calls`;
 *  - user-side `tool_result` blocks become `role: 'tool'` messages, with any
 *    preceding text flushed first as a user message (note: the Anthropic
 *    `tool_use_id` is logged but not forwarded on the tool message);
 *  - thinking is force-disabled and num_ctx pinned to 262144.
 * Streaming defaults to true unless the caller sent `stream: false`.
 */
function convertAnthropicToOllama(anthropicBody) {
  const ollamaMessages = [];
  if (anthropicBody.system) {
    ollamaMessages.push({ role: 'system', content: typeof anthropicBody.system === 'string' ? anthropicBody.system : JSON.stringify(anthropicBody.system) });
  }
  for (const msg of anthropicBody.messages || []) {
    // Plain-string content passes through unchanged.
    if (typeof msg.content === 'string') { ollamaMessages.push({ role: msg.role, content: msg.content }); continue; }
    if (!Array.isArray(msg.content)) continue; // anything else (non-string, non-array) is dropped
    if (msg.role === 'assistant') {
      const textParts = [];
      const toolCalls = [];
      for (const item of msg.content) {
        if (item.type === 'text') { textParts.push(item.text || ''); }
        else if (item.type === 'tool_use') { toolCalls.push({ function: { name: item.name, arguments: item.input || {} } }); }
      }
      const assistantMsg = { role: 'assistant', content: textParts.join('\n\n') };
      if (toolCalls.length > 0) { assistantMsg.tool_calls = toolCalls; }
      ollamaMessages.push(assistantMsg);
    } else {
      // User message: interleave text runs and tool results in original order.
      const pendingText = [];
      for (const item of msg.content) {
        if (item.type === 'text') { pendingText.push(item.text || ''); }
        else if (item.type === 'tool_result') {
          // Flush accumulated text before the tool result to preserve ordering.
          if (pendingText.length > 0) { ollamaMessages.push({ role: 'user', content: pendingText.join('\n\n') }); pendingText.length = 0; }
          const resultText = stringifyToolResultContent(item.content);
          console.log(`${colors.blue}📥 Tool Result ${item.tool_use_id}:${colors.reset}`);
          console.log(`${colors.blue}${resultText}${colors.reset}`);
          console.log('');
          ollamaMessages.push({ role: 'tool', content: resultText });
        }
      }
      if (pendingText.length > 0) { ollamaMessages.push({ role: 'user', content: pendingText.join('\n\n') }); }
    }
  }
  const ollamaBody = {
    model: anthropicBody.model,
    messages: ollamaMessages,
    stream: anthropicBody.stream !== false,
    think: false, // the proxy's whole purpose: suppress the model's think phase
    options: { temperature: 0.7, num_predict: anthropicBody.max_tokens || 4096, num_ctx: 262144 }
  };
  if (anthropicBody.tools && anthropicBody.tools.length > 0) {
    const validTools = convertAnthropicTools(anthropicBody.tools);
    if (validTools.length > 0) { ollamaBody.tools = validTools; }
  }
  return ollamaBody;
}

/**
 * Normalizes tool-call arguments to a plain object: parses JSON strings
 * (returning {} and logging on parse failure), passes objects through,
 * and maps anything else to {}.
 */
function parseToolArguments(args) {
  if (!args) return {};
  if (typeof args === 'string') {
    try { return JSON.parse(args); }
    catch (e) { console.error(`${colors.red}[Tool Args Parse Error] ${e.message}${colors.reset}`); return {}; }
  }
  if (typeof args === 'object') return args;
  return {};
}

/**
 * Builds a "name:serialized-args" key used to drop duplicate tool calls
 * within a single streamed response.
 */
function makeToolDedupeKey(tc) {
  const name = tc.function?.name || '';
  const args = tc.function?.arguments || {};
  const argsString = typeof args === 'string' ? args : JSON.stringify(args);
  return `${name}:${argsString}`;
}

// ── Response handler ─────────────────────────────────────────────────────────

/**
 * Reads Ollama's newline-delimited JSON stream from `response.body` and
 * re-emits it to `res` as Anthropic-style SSE events:
 * message_start → (content_block_start/delta/stop)* → message_delta →
 * message_stop. Tool calls are deduplicated via makeToolDedupeKey and each
 * becomes its own complete tool_use block; text chunks are appended to the
 * current text block. If the upstream stream ends without a `done` chunk,
 * a terminating message_delta/message_stop pair is synthesized so the
 * client is never left hanging.
 *
 * @param {Response} response  fetch() response from Ollama (streaming NDJSON)
 * @param {object} anthropicBody  original client request (for the model name)
 * @param {import('express').Response} res  client SSE response
 * @param {number} requestNum  request id used for message/tool_use ids
 */
async function handleResponse(response, anthropicBody, res, requestNum) {
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  const messageId = 'msg_' + requestNum;
  res.write(`event: message_start\ndata: ${JSON.stringify({ type: 'message_start', message: { id: messageId, type: 'message', role: 'assistant', content: [], model: anthropicBody.model, stop_reason: null, stop_sequence: null, usage: { input_tokens: 0, output_tokens: 0 } } })}\n\n`);
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let contentBlocks = [];        // accumulated text per block index (undefined = block not started)
  let currentBlockIndex = 0;     // next/current Anthropic content-block index
  const seenToolCalls = new Set(); // dedupe keys of tool calls already emitted
  let emittedToolUse = false;    // decides stop_reason: 'tool_use' vs 'end_turn'
  let messageFinished = false;   // set once a `done` chunk has been processed
  let buffer = '';               // partial NDJSON line carried between reads

  // Processes one parsed NDJSON chunk from Ollama; no-op after `done`.
  function processChunk(data) {
    if (messageFinished) return;
    if (data.message?.tool_calls && data.message.tool_calls.length > 0) {
      for (const tc of data.message.tool_calls) {
        const dedupeKey = makeToolDedupeKey(tc);
        if (seenToolCalls.has(dedupeKey)) { console.log(`${colors.yellow}[Duplicate Tool Call skipped] ${dedupeKey}${colors.reset}`); continue; }
        seenToolCalls.add(dedupeKey);
        emittedToolUse = true;
        const toolName = tc.function?.name;
        const toolInput = parseToolArguments(tc.function?.arguments);
        const toolUseId = `toolu_${requestNum}_${currentBlockIndex}`;
        console.log(`${colors.yellow}[Raw Tool Call] ${JSON.stringify(tc)}${colors.reset}`);
        console.log(`${colors.magenta}[Sending Tool Use: ${toolName}]${colors.reset}`);
        console.log(`${colors.magenta}Input: ${JSON.stringify(toolInput)}${colors.reset}`);
        // Emit the whole tool_use block at once: start (empty input), one
        // input_json_delta carrying all arguments, then stop.
        res.write(`event: content_block_start\ndata: ${JSON.stringify({ type: 'content_block_start', index: currentBlockIndex, content_block: { type: 'tool_use', id: toolUseId, name: toolName, input: {} } })}\n\n`);
        res.write(`event: content_block_delta\ndata: ${JSON.stringify({ type: 'content_block_delta', index: currentBlockIndex, delta: { type: 'input_json_delta', partial_json: JSON.stringify(toolInput) } })}\n\n`);
        res.write(`event: content_block_stop\ndata: ${JSON.stringify({ type: 'content_block_stop', index: currentBlockIndex })}\n\n`);
        currentBlockIndex++;
      }
    }
    if (data.message?.content) {
      const text = data.message.content;
      // Lazily open a text block at the current index on first text chunk.
      if (contentBlocks[currentBlockIndex] === undefined) {
        res.write(`event: content_block_start\ndata: ${JSON.stringify({ type: 'content_block_start', index: currentBlockIndex, content_block: { type: 'text', text: '' } })}\n\n`);
        contentBlocks[currentBlockIndex] = '';
      }
      process.stdout.write(`${colors.green}${text}${colors.reset}`); // mirror model output to console
      res.write(`event: content_block_delta\ndata: ${JSON.stringify({ type: 'content_block_delta', index: currentBlockIndex, delta: { type: 'text_delta', text } })}\n\n`);
      contentBlocks[currentBlockIndex] += text;
    }
    if (data.done) {
      messageFinished = true;
      // Close a still-open text block before finishing the message.
      if (contentBlocks[currentBlockIndex] !== undefined) {
        res.write(`event: content_block_stop\ndata: ${JSON.stringify({ type: 'content_block_stop', index: currentBlockIndex })}\n\n`);
      }
      res.write(`event: message_delta\ndata: ${JSON.stringify({ type: 'message_delta', delta: { stop_reason: emittedToolUse ? 'tool_use' : 'end_turn' }, usage: { output_tokens: data.eval_count || 0 } })}\n\n`);
      res.write(`event: message_stop\ndata: ${JSON.stringify({ type: 'message_stop' })}\n\n`);
      console.log(`${colors.green}✓${colors.reset}\n`);
    }
  }

  // Read the NDJSON stream, splitting on newlines; the last (possibly
  // partial) line is carried over in `buffer` until more bytes arrive.
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    buffer = lines.pop() || '';
    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed) continue;
      try { processChunk(JSON.parse(trimmed)); }
      catch (e) { console.error(`${colors.red}[Stream Parse Error] ${e.message}${colors.reset}`); console.error(`${colors.red}${line}${colors.reset}`); }
    }
  }
  // Drain whatever remained in the buffer after the stream closed.
  if (buffer.trim()) {
    try { processChunk(JSON.parse(buffer.trim())); }
    catch (e) { console.error(`${colors.red}[Final Buffer Parse Error] ${e.message}${colors.reset}`); console.error(buffer); }
  }
  // Upstream ended without a `done` chunk: synthesize a clean termination.
  if (!messageFinished) {
    if (contentBlocks[currentBlockIndex] !== undefined) {
      res.write(`event: content_block_stop\ndata: ${JSON.stringify({ type: 'content_block_stop', index: currentBlockIndex })}\n\n`);
    }
    res.write(`event: message_delta\ndata: ${JSON.stringify({ type: 'message_delta', delta: { stop_reason: emittedToolUse ? 'tool_use' : 'end_turn' }, usage: { output_tokens: 0 } })}\n\n`);
    res.write(`event: message_stop\ndata: ${JSON.stringify({ type: 'message_stop' })}\n\n`);
  }
  res.end();
}

// ── Proxy endpoint ───────────────────────────────────────────────────────────
// POST /v1/messages — Anthropic-compatible entry point. Any claude-* model
// name is substituted with OLLAMA_MODEL, the body is converted, forwarded to
// Ollama with the bearer token, and the streamed reply is relayed via
// handleResponse. Errors yield a 500 Anthropic-style error body when headers
// have not been sent yet, otherwise the stream is simply ended.
app.post('/v1/messages', async (req, res) => {
  const requestNum = Date.now(); // also serves as the message/tool_use id base
  console.log(`${colors.magenta}━━━ #${requestNum} ━━━${colors.reset}`);
  try {
    const anthropicBody = req.body;
    if (anthropicBody.model?.startsWith('claude-')) { anthropicBody.model = OLLAMA_MODEL; }
    const ollamaBody = convertAnthropicToOllama(anthropicBody);
    console.log(
      `${colors.magenta}[msgs=${ollamaBody.messages.length}, tools=${ollamaBody.tools?.length || 0}, ctx=256k, think=false, model=${OLLAMA_MODEL}]${colors.reset}`
    );
    const response = await fetch(OLLAMA_URL, { method: 'POST', headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${OLLAMA_AUTH}` }, body: JSON.stringify(ollamaBody) });
    if (!response.ok) { const errorText = await response.text(); console.error(`${colors.red}${errorText}${colors.reset}`); throw new Error(`Ollama: ${response.status}`); }
    return handleResponse(response, anthropicBody, res, requestNum);
  } catch (error) {
    console.error(`${colors.red}${error.message}${colors.reset}`);
    if (!res.headersSent) { res.status(500).json({ type: 'error', error: { type: 'api_error', message: error.message } }); } else { res.end(); }
  }
});

// Start the HTTP server and print the effective configuration.
app.listen(PORT, () => {
  console.log(`${colors.magenta}noThinkProxy: localhost:${PORT}${colors.reset}`);
  console.log(`${colors.cyan} Ollama : ${OLLAMA_URL}${colors.reset}`);
  console.log(`${colors.cyan} Modell : ${OLLAMA_MODEL}${colors.reset}`);
  console.log(`${colors.cyan} Ctx : 256k Think: false${colors.reset}\n`);
});