#!/usr/bin/env python3
"""
OpenClaw OpenAI Compatible API Server

Exposes a minimal OpenAI-compatible HTTP API (model listing, chat
completions, legacy text completions) so note apps and OpenAI SDK clients
can talk to OpenClaw by pointing ``base_url`` at this server.
"""
import json
import os
import http.server
import socketserver
import uuid
import time
from urllib.parse import urlparse
from datetime import datetime

# ============= Configuration =============
# NOTE(review): a hard-coded fallback key means the server is reachable with
# a published secret when OPENCLAW_API_KEY is unset — require the env var in
# production.
API_KEY = os.getenv("OPENCLAW_API_KEY", "oc_gFRPndPnFxa3COTX0tAdh2dnN92UmR8U")
PORT = 8080
AGENT_URL = os.getenv("OPENCLAW_AGENT_URL", "http://localhost:11434")


# ============= Helpers =============
def generate_id():
    """Return an OpenAI-style completion id: 'chatcmpl-' + 24 hex chars."""
    return f"chatcmpl-{uuid.uuid4().hex[:24]}"


def current_timestamp():
    """Return current Unix time as an int (the OpenAI 'created' field)."""
    return int(time.time())


# ============= API handler =============
class OpenAIHandler(http.server.BaseHTTPRequestHandler):
    """Request handler implementing a subset of the OpenAI REST API."""

    def log_message(self, format, *args):
        # Silence the default per-request stderr logging.
        pass

    # ----- auth -----

    def _authorized(self):
        """True if the Authorization header carries the configured key.

        Accepts 'Bearer <key>' or the bare key, compared EXACTLY.
        (The previous startswith() check accepted any token that merely
        began with the real key.)
        """
        auth = self.headers.get('Authorization', '')
        return auth == f'Bearer {API_KEY}' or auth == API_KEY

    def _reject_auth(self):
        """Send the OpenAI-style 401 invalid-key error body."""
        self.send_json({
            "error": {
                "message": "Invalid API Key",
                "type": "authentication_error",
                "code": "invalid_api_key",
            }
        }, status=401)

    # ----- plumbing -----

    def send_json(self, data, status=200):
        """Serialize *data* as UTF-8 JSON and send it with CORS headers."""
        self.send_response(status)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
        self.send_header('Access-Control-Allow-Headers', 'Authorization, Content-Type')
        self.end_headers()
        self.wfile.write(json.dumps(data, ensure_ascii=False).encode('utf-8'))

    def do_OPTIONS(self):
        # CORS preflight: an empty 200 is sufficient.
        self.send_response(200)
        self.end_headers()

    # ----- GET endpoints -----

    def do_GET(self):
        parsed = urlparse(self.path)

        # Health probes are served unauthenticated so load balancers and
        # uptime checks can reach them without the key.
        if parsed.path in ('/health', '/'):
            self.send_json({
                "status": "healthy",
                "timestamp": datetime.now().isoformat(),
                "version": "1.0.0",
            })
            return

        if not self._authorized():
            self._reject_auth()
            return

        if parsed.path == '/v1/models':
            self.send_json({
                "object": "list",
                "data": [{
                    "id": "openclaw",
                    "object": "model",
                    "created": current_timestamp(),
                    "owned_by": "openclaw",
                }],
            })
        else:
            self.send_json({"error": "Not found"}, status=404)

    # ----- POST endpoints -----

    def do_POST(self):
        parsed = urlparse(self.path)

        if not self._authorized():
            self._reject_auth()
            return

        length = int(self.headers.get('Content-Length', 0))
        body = self.rfile.read(length).decode('utf-8')
        try:
            data = json.loads(body) if body else {}
        except json.JSONDecodeError:  # was a bare except: far too broad
            self.send_json({"error": "Invalid JSON"}, status=400)
            return

        if parsed.path == '/v1/chat/completions':
            self._handle_chat_completion(data)
        elif parsed.path == '/v1/completions':
            self._handle_text_completion(data)
        else:
            self.send_json({"error": "Not found"}, status=404)

    def _handle_chat_completion(self, data):
        """Serve POST /v1/chat/completions (streaming and non-streaming)."""
        model = data.get('model', 'openclaw')
        messages = data.get('messages', [])
        stream = data.get('stream', False)

        # Use the MOST RECENT user turn. The original scanned forward and
        # took the first user message, which is wrong for multi-turn chats.
        user_message = ""
        for msg in reversed(messages):
            if msg.get('role') == 'user':
                user_message = msg.get('content', '')
                break

        if not user_message:
            self.send_json({
                "error": {"message": "No user message",
                          "type": "invalid_request_error"}
            }, status=400)
            return

        # Placeholder reply; a real deployment would forward to AGENT_URL.
        response_content = f"收到消息: {user_message}\n\n(API 已收到,正在处理...)"

        if stream:
            self._stream_chat_response(model, response_content)
            return

        # Non-streaming response with a rough whitespace-token usage count.
        prompt_tokens = len(user_message.split())
        completion_tokens = len(response_content.split())
        self.send_json({
            "id": generate_id(),
            "object": "chat.completion",
            "created": current_timestamp(),
            "model": model,
            "choices": [{
                "index": 0,
                "message": {"role": "assistant", "content": response_content},
                "finish_reason": "stop",
            }],
            "usage": {
                "prompt_tokens": prompt_tokens,
                "completion_tokens": completion_tokens,
                "total_tokens": prompt_tokens + completion_tokens,
            },
            "system_fingerprint": "fp_openclaw_1",
        })

    def _stream_chat_response(self, model, content):
        """Emit *content* as SSE chat.completion.chunk events, then [DONE]."""
        self.send_response(200)
        self.send_header('Content-Type', 'text/event-stream')
        self.send_header('Cache-Control', 'no-cache')
        self.send_header('Connection', 'keep-alive')
        self.end_headers()

        words = content.split(' ')
        last = len(words) - 1
        for i, word in enumerate(words):
            chunk_data = {
                "id": generate_id(),
                "object": "chat.completion.chunk",
                "created": current_timestamp(),
                "model": model,
                "choices": [{
                    "index": 0,
                    # No separator after the final word, so the concatenated
                    # deltas reproduce the text exactly (the original appended
                    # a stray trailing space to the last chunk).
                    "delta": {"content": word + " " if i < last else word},
                    "finish_reason": "stop" if i == last else None,
                }],
            }
            payload = json.dumps(chunk_data, ensure_ascii=False)
            self.wfile.write(f"data: {payload}\n\n".encode('utf-8'))
        self.wfile.write(b"data: [DONE]\n\n")

    def _handle_text_completion(self, data):
        """Serve POST /v1/completions (legacy text-completion shape)."""
        prompt = data.get('prompt', '')
        self.send_json({
            "id": generate_id(),
            "object": "text_completion",
            "created": current_timestamp(),
            "model": data.get('model', 'openclaw-text'),
            "choices": [{
                "text": f"响应: {prompt}",
                "index": 0,
                "finish_reason": "stop",
            }],
            # NOTE(review): usage numbers are placeholders, not real counts.
            "usage": {
                "prompt_tokens": 10,
                "completion_tokens": 20,
                "total_tokens": 30,
            },
        })


# ============= Startup =============
if __name__ == '__main__':
    print("OpenClaw OpenAI Compatible API v1.0")
    print(f"API Key: {API_KEY}")
    print(f"Port: {PORT}")
    print("\nEndpoints:")
    print("  GET  /health              - Health check")
    print("  GET  /v1/models           - List models")
    print("  POST /v1/chat/completions - Chat completion")
    print("  POST /v1/completions      - Text completion")
    print("\n兼容 OpenAI SDK 用法:")
    print(f'  client = OpenAI(api_key="your-key", base_url="http://localhost:{PORT}/v1")')
    print('  response = client.chat.completions.create(model="openclaw", messages=[{"role":"user","content":"hi"}])')
    print("\nCurl 示例:")
    print(f'  curl -X POST http://localhost:{PORT}/v1/chat/completions \\')
    print(f'    -H "Authorization: Bearer {API_KEY}" \\')
    print('    -H "Content-Type: application/json" \\')
    print('    -d \'{"model":"openclaw","messages":[{"role":"user","content":"你好"}]}\'')

    # Allow immediate restart after a crash/Ctrl-C without
    # "Address already in use".
    socketserver.TCPServer.allow_reuse_address = True
    with socketserver.TCPServer(("", PORT), OpenAIHandler) as httpd:
        print(f"\nServer running on http://localhost:{PORT}")
        httpd.serve_forever()