Python Integration
Build AI applications with Python, FastAPI, and Super Agent Stack.
Setup
Install Dependencies
bash
pip install openai fastapi uvicorn python-dotenv

Environment Variables
.env
OPENROUTER_KEY=your_openrouter_key_here
SUPER_AGENT_KEY=your_super_agent_key_here

Basic Usage
Session ID via Headers
The OpenAI SDK doesn't support custom parameters like
sessionId in the create() method. Pass them via default_headers instead!

chat.py
from openai import OpenAI
import os
import uuid
from dotenv import load_dotenv

load_dotenv()

# One session per process run; reuse an existing ID to continue a conversation.
session_id = str(uuid.uuid4())

# Super Agent Stack parameters travel as default headers on every request,
# because the OpenAI SDK rejects unknown keyword arguments in create().
_default_headers = {
    "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    "sessionId": session_id,  # ✅ Pass in headers
    "useRAG": "true",
    "saveToMemory": "true",
}

client = OpenAI(
    base_url="https://www.superagentstack.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers=_default_headers,
)
def chat(message: str) -> str:
    """Send one user message and return the assistant's reply text."""
    # ❌ Don't pass sessionId here - it won't work! The SDK would raise a
    # TypeError; the session ID already rides on the client's default headers.
    reply = client.chat.completions.create(
        model="anthropic/claude-sonnet-4.5",
        messages=[{"role": "user", "content": message}],
    )
    return reply.choices[0].message.content

# Usage
response = chat("Hello!")
print(response)

# Next message in same session - AI remembers context
response = chat("What did I just say?")
print(response)  # AI will remember "Hello!"

Common Error
If you see
TypeError: Completions.create() got an unexpected keyword argument 'sessionId', you're trying to pass sessionId as a parameter. Use headers instead!

FastAPI Application
main.py
from fastapi import FastAPI, HTTPException
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from openai import OpenAI
import os
app = FastAPI()

# Request schema for POST /chat.
class ChatRequest(BaseModel):
    message: str  # the user's message text
    session_id: str  # conversation identifier; requests sharing it share memory
    stream: bool = False  # when True, the reply is streamed as plain text
def get_client(session_id: str) -> OpenAI:
    """Build an OpenAI client bound to *session_id*.

    The OpenAI SDK rejects unknown keyword arguments in create(), so the
    Super Agent Stack parameters are attached as default headers instead.
    """
    headers = {
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
        "sessionId": session_id,  # ✅ headers, not create() kwargs
        "useRAG": "true",
        "saveToMemory": "true",
    }
    return OpenAI(
        base_url="https://www.superagentstack.com/api/v1",
        api_key=os.environ.get("OPENROUTER_KEY"),
        default_headers=headers,
    )
@app.post("/chat")
async def chat(request: ChatRequest):
try:
client = get_client(request.session_id)
if request.stream:
return StreamingResponse(
stream_chat(request.message, request.session_id),
media_type="text/plain"
)
else:
completion = client.chat.completions.create(
model="anthropic/claude-sonnet-4.5",
messages=[{"role": "user", "content": request.message}],
)
return {
"response": completion.choices[0].message.content,
"metadata": getattr(completion, '_metadata', {})
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
def stream_chat(message: str, session_id: str):
    """Yield the assistant's reply incrementally, one text delta at a time."""
    session_client = get_client(session_id)
    events = session_client.chat.completions.create(
        model="anthropic/claude-sonnet-4.5",
        messages=[{"role": "user", "content": message}],
        stream=True,
    )
    for event in events:
        delta = event.choices[0].delta.content
        if delta:
            yield delta
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)

Run the Server
bash
python main.py

Test with cURL
bash
curl -X POST http://localhost:8000/chat \
-H "Content-Type: application/json" \
-d '{
"message": "Hello!",
"session_id": "user-123",
"stream": false
}'

Flask Application
app.py
from flask import Flask, request, jsonify
from openai import OpenAI
import os
app = Flask(__name__)
def get_client(session_id: str) -> OpenAI:
    """Return an OpenAI client whose requests carry *session_id*.

    Super Agent Stack options are sent as default headers because the SDK's
    create() call does not accept custom keyword arguments.
    """
    sas_headers = {
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
        "sessionId": session_id,  # ✅ session ID goes in headers
        "useRAG": "true",
        "saveToMemory": "true",
    }
    return OpenAI(
        base_url="https://www.superagentstack.com/api/v1",
        api_key=os.environ.get("OPENROUTER_KEY"),
        default_headers=sas_headers,
    )
@app.route('/chat', methods=['POST'])
def chat():
    """Handle POST /chat: {"message": ..., "session_id": ...} -> {"response": ...}.

    Returns a JSON 400 when the body is missing, not JSON, or lacks the
    required fields, and a JSON 500 on upstream failures.
    """
    # request.json raises (or yields None) for a non-JSON body, which would
    # crash the .get() calls below with an HTML 500; silent=True gives None
    # instead, and `or {}` keeps the validation path uniform.
    data = request.get_json(silent=True) or {}
    message = data.get('message')
    session_id = data.get('session_id')
    if not message or not session_id:
        return jsonify({"error": "message and session_id required"}), 400
    try:
        client = get_client(session_id)
        completion = client.chat.completions.create(
            model="anthropic/claude-sonnet-4.5",
            messages=[{"role": "user", "content": message}],
        )
        return jsonify({
            "response": completion.choices[0].message.content
        })
    except Exception as e:
        # Surface upstream/API failures as JSON rather than an HTML 500 page.
        return jsonify({"error": str(e)}), 500
if __name__ == '__main__':
app.run(debug=True, port=5000)

CLI Application
cli_chat.py
from openai import OpenAI
import os
import uuid

# A fresh session ID per run gives each CLI conversation its own memory.
session_id = str(uuid.uuid4())

# Super Agent Stack options ride along as default headers; the OpenAI SDK
# rejects them as create() keyword arguments.
_headers = {
    "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
    "sessionId": session_id,  # ✅ Session ID in headers
    "useRAG": "true",
    "saveToMemory": "true",
}

client = OpenAI(
    base_url="https://www.superagentstack.com/api/v1",
    api_key=os.environ.get("OPENROUTER_KEY"),
    default_headers=_headers,
)
def chat(message: str):
    """Return the assistant's reply text, or an error string on failure."""
    try:
        result = client.chat.completions.create(
            model="anthropic/claude-sonnet-4.5",
            messages=[{"role": "user", "content": message}],
        )
    except Exception as exc:
        return f"Error: {exc}"
    return result.choices[0].message.content
def main():
    """Run an interactive chat loop until the user types 'exit'."""
    print(f"Chat started! Session ID: {session_id}")
    print("Type 'exit' to quit.\n")
    # Prompt, check for the exit sentinel, and reply — all in one loop head.
    while (user_input := input("You: ")).lower() != 'exit':
        print(f"\nAI: {chat(user_input)}\n")
    print("Goodbye!")
if __name__ == "__main__":
main()

Django Integration
views.py
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from openai import OpenAI
import json
import os
def get_client(session_id: str) -> OpenAI:
    """Create a per-session OpenAI client for the Django views below.

    The session ID and other Super Agent Stack flags must be passed as
    default headers; the SDK does not accept them in create().
    """
    extra_headers = {
        "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
        "sessionId": session_id,  # ✅ headers, not the request body
        "useRAG": "true",
        "saveToMemory": "true",
    }
    return OpenAI(
        base_url="https://www.superagentstack.com/api/v1",
        api_key=os.environ.get("OPENROUTER_KEY"),
        default_headers=extra_headers,
    )
@csrf_exempt
def chat_view(request):
    """POST-only chat endpoint; returns the model's reply as JSON.

    Responds 400 for an invalid or incomplete body and 500 for upstream
    failures. (csrf_exempt: intended for API clients without CSRF tokens.)
    """
    if request.method == 'POST':
        # A malformed body would otherwise raise JSONDecodeError and bubble
        # up as an HTML 500; turn it into a structured 400 instead.
        try:
            data = json.loads(request.body)
        except (json.JSONDecodeError, UnicodeDecodeError):
            return JsonResponse({"error": "invalid JSON body"}, status=400)
        message = data.get('message')
        session_id = data.get('session_id')
        if not message or not session_id:
            return JsonResponse({"error": "message and session_id required"}, status=400)
        try:
            client = get_client(session_id)
            completion = client.chat.completions.create(
                model="anthropic/claude-sonnet-4.5",
                messages=[{"role": "user", "content": message}],
            )
            return JsonResponse({
                "response": completion.choices[0].message.content
            })
        except Exception as e:
            return JsonResponse({"error": str(e)}, status=500)
return JsonResponse({"error": "Method not allowed"}, status=405)Direct HTTP Requests
If you prefer using direct HTTP requests instead of the OpenAI SDK, you can pass custom parameters in the request body.
Using cURL
bash
# Session ID in request body (works with direct HTTP)
curl -X POST https://www.superagentstack.com/api/v1/chat/completions \
-H "Authorization: Bearer YOUR_OPENROUTER_KEY" \
-H "superAgentKey: YOUR_SUPER_AGENT_KEY" \
-H "Content-Type: application/json" \
-d '{
"model": "anthropic/claude-sonnet-4.5",
"messages": [
{"role": "user", "content": "Hello!"}
],
"sessionId": "user-123",
"useRAG": true,
"saveToMemory": true
}'

Using Python requests Library
direct_http.py
import requests
import os
def chat_direct_http(message: str, session_id: str):
    """Call the chat completions endpoint directly with `requests`.

    Unlike the OpenAI SDK, a raw HTTP request may carry the Super Agent
    Stack parameters (sessionId, useRAG, ...) in the JSON body.
    Raises requests.HTTPError on a non-2xx response.
    """
    response = requests.post(
        "https://www.superagentstack.com/api/v1/chat/completions",
        headers={
            "Authorization": f"Bearer {os.environ.get('OPENROUTER_KEY')}",
            "superAgentKey": os.environ.get("SUPER_AGENT_KEY"),
            "Content-Type": "application/json"
        },
        json={
            "model": "anthropic/claude-sonnet-4.5",
            "messages": [
                {"role": "user", "content": message}
            ],
            # ✅ With direct HTTP, you can pass sessionId in body
            "sessionId": session_id,
            "useRAG": True,
            "saveToMemory": True,
            "useGlobalKnowledge": False
        },
        # requests has no default timeout; without one a stalled server
        # would hang this call forever.
        timeout=60,
    )
    response.raise_for_status()
    data = response.json()
    return data['choices'][0]['message']['content']
# Usage — reuse the same session_id to continue one conversation
response = chat_direct_http("Hello!", "user-123")
print(response)

Body vs Headers
- OpenAI SDK: Use headers (default_headers)
- Direct HTTP: Use body parameters (easier)
- Both work: API accepts both methods
Best Practices
- Use environment variables: Never hardcode API keys
- Implement error handling: Catch and handle exceptions properly
- Add rate limiting: Protect your endpoints from abuse
- Use async/await: For better performance with FastAPI
- Log requests: Monitor API usage and errors