Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions openrouter-api/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# How to Use the OpenRouter API to Access Multiple AI Models via Python

This folder contains supporting materials for the Real Python tutorial [How to Use the OpenRouter API to Access Multiple AI Models via Python](https://realpython.com/openrouter-api/).
20 changes: 20 additions & 0 deletions openrouter-api/ask_auto_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import os
import requests

OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"

# Read the key from the environment so it never lives in source control.
api_key = os.getenv("OPENROUTER_API_KEY")

headers = {
    "Authorization": f"Bearer {api_key}",
    "Content-Type": "application/json"
}
# "openrouter/auto" asks OpenRouter to route the prompt to a model it
# selects; the chosen model is reported back in the response body.
payload = {
    "model": "openrouter/auto",
    "messages": [{"role": "user", "content": "Say hello in one sentence."}]
}
response = requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
# Fail fast on HTTP errors (e.g. 401 for a bad key) instead of crashing
# later with a KeyError when the expected JSON fields are absent.
response.raise_for_status()
data = response.json()

print(f"Model: {data.get('model')}")
print(f"Response: {data['choices'][0]['message']['content']}")
24 changes: 24 additions & 0 deletions openrouter-api/ask_specific_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import os
import requests

OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"

# Read the key from the environment so it never lives in source control.
api_key = os.getenv("OPENROUTER_API_KEY")

headers = {
    "Authorization": f"Bearer {api_key}",
    "Content-Type": "application/json"
}
# Pin an explicit model instead of letting OpenRouter choose one.
payload = {
    "model": "openai/gpt-3.5-turbo",
    "messages": [{"role": "user", "content": "Say hello in one sentence."}]
}
response = requests.post(OPENROUTER_API_URL, headers=headers, json=payload)
# Surface HTTP-level failures (bad key, rate limit) immediately rather
# than parsing an error body as if it were a successful completion.
response.raise_for_status()
data = response.json()

if model := data.get('model'):
    # NOTE(review): assumes a successful response always carries a
    # 'provider' field alongside 'model' — confirm against the API docs.
    print(f"Model: {model} by {data['provider']}")
    print(f"Response: {data['choices'][0]['message']['content']}")
else:
    print("No model found in the response.")
    print(f"Response: {data}")
32 changes: 32 additions & 0 deletions openrouter-api/fallback_models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import os
import requests

OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"

api_key = os.getenv("OPENROUTER_API_KEY")

def make_request_with_fallback(models_list, messages):
    """POST a chat completion listing several models in priority order.

    OpenRouter's "models" field names fallbacks: when the first model is
    unavailable, the request is retried with the next one. The raw
    ``requests.Response`` is returned so the caller decides how to
    handle errors and parse the body.
    """
    request_headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }
    body = {"models": models_list, "messages": messages}
    return requests.post(OPENROUTER_API_URL, headers=request_headers, json=body)

# Try GPT-5 first, then progressively older models if it is unavailable.
response = make_request_with_fallback(
    models_list=[
        "openai/gpt-5",
        "openai/gpt-3.5-turbo",
        "openai/gpt-3.5-turbo-16k"
    ],
    messages=[{"role": "user", "content": "What is the capital of France?"}]
)

# Stop on HTTP errors before treating the body as a completion — matches
# the error handling used by make_request() in route_requests.py.
response.raise_for_status()

data = response.json()
if model := data.get('model'):
    # Report which model actually served the request after fallback.
    print(f"Model: {model} by {data['provider']}")
    print(f"Response: {data['choices'][0]['message']['content']}")
else:
    print("No model found in the response.")
    print(f"Response: {data}")
14 changes: 14 additions & 0 deletions openrouter-api/get_models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import os
import requests

OPENROUTER_MODELS_URL = "https://openrouter.ai/api/v1/models"

# Read the key from the environment so it never lives in source control.
api_key = os.getenv("OPENROUTER_API_KEY")

headers = {"Authorization": f"Bearer {api_key}"}
response = requests.get(OPENROUTER_MODELS_URL, headers=headers)
# Fail fast on HTTP errors instead of indexing into an error body below.
response.raise_for_status()
data = response.json()

# The models catalog lives under the top-level "data" key.
models = data.get("data", [])
print(f"Success! Found {len(models)} models via OpenRouter.")
print(f"Examples: {', '.join(m['id'] for m in models[:5])}")
1 change: 1 addition & 0 deletions openrouter-api/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
requests
32 changes: 32 additions & 0 deletions openrouter-api/route_requests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import os
import requests

OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"

api_key = os.getenv("OPENROUTER_API_KEY")

def make_request(model, messages, provider_config=None):
    """POST a chat completion for *model* and return the parsed JSON body.

    An optional *provider_config* dict is forwarded as the "provider"
    field to steer OpenRouter's provider routing (only when non-empty,
    matching the original truthiness check).

    Raises ``requests.HTTPError`` for a non-2xx response.
    """
    request_headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }
    body = {"model": model, "messages": messages}
    if provider_config:
        body["provider"] = provider_config

    reply = requests.post(OPENROUTER_API_URL, headers=request_headers, json=body)
    reply.raise_for_status()
    return reply.json()

# Ask for the cheapest provider offering this model via the "sort" hint.
data = make_request(
    model="meta-llama/llama-3.1-70b-instruct",
    messages=[{"role": "user", "content": "Explain AI in one sentence."}],
    provider_config={"sort": "price"}
)

model = data.get('model')
if model:
    # Report which model/provider actually served the routed request.
    print(f"Model: {model} by {data['provider']}")
    print(f"Response: {data['choices'][0]['message']['content']}")
else:
    print("No model found in the response.")
    print(f"Response: {data}")
Loading