Model
OpenAI: GPT-5.4
Performance and pricing snapshot from the global rankings; composite scoring matches the on-site model board.
Data updated: —
About this model
GPT-5.4 is OpenAI’s flagship-class multimodal model in this snapshot, with pricing and context window reflected in our global leaderboard. Compare list quotes below and use the integration snippet with your OpenRouter API key to prototype calls before hardening auth and retries.
Key metrics
- Rank
- 2
- Kind
- Multimodal
- Core metric
- 1.1M ctx
- 1M tokens (avg)
- $8.75
Hippo's Quick Action
OpenRouter chat completions URL; set Authorization and body per docs.
Price calculator
Est. monthly cost (USD): —
Price comparison (snapshot)
| Source / aggregator | Price / 1M tokens | Latency |
|---|---|---|
| OpenRouter | $8.75 | — |
Figures come from the imported leaderboard snapshot; live aggregator pricing and latency can change.
How to integrate
OpenRouter exposes an OpenAI-compatible Chat Completions endpoint. Use the tabs below to switch example languages. Replace the model id with the one from your provider page if you route elsewhere.
// Node.js 18+ — set OPENROUTER_API_KEY in your environment
// Uses the built-in fetch API; no extra dependencies required.
const endpoint = 'https://openrouter.ai/api/v1/chat/completions';
const payload = {
  model: "openai/gpt-5.4",
  messages: [{ role: 'user', content: 'Hello' }],
};
const res = await fetch(endpoint, {
  method: 'POST',
  headers: {
    // Read the key from the environment — never hard-code credentials.
    'Authorization': `Bearer ${process.env.OPENROUTER_API_KEY}`,
    'Content-Type': 'application/json',
  },
  body: JSON.stringify(payload),
});
const data = await res.json();
console.log(data);
# pip install requests
import os

import requests

# Chat Completions payload: model id + message list (OpenAI-compatible schema).
payload = {
    "model": "openai/gpt-5.4",
    "messages": [{"role": "user", "content": "Hello"}],
}
resp = requests.post(
    "https://openrouter.ai/api/v1/chat/completions",
    headers={
        # Read the key from the environment — never hard-code credentials.
        "Authorization": f"Bearer {os.environ['OPENROUTER_API_KEY']}",
    },
    # json= serializes the payload and sets Content-Type: application/json,
    # replacing the manual json.dumps(...) + explicit header.
    json=payload,
    # Without a timeout, requests can block forever on a stalled connection.
    timeout=30,
)
resp.raise_for_status()  # surface HTTP 4xx/5xx as exceptions instead of bad JSON
print(resp.json())
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
)
func main() {
key := os.Getenv("OPENROUTER_API_KEY")
payload := map[string]any{
"model": "openai/gpt-5.4",
"messages": []map[string]string{{"role": "user", "content": "Hello"}},
}
b, err := json.Marshal(payload)
if err != nil {
panic(err)
}
req, err := http.NewRequest(http.MethodPost, "https://openrouter.ai/api/v1/chat/completions", bytes.NewReader(b))
if err != nil {
panic(err)
}
req.Header.Set("Authorization", "Bearer "+key)
req.Header.Set("Content-Type", "application/json")
res, err := http.DefaultClient.Do(req)
if err != nil {
panic(err)
}
defer res.Body.Close()
out, err := io.ReadAll(res.Body)
if err != nil {
panic(err)
}
fmt.Println(string(out))
}import java.net.URI;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

/** Minimal OpenRouter chat-completions example using java.net.http (Java 11+). */
public class OpenRouterChat {
    public static void main(String[] args) throws Exception {
        // Read the key from the environment — never hard-code credentials.
        String apiKey = System.getenv("OPENROUTER_API_KEY");
        String requestBody =
            "{\"model\":\"openai/gpt-5.4\",\"messages\":[{\"role\":\"user\",\"content\":\"Hello\"}]}";
        HttpRequest request = HttpRequest.newBuilder(URI.create("https://openrouter.ai/api/v1/chat/completions"))
            .header("Authorization", "Bearer " + apiKey)
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(requestBody))
            .build();
        HttpResponse<String> response =
            HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

// Minimal OpenRouter chat-completions call; fails fast when the API key is absent.
fun main() {
    val apiKey = System.getenv("OPENROUTER_API_KEY") ?: error("OPENROUTER_API_KEY")
    val requestBody =
        "{\"model\":\"openai/gpt-5.4\",\"messages\":[{\"role\":\"user\",\"content\":\"Hello\"}]}"
    val request = HttpRequest.newBuilder(URI.create("https://openrouter.ai/api/v1/chat/completions"))
        .header("Authorization", "Bearer $apiKey")
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(requestBody))
        .build()
    val client = HttpClient.newHttpClient()
    println(client.send(request, HttpResponse.BodyHandlers.ofString()).body())
}
use serde_json::json;
fn main() -> Result<(), Box<dyn std::error::Error>> {
let key = std::env::var("OPENROUTER_API_KEY")?;
let model = "openai/gpt-5.4";
let body = json!({
"model": model,
"messages": [{"role": "user", "content": "Hello"}]
});
let resp = ureq::post("https://openrouter.ai/api/v1/chat/completions")
.set("Authorization", &format!("Bearer {}", key))
.set("Content-Type", "application/json")
.send_string(&body.to_string())?;
println!("{}", resp.into_string()?);
Ok(())
}import java.net.URI
import java.net.URI
import java.net.http.{HttpClient, HttpRequest, HttpResponse}

/** Sends one chat message to OpenRouter and prints the raw JSON response (Scala 3).
  *
  * Note: the `@main def run(): Unit =` form uses significant indentation and opens
  * no brace, so the stray trailing `}` in the previous version was a syntax error
  * and has been removed.
  */
@main def run(): Unit =
  // Fail fast with a clear message when the key is missing from the environment.
  val key = sys.env.getOrElse("OPENROUTER_API_KEY", throw new RuntimeException("OPENROUTER_API_KEY"))
  val body = "{\"model\":\"openai/gpt-5.4\",\"messages\":[{\"role\":\"user\",\"content\":\"Hello\"}]}"
  val req = HttpRequest.newBuilder()
    .uri(URI.create("https://openrouter.ai/api/v1/chat/completions"))
    .header("Authorization", s"Bearer $key")
    .header("Content-Type", "application/json")
    .POST(HttpRequest.BodyPublishers.ofString(body))
    .build()
  val res = HttpClient.newHttpClient().send(req, HttpResponse.BodyHandlers.ofString())
  println(res.body())
# macOS/Linux — set OPENROUTER_API_KEY in your environment
# Double quotes let the shell expand $OPENROUTER_API_KEY; the JSON body stays single-quoted.
curl --silent --show-error 'https://openrouter.ai/api/v1/chat/completions' \
  --header "Authorization: Bearer $OPENROUTER_API_KEY" \
  --header "Content-Type: application/json" \
  --data '{"model":"openai/gpt-5.4","messages":[{"role":"user","content":"Hello"}]}'

Store API keys in environment variables or a secret manager—never commit them to source control.
Pick one or two more models on the global rankings page and use Compare to view them side by side.