OpenAI
Responses API
Call OpenAI models using the Responses API.
Get Your API Key
Go to the API Keys page to create your API key.
API Base URL
https://tokenoff.com/api
OpenAI models support the /v1/responses endpoint, fully compatible with the OpenAI Responses API.
curl
Blocking Call
curl https://tokenoff.com/api/v1/responses \
-H "Authorization: Bearer your_api_key_here" \
-H "Content-Type: application/json" \
-d '{
"model": "gpt-5.4",
"input": [
{
"type": "message",
"role": "user",
"content": [
{"type": "input_text", "text": "Hello"}
]
}
]
}'
Output:
{
"id": "resp_0ede5d514f3134ce0069e0c56cd4908197b2609163d3b20dcd",
"object": "response",
"created_at": 1776338284,
"status": "completed",
"background": false,
"completed_at": 1776338285,
"content_filters": null,
"error": null,
"frequency_penalty": 0.0,
"incomplete_details": null,
"instructions": null,
"max_output_tokens": null,
"max_tool_calls": null,
"model": "gpt-5.4",
"output": [
{
"id": "msg_0ede5d514f3134ce0069e0c56d61e08197b8c88db05e2b03af",
"type": "message",
"status": "completed",
"content": [
{
"type": "output_text",
"annotations": [],
"logprobs": [],
"text": "Hello! How can I help?"
}
],
"phase": "final_answer",
"role": "assistant"
}
],
"parallel_tool_calls": true,
"presence_penalty": 0.0,
"previous_response_id": null,
"prompt_cache_key": null,
"prompt_cache_retention": null,
"reasoning": {
"effort": "none",
"summary": null
},
"safety_identifier": null,
"service_tier": "default",
"store": true,
"temperature": 1.0,
"text": {
"format": {
"type": "text"
},
"verbosity": "medium"
},
"tool_choice": "auto",
"tools": [],
"top_logprobs": 0,
"top_p": 0.98,
"truncation": "disabled",
"usage": {
"input_tokens": 7,
"input_tokens_details": {
"cached_tokens": 0
},
"output_tokens": 11,
"output_tokens_details": {
"reasoning_tokens": 0
},
"total_tokens": 18
},
"user": null,
"metadata": {}
}
Streaming Response
curl https://tokenoff.com/api/v1/responses \
-H "Authorization: Bearer your_api_key_here" \
-H "Content-Type: application/json" \
-d '{
"model": "gpt-5.4",
"input": [
{
"type": "message",
"role": "user",
"content": [
{"type": "input_text", "text": "Hello"}
]
}
],
"stream": true
}'
Output:
event: response.created
data: {"type":"response.created","response":{"id":"resp_0ebb288bdd28cf950069e0c5b6c7088194a3390bd883d78953","object":"response","created_at":1776338358,"status":"in_progress","background":false,"completed_at":null,"content_filters":null,"error":null,"frequency_penalty":0.0,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"max_tool_calls":null,"model":"gpt-5.4","output":[],"parallel_tool_calls":true,"presence_penalty":0.0,"previous_response_id":null,"prompt_cache_key":null,"prompt_cache_retention":null,"reasoning":{"effort":"none","summary":null},"safety_identifier":null,"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"},"verbosity":"medium"},"tool_choice":"auto","tools":[],"top_logprobs":0,"top_p":0.98,"truncation":"disabled","usage":null,"user":null,"metadata":{}},"sequence_number":0}
event: response.in_progress
data: {"type":"response.in_progress","response":{"id":"resp_0ebb288bdd28cf950069e0c5b6c7088194a3390bd883d78953","object":"response","created_at":1776338358,"status":"in_progress","background":false,"completed_at":null,"content_filters":null,"error":null,"frequency_penalty":0.0,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"max_tool_calls":null,"model":"gpt-5.4","output":[],"parallel_tool_calls":true,"presence_penalty":0.0,"previous_response_id":null,"prompt_cache_key":null,"prompt_cache_retention":null,"reasoning":{"effort":"none","summary":null},"safety_identifier":null,"service_tier":"auto","store":true,"temperature":1.0,"text":{"format":{"type":"text"},"verbosity":"medium"},"tool_choice":"auto","tools":[],"top_logprobs":0,"top_p":0.98,"truncation":"disabled","usage":null,"user":null,"metadata":{}},"sequence_number":1}
event: response.output_item.added
data: {"type":"response.output_item.added","item":{"id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","type":"message","status":"in_progress","content":[],"phase":"final_answer","role":"assistant"},"output_index":0,"sequence_number":2}
event: response.content_part.added
data: {"type":"response.content_part.added","content_index":0,"item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","output_index":0,"part":{"type":"output_text","annotations":[],"logprobs":[],"text":""},"sequence_number":3}
event: response.output_text.delta
data: {"type":"response.output_text.delta","content_index":0,"delta":"Hello","item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"obfuscation":"PQG6rAgXwIS","output_index":0,"sequence_number":4}
event: response.output_text.delta
data: {"type":"response.output_text.delta","content_index":0,"delta":"!","item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"obfuscation":"05oCdMjfVQIl2nI","output_index":0,"sequence_number":5}
event: response.output_text.delta
data: {"type":"response.output_text.delta","content_index":0,"delta":" How","item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"obfuscation":"mrsMyP9aG5d6","output_index":0,"sequence_number":6}
event: response.output_text.delta
data: {"type":"response.output_text.delta","content_index":0,"delta":" can","item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"obfuscation":"vxot3eZ0O24n","output_index":0,"sequence_number":7}
event: response.output_text.delta
data: {"type":"response.output_text.delta","content_index":0,"delta":" I","item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"obfuscation":"VtWDc1nve4DmHu","output_index":0,"sequence_number":8}
event: response.output_text.delta
data: {"type":"response.output_text.delta","content_index":0,"delta":" help","item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"obfuscation":"iRjU41lFZhR","output_index":0,"sequence_number":9}
event: response.output_text.delta
data: {"type":"response.output_text.delta","content_index":0,"delta":"?","item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"obfuscation":"Gf768wfM626vI1E","output_index":0,"sequence_number":10}
event: response.output_text.done
data: {"type":"response.output_text.done","content_index":0,"item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","logprobs":[],"output_index":0,"sequence_number":11,"text":"Hello! How can I help?"}
event: response.content_part.done
data: {"type":"response.content_part.done","content_index":0,"item_id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","output_index":0,"part":{"type":"output_text","annotations":[],"logprobs":[],"text":"Hello! How can I help?"},"sequence_number":12}
event: response.output_item.done
data: {"type":"response.output_item.done","item":{"id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"logprobs":[],"text":"Hello! How can I help?"}],"phase":"final_answer","role":"assistant"},"output_index":0,"sequence_number":13}
event: response.completed
data: {"type":"response.completed","response":{"id":"resp_0ebb288bdd28cf950069e0c5b6c7088194a3390bd883d78953","object":"response","created_at":1776338358,"status":"completed","background":false,"completed_at":1776338359,"content_filters":null,"error":null,"frequency_penalty":0.0,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"max_tool_calls":null,"model":"gpt-5.4","output":[{"id":"msg_0ebb288bdd28cf950069e0c5b72d24819499b67d5e9f42ac9d","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"logprobs":[],"text":"Hello! How can I help?"}],"phase":"final_answer","role":"assistant"}],"parallel_tool_calls":true,"presence_penalty":0.0,"previous_response_id":null,"prompt_cache_key":null,"prompt_cache_retention":null,"reasoning":{"effort":"none","summary":null},"safety_identifier":null,"service_tier":"default","store":true,"temperature":1.0,"text":{"format":{"type":"text"},"verbosity":"medium"},"tool_choice":"auto","tools":[],"top_logprobs":0,"top_p":0.98,"truncation":"disabled","usage":{"input_tokens":7,"input_tokens_details":{"cached_tokens":0},"output_tokens":11,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":18},"user":null,"metadata":{}},"sequence_number":14}
Python
pip install openai
Blocking Call
from openai import OpenAI
client = OpenAI(
api_key="your_api_key_here",
base_url="https://tokenoff.com/api/v1",
)
response = client.responses.create(
model="gpt-5.4",
input=[
{
"type": "message",
"role": "user",
"content": [
{"type": "input_text", "text": "Hello"}
],
}
],
)
print(response.output_text)
Output:
Hello! How can I help you today?
Streaming Response
from openai import OpenAI
client = OpenAI(
api_key="your_api_key_here",
base_url="https://tokenoff.com/api/v1",
)
response = client.responses.create(
model="gpt-5.4",
input=[
{
"type": "message",
"role": "user",
"content": [
{"type": "input_text", "text": "Hello"}
],
}
],
stream=True,
)
for event in response:
print(event)
Output:
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content='', function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content='Hello', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content='!', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content=' How', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content=' can', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content=' I', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content=' help', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content=' you', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content=' today', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content='?', function_call=None, refusal=None, role=None, tool_calls=None), finish_reason=None, index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=None)
ChatCompletionChunk(id='resp_00a6d0440377313b0069e0c71e487c819482704ea3a47b1038', choices=[Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='stop', index=0, logprobs=None)], created=1776338718, model='gpt-4.1', object='chat.completion.chunk', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=13, prompt_tokens=7, total_tokens=20, completion_tokens_details=None, prompt_tokens_details=None))
TypeScript
npm install openai
Blocking Call
import OpenAI from "openai";
const client = new OpenAI({
apiKey: "your_api_key_here",
baseURL: "https://tokenoff.com/api/v1",
});
const response = await client.responses.create({
model: "gpt-5.4",
input: [
{
type: "message",
role: "user",
content: [
{ type: "input_text", text: "Hello" },
],
},
],
});
console.log(response.output_text);
Output:
Hello! How can I help?
Streaming Response
import OpenAI from "openai";
const client = new OpenAI({
apiKey: "your_api_key_here",
baseURL: "https://tokenoff.com/api/v1",
});
const response = await client.responses.create({
model: "gpt-5.4",
input: [
{
type: "message",
role: "user",
content: [
{ type: "input_text", text: "Hello" },
],
},
],
stream: true,
});
for await (const event of response) {
console.log(event);
}
Output:
{
type: 'response.created',
response: {
id: 'resp_05c96bf3f567ee650069e0c6a804fc819395b958d72ccefca8',
object: 'response',
created_at: 1776338600,
status: 'in_progress',
background: false,
completed_at: null,
content_filters: null,
error: null,
frequency_penalty: 0,
incomplete_details: null,
instructions: null,
max_output_tokens: null,
max_tool_calls: null,
model: 'gpt-5.4',
output: [],
parallel_tool_calls: true,
presence_penalty: 0,
previous_response_id: null,
prompt_cache_key: null,
prompt_cache_retention: null,
reasoning: { effort: 'none', summary: null },
safety_identifier: null,
service_tier: 'auto',
store: true,
temperature: 1,
text: { format: [Object], verbosity: 'medium' },
tool_choice: 'auto',
tools: [],
top_logprobs: 0,
top_p: 0.98,
truncation: 'disabled',
usage: null,
user: null,
metadata: {}
},
sequence_number: 0
}
{
type: 'response.in_progress',
response: {
id: 'resp_05c96bf3f567ee650069e0c6a804fc819395b958d72ccefca8',
object: 'response',
created_at: 1776338600,
status: 'in_progress',
background: false,
completed_at: null,
content_filters: null,
error: null,
frequency_penalty: 0,
incomplete_details: null,
instructions: null,
max_output_tokens: null,
max_tool_calls: null,
model: 'gpt-5.4',
output: [],
parallel_tool_calls: true,
presence_penalty: 0,
previous_response_id: null,
prompt_cache_key: null,
prompt_cache_retention: null,
reasoning: { effort: 'none', summary: null },
safety_identifier: null,
service_tier: 'auto',
store: true,
temperature: 1,
text: { format: [Object], verbosity: 'medium' },
tool_choice: 'auto',
tools: [],
top_logprobs: 0,
top_p: 0.98,
truncation: 'disabled',
usage: null,
user: null,
metadata: {}
},
sequence_number: 1
}
{
type: 'response.output_item.added',
item: {
id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
type: 'message',
status: 'in_progress',
content: [],
phase: 'final_answer',
role: 'assistant'
},
output_index: 0,
sequence_number: 2
}
{
type: 'response.content_part.added',
content_index: 0,
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
output_index: 0,
part: { type: 'output_text', annotations: [], logprobs: [], text: '' },
sequence_number: 3
}
{
type: 'response.output_text.delta',
content_index: 0,
delta: 'Hello',
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
obfuscation: '17zdmCnqVoo',
output_index: 0,
sequence_number: 4
}
{
type: 'response.output_text.delta',
content_index: 0,
delta: '!',
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
obfuscation: 'uxLgHYMdTFPMM1j',
output_index: 0,
sequence_number: 5
}
{
type: 'response.output_text.delta',
content_index: 0,
delta: ' How',
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
obfuscation: 'DkMSeFRAFhKk',
output_index: 0,
sequence_number: 6
}
{
type: 'response.output_text.delta',
content_index: 0,
delta: ' can',
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
obfuscation: 'A3jZg7VBtZrc',
output_index: 0,
sequence_number: 7
}
{
type: 'response.output_text.delta',
content_index: 0,
delta: ' I',
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
obfuscation: '3wk1NWsADEYKcF',
output_index: 0,
sequence_number: 8
}
{
type: 'response.output_text.delta',
content_index: 0,
delta: ' help',
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
obfuscation: 'vHVLSahOZsA',
output_index: 0,
sequence_number: 9
}
{
type: 'response.output_text.delta',
content_index: 0,
delta: '?',
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
obfuscation: 'Hjq0GpUmeSiTAlv',
output_index: 0,
sequence_number: 10
}
{
type: 'response.output_text.done',
content_index: 0,
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
logprobs: [],
output_index: 0,
sequence_number: 11,
text: 'Hello! How can I help?'
}
{
type: 'response.content_part.done',
content_index: 0,
item_id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
output_index: 0,
part: {
type: 'output_text',
annotations: [],
logprobs: [],
text: 'Hello! How can I help?'
},
sequence_number: 12
}
{
type: 'response.output_item.done',
item: {
id: 'msg_05c96bf3f567ee650069e0c6a88f288193b0672b72501fb6eb',
type: 'message',
status: 'completed',
content: [ [Object] ],
phase: 'final_answer',
role: 'assistant'
},
output_index: 0,
sequence_number: 13
}
{
type: 'response.completed',
response: {
id: 'resp_05c96bf3f567ee650069e0c6a804fc819395b958d72ccefca8',
object: 'response',
created_at: 1776338600,
status: 'completed',
background: false,
completed_at: 1776338600,
content_filters: null,
error: null,
frequency_penalty: 0,
incomplete_details: null,
instructions: null,
max_output_tokens: null,
max_tool_calls: null,
model: 'gpt-5.4',
output: [ [Object] ],
parallel_tool_calls: true,
presence_penalty: 0,
previous_response_id: null,
prompt_cache_key: null,
prompt_cache_retention: null,
reasoning: { effort: 'none', summary: null },
safety_identifier: null,
service_tier: 'default',
store: true,
temperature: 1,
text: { format: [Object], verbosity: 'medium' },
tool_choice: 'auto',
tools: [],
top_logprobs: 0,
top_p: 0.98,
truncation: 'disabled',
usage: {
input_tokens: 7,
input_tokens_details: [Object],
output_tokens: 11,
output_tokens_details: [Object],
total_tokens: 18
},
user: null,
metadata: {}
},
sequence_number: 14
}
Supported Models
gpt-5.4
GPT-5.4
gpt-5.4-mini
GPT-5.4 mini
gpt-5.4-nano
GPT-5.4 nano
gpt-5.2
GPT-5.2
gpt-5.1
GPT-5.1
gpt-5
GPT-5
gpt-5-mini
GPT-5 mini
gpt-5-nano
GPT-5 nano
gpt-4.1
GPT-4.1
gpt-4.1-mini
GPT-4.1 mini
gpt-4.1-nano
GPT-4.1 nano
gpt-4o
GPT-4o
gpt-4o-mini
GPT-4o mini
o4-mini
o4-mini
o3
o3
o3-mini
o3-mini
o3-pro
o3-pro
gpt-5.3-codex
GPT-5.3-Codex
gpt-5.3-codex-spark
GPT-5.3-Codex-Spark
gpt-5.2-codex
GPT-5.2-Codex
gpt-5.1-codex-max
GPT-5.1-Codex-Max
gpt-5.1-codex
GPT-5.1-Codex
gpt-5-codex
GPT-5-Codex
gpt-5.1-codex-mini
GPT-5.1-Codex-Mini
Contact Us
If you encounter any issues while using TokenOff:
Contact us at support@tokenoff.com and other official channels for technical support
Submit an issue on our GitHub repository