Skip to content

Code Examples

Complete, runnable code examples for common use cases in Python, Node.js, and cURL.

Basic Chat

python
"""Basic chat completion: one system prompt, one user question."""
from openai import OpenAI

# Point the official OpenAI SDK at the 1688token.ai gateway.
client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://1688token.ai/v1",
)

conversation = [
    {"role": "system", "content": "You are a professional AI assistant."},
    {"role": "user", "content": "Explain what a large language model is in one sentence."},
]

completion = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=conversation,
)

# The reply text lives on the first (and here only) choice.
print(completion.choices[0].message.content)
javascript
// Basic chat completion: one system prompt, one user question (Node.js, ESM).
import OpenAI from "openai";

// Point the official OpenAI SDK at the 1688token.ai gateway.
const client = new OpenAI({
  apiKey: "YOUR_API_KEY",
  baseURL: "https://1688token.ai/v1"
});

const conversation = [
  { role: "system", content: "You are a professional AI assistant." },
  { role: "user", content: "Explain what a large language model is in one sentence." }
];

const completion = await client.chat.completions.create({
  model: "gpt-4o-mini",
  messages: conversation
});

// The reply text lives on the first (and here only) choice.
console.log(completion.choices[0].message.content);
bash
# Basic chat completion over raw HTTP. Replace YOUR_API_KEY with your key;
# the JSON body mirrors the Python/Node.js examples above.
curl https://1688token.ai/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer YOUR_API_KEY" \
  -d '{
    "model": "gpt-4o-mini",
    "messages": [
      {"role": "system", "content": "You are a professional AI assistant."},
      {"role": "user", "content": "Explain what a large language model is in one sentence."}
    ]
  }'

Streaming

python
"""Streaming chat completion: print tokens as they arrive.

Fix: guard against chunks whose ``choices`` list is empty (the API can emit
such chunks, e.g. a final usage-only chunk), which would make the original
``chunk.choices[0]`` raise IndexError. This also matches the optional
chaining (``chunk.choices[0]?.``) already used in the JavaScript example.
"""
from openai import OpenAI

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://1688token.ai/v1"
)

stream = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Write a short poem about autumn."}],
    stream=True
)

for chunk in stream:
    # Skip chunks that carry no choices instead of indexing into an empty list.
    if not chunk.choices:
        continue
    delta = chunk.choices[0].delta.content
    if delta:
        # flush=True so partial output appears immediately, without newlines.
        print(delta, end="", flush=True)
javascript
// Streaming chat completion: print tokens as they arrive (Node.js, ESM).
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "YOUR_API_KEY",
  baseURL: "https://1688token.ai/v1"
});

const stream = await client.chat.completions.create({
  model: "gpt-4o",
  messages: [{ role: "user", content: "Write a short poem about autumn." }],
  stream: true
});

// Each chunk carries an incremental delta; write it without a newline.
for await (const part of stream) {
  const text = part.choices[0]?.delta?.content;
  if (text) {
    process.stdout.write(text);
  }
}

Multi-Turn Conversation

python
"""Multi-turn conversation: the API is stateless, so the full history
is resent with every request."""
from openai import OpenAI

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://1688token.ai/v1"
)

# Running transcript; every call sends the whole list.
history = [{"role": "system", "content": "You are a professional AI assistant."}]

# Turn 1
history.append({"role": "user", "content": "What is machine learning?"})
first = client.chat.completions.create(model="gpt-4o-mini", messages=history)
answer = first.choices[0].message.content
# Record the assistant turn so the next request has full context.
history.append({"role": "assistant", "content": answer})
print("Assistant:", answer)

# Turn 2 — the model sees turn 1 because it stayed in `history`.
history.append({"role": "user", "content": "How does it differ from deep learning?"})
second = client.chat.completions.create(model="gpt-4o-mini", messages=history)
print("Assistant:", second.choices[0].message.content)
javascript
// Multi-turn conversation (Node.js, ESM): the API is stateless, so the
// full history is resent with every request.
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "YOUR_API_KEY",
  baseURL: "https://1688token.ai/v1"
});

// Running transcript; every call sends the whole array.
const history = [{ role: "system", content: "You are a professional AI assistant." }];

// Turn 1
history.push({ role: "user", content: "What is machine learning?" });
let result = await client.chat.completions.create({ model: "gpt-4o-mini", messages: history });
const answer = result.choices[0].message.content;
// Record the assistant turn so the next request has full context.
history.push({ role: "assistant", content: answer });
console.log("Assistant:", answer);

// Turn 2 — the model sees turn 1 because it stayed in `history`.
history.push({ role: "user", content: "How does it differ from deep learning?" });
result = await client.chat.completions.create({ model: "gpt-4o-mini", messages: history });
console.log("Assistant:", result.choices[0].message.content);

Using Claude Models

python
"""Claude models through the same OpenAI-compatible endpoint —
only the model name changes."""
from openai import OpenAI

client = OpenAI(
    api_key="YOUR_API_KEY",
    base_url="https://1688token.ai/v1"
)

prompt = [
    {"role": "user", "content": "Write a Python quicksort function with comments."},
]

completion = client.chat.completions.create(
    model="claude-sonnet-4-6",
    messages=prompt,
    # Cap the response length for this code-generation request.
    max_tokens=1024,
)

print(completion.choices[0].message.content)
javascript
// Claude models through the same OpenAI-compatible endpoint (Node.js, ESM) —
// only the model name changes.
import OpenAI from "openai";

const client = new OpenAI({
  apiKey: "YOUR_API_KEY",
  baseURL: "https://1688token.ai/v1"
});

const prompt = [
  { role: "user", content: "Write a Python quicksort function with comments." }
];

const completion = await client.chat.completions.create({
  model: "claude-sonnet-4-6",
  messages: prompt,
  // Cap the response length for this code-generation request.
  max_tokens: 1024
});

console.log(completion.choices[0].message.content);