Model API
- Welcome
- Get started
- Understand the API
- Tasks
- Audio Classification
- Automatic Speech Recognition
- Chat
- Depth Estimation
- Document Question Answering
- Feature Extraction
- Fill Mask
- Image Classification
- Image Feature Extraction
- Image Segmentation
- Image-to-Text
- Mask Generation
- Object Detection
- Question Answering
- Sentence Similarity
- Summarization
- Text Classification
- Text Generation
- Image Generation
- Text-to-Speech
- Text-to-Text Generation
- Text-to-Video
- Token Classification
- Translation
- Unconditional Image Generation
- Video Classification
- Visual Question Answering
- Zero Shot Classification
- Zero Shot Image Classification
- Zero Shot Object Detection
Tasks
Chat
Using multi-modal models is easy
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model
const model = sdk.model("microsoft/Phi-3-mini-4k-instruct");
// init the model
await model.create();
// provide the model your chat session
const messages = [
  { role: "system", content: "You are a friendly chatbot" },
  { role: "assistant", content: "Hello, I'm a friendly bot" },
  { role: "user", content: "Hello bot, what is the capital of England?" },
];
// send to model
const { error, output } = await model.run(messages);
console.log({ error, output });
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model
const model = sdk.model("microsoft/Phi-3-mini-4k-instruct");
// init the model
await model.create();
// provide the model your chat session
const messages = [
  { role: "system", content: "You are a friendly chatbot" },
  { role: "assistant", content: "Hello, I'm a friendly bot" },
  { role: "user", content: "Hello bot, what is the capital of England?" },
];
// optional generation parameters (temperature: 0 = deterministic output)
const params = { temperature: 0 };
// send to model
const { error, output } = await model.run(messages, params);
console.log({ error, output });
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model
const model = sdk.model("microsoft/Phi-3-mini-4k-instruct");
// init the model
await model.create();
// provide the model your chat session
const messages = [
  { role: "system", content: "You are a friendly chatbot" },
  { role: "assistant", content: "Hello, I'm a friendly bot" },
  { role: "user", content: "Hello bot, what is the capital of England?" },
];
// request a streamed response instead of a single payload
const stream = true;
// send to model — returns an async-iterable read stream of text chunks
const readStream = await model.run(messages, stream);
let text = "";
for await (const chunk of readStream) {
  text += chunk;
  console.log(chunk);
}
console.log({ text });
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model
const model = sdk.model("microsoft/Phi-3-mini-4k-instruct");
// init the model
await model.create();
// provide the model your chat session
const messages = [
  { role: "system", content: "You are a friendly chatbot" },
  { role: "assistant", content: "Hello, I'm a friendly bot" },
  { role: "user", content: "Hello bot, what is the capital of England?" },
];
// optional generation parameters (temperature: 0 = deterministic output)
const params = { temperature: 0 };
// request a streamed response instead of a single payload
const stream = true;
// send to model — returns an async-iterable read stream of text chunks
const readStream = await model.run(messages, params, stream);
let text = "";
for await (const chunk of readStream) {
  text += chunk;
  console.log(chunk);
}
console.log({ text });
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model
const model = sdk.model("meta-llama/Llama-3.2-11B-Vision-Instruct");
// init the model
await model.create();
// provide the model your chat session
// NOTE: content parts use `,` between properties — `{ type: "text": ... }` is a syntax error
const messages = [
  {
    role: "user",
    content: [
      { type: "text", text: "Describe this image" },
      { type: "image", url: "https://hips.hearstapps.com/hmg-prod/images/how-to-keep-ducks-call-ducks-1615457181.jpg?crop=0.670xw:1.00xh;0.157xw,0&resize=980:*" },
    ],
  },
];
// send to model
const { error, output } = await model.run(messages);
console.log({ error, output });
You can send images as either an HTTP URL or a base64 data URL.
{
"role": "user",
"content": [
{ "type": "text", "text": "Describe this image" },
{ "type": "image", "url": "https://hips.hearstapps.com/hmg-prod/images/how-to-keep-ducks-call-ducks-1615457181.jpg?crop=0.670xw:1.00xh;0.157xw,0&resize=980:*" }
]
}
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model
const model = sdk.model("Qwen/Qwen2-Audio-7B-Instruct");
// init the model
await model.create();
// provide the model your chat session
// NOTE: content parts use `,` between properties — `{ type: "text": ... }` is a syntax error
const messages = [
  {
    role: "user",
    content: [
      { type: "text", text: "Describe this audio" },
      { type: "audio", url: "https://dn720307.ca.archive.org/0/items/various-bird-sounds/Various%20Bird%20Sounds.mp3" },
    ],
  },
];
// send to model
const { error, output } = await model.run(messages);
console.log({ error, output });
You can send audio as either an HTTP URL or a base64 data URL.
{
"role": "user",
"content": [
{ "type": "text", "text": "Describe this audio" },
{ "type": "audio", "url": "https://dn720307.ca.archive.org/0/items/various-bird-sounds/Various%20Bird%20Sounds.mp3" }
]
}
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model
const model = sdk.model("llava-hf/LLaVA-NeXT-Video-7B-hf");
// init the model
await model.create();
// provide the model your chat session
// NOTE: content parts use `,` between properties — `{ type: "text": ... }` is a syntax error.
// The media part is a video, so its type is "video" (the original said "audio").
const messages = [
  {
    role: "user",
    content: [
      { type: "text", text: "Describe this video" },
      { type: "video", url: "https://example.com/path-to-video.mp4" },
    ],
  },
];
// send to model
const { error, output } = await model.run(messages);
console.log({ error, output });
import Bytez from "bytez.js";

// insert your key
const sdk = new Bytez("BYTEZ_KEY");
// choose your chat model + insert your provider key
//
// const model = sdk.model("google/gemini-2.0-flash", "YOUR_GEMINI_KEY");
const model = sdk.model("openai/gpt-4o", "YOUR_OPEN_AI_KEY");
// provide the model your chat session
// (no model.create() needed — provider-hosted models are already running)
const messages = [
  { role: "system", content: "You are a friendly chatbot" },
  { role: "assistant", content: "Hello, I'm a friendly bot" },
  { role: "user", content: "Hello bot, what is the capital of England?" },
];
// send to model
const { error, output } = await model.run(messages);
console.log({ error, output });