In case you want to use a specific agent from the deprecated Agents API for batch inference, you can specify the agent ID here.













Batch Endpoints
Batch API












Examples
Real world code examples
Get Batch Jobs
GET /v1/batch/jobs
Get a list of batch jobs for your organization and user.
200
OK
Playground
Test the endpoints live
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.list({});
console.log(result);
}
run();
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.list({});
console.log(result);
}
run();
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.list(page=0, page_size=100, created_by_me=False)
# Handle response
print(res)
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.list(page=0, page_size=100, created_by_me=False)
# Handle response
print(res)
curl https://api.mistral.ai/v1/batch/jobs \
-X GET \
-H 'Authorization: Bearer YOUR_APIKEY_HERE'
curl https://api.mistral.ai/v1/batch/jobs \
-X GET \
-H 'Authorization: Bearer YOUR_APIKEY_HERE'
200
{
"total": 87
}
{
"total": 87
}
Create Batch Job
POST /v1/batch/jobs
Create a new batch job; it will be queued for processing.
agent_id
endpoint
input_files
The list of input files to be used for batch inference. These files should be JSONL
files, with each line containing the input data corresponding to the body of the request for the batch inference in a "body" field. An example of such a file is the following: {"custom_id": "0", "body": {"max_tokens": 100, "messages": [{"role": "user", "content": "What is the best French cheese?"}]}} {"custom_id": "1", "body": {"max_tokens": 100, "messages": [{"role": "user", "content": "What is the best French wine?"}]}}
metadata
The metadata of your choice to be associated with the batch inference job.
model
The model to be used for batch inference.
timeout_hours
Default Value: 24
The timeout in hours for the batch inference job.
200
OK
agent_id
completed_at
completed_requests
created_at
endpoint
error_file
failed_requests
id
input_files
metadata
model
object
Default Value: "batch"
output_file
started_at
status
succeeded_requests
total_requests
Playground
Test the endpoints live
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.create({
inputFiles: [
"fe3343a2-3b8d-404b-ba32-a78dede2614a",
],
endpoint: "/v1/classifications",
});
console.log(result);
}
run();
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.create({
inputFiles: [
"fe3343a2-3b8d-404b-ba32-a78dede2614a",
],
endpoint: "/v1/classifications",
});
console.log(result);
}
run();
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.create(input_files=[
"fe3343a2-3b8d-404b-ba32-a78dede2614a",
], endpoint="/v1/moderations", timeout_hours=24)
# Handle response
print(res)
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.create(input_files=[
"fe3343a2-3b8d-404b-ba32-a78dede2614a",
], endpoint="/v1/moderations", timeout_hours=24)
# Handle response
print(res)
curl https://api.mistral.ai/v1/batch/jobs \
-X POST \
-H 'Authorization: Bearer YOUR_APIKEY_HERE' \
-d '{
"endpoint": "/v1/chat/completions",
"input_files": [
"ipsum eiusmod"
]
}'
curl https://api.mistral.ai/v1/batch/jobs \
-X POST \
-H 'Authorization: Bearer YOUR_APIKEY_HERE' \
-d '{
"endpoint": "/v1/chat/completions",
"input_files": [
"ipsum eiusmod"
]
}'
200
{
"completed_requests": 87,
"created_at": 14,
"endpoint": "ipsum eiusmod",
"errors": [
{
"message": "consequat do"
}
],
"failed_requests": 56,
"id": "reprehenderit ut dolore",
"input_files": [
"occaecat dolor sit"
],
"status": "QUEUED",
"succeeded_requests": 91,
"total_requests": 32
}
{
"completed_requests": 87,
"created_at": 14,
"endpoint": "ipsum eiusmod",
"errors": [
{
"message": "consequat do"
}
],
"failed_requests": 56,
"id": "reprehenderit ut dolore",
"input_files": [
"occaecat dolor sit"
],
"status": "QUEUED",
"succeeded_requests": 91,
"total_requests": 32
}
Get Batch Job
GET /v1/batch/jobs/{job_id}
Get the details of a batch job by its UUID.
job_id
200
OK
agent_id
completed_at
completed_requests
created_at
endpoint
error_file
failed_requests
id
input_files
metadata
model
object
Default Value: "batch"
output_file
started_at
status
succeeded_requests
total_requests
Playground
Test the endpoints live
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.get({
jobId: "4017dc9f-b629-42f4-9700-8c681b9e7f0f",
});
console.log(result);
}
run();
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.get({
jobId: "4017dc9f-b629-42f4-9700-8c681b9e7f0f",
});
console.log(result);
}
run();
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.get(job_id="4017dc9f-b629-42f4-9700-8c681b9e7f0f")
# Handle response
print(res)
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.get(job_id="4017dc9f-b629-42f4-9700-8c681b9e7f0f")
# Handle response
print(res)
curl https://api.mistral.ai/v1/batch/jobs/{job_id} \
-X GET \
-H 'Authorization: Bearer YOUR_APIKEY_HERE'
curl https://api.mistral.ai/v1/batch/jobs/{job_id} \
-X GET \
-H 'Authorization: Bearer YOUR_APIKEY_HERE'
200
{
"completed_requests": 87,
"created_at": 14,
"endpoint": "ipsum eiusmod",
"errors": [
{
"message": "consequat do"
}
],
"failed_requests": 56,
"id": "reprehenderit ut dolore",
"input_files": [
"occaecat dolor sit"
],
"status": "QUEUED",
"succeeded_requests": 91,
"total_requests": 32
}
{
"completed_requests": 87,
"created_at": 14,
"endpoint": "ipsum eiusmod",
"errors": [
{
"message": "consequat do"
}
],
"failed_requests": 56,
"id": "reprehenderit ut dolore",
"input_files": [
"occaecat dolor sit"
],
"status": "QUEUED",
"succeeded_requests": 91,
"total_requests": 32
}
Cancel Batch Job
POST /v1/batch/jobs/{job_id}/cancel
Request the cancellation of a batch job.
job_id
200
OK
agent_id
completed_at
completed_requests
created_at
endpoint
error_file
failed_requests
id
input_files
metadata
model
object
Default Value: "batch"
output_file
started_at
status
succeeded_requests
total_requests
Playground
Test the endpoints live
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.cancel({
jobId: "4fb29d1c-535b-4f0a-a1cb-2167f86da569",
});
console.log(result);
}
run();
import { Mistral } from "@mistralai/mistralai";
const mistral = new Mistral({
apiKey: "MISTRAL_API_KEY",
});
async function run() {
const result = await mistral.batch.jobs.cancel({
jobId: "4fb29d1c-535b-4f0a-a1cb-2167f86da569",
});
console.log(result);
}
run();
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.cancel(job_id="4fb29d1c-535b-4f0a-a1cb-2167f86da569")
# Handle response
print(res)
from mistralai import Mistral
import os
with Mistral(
api_key=os.getenv("MISTRAL_API_KEY", ""),
) as mistral:
res = mistral.batch.jobs.cancel(job_id="4fb29d1c-535b-4f0a-a1cb-2167f86da569")
# Handle response
print(res)
curl https://api.mistral.ai/v1/batch/jobs/{job_id}/cancel \
-X POST \
-H 'Authorization: Bearer YOUR_APIKEY_HERE'
curl https://api.mistral.ai/v1/batch/jobs/{job_id}/cancel \
-X POST \
-H 'Authorization: Bearer YOUR_APIKEY_HERE'
200
{
"completed_requests": 87,
"created_at": 14,
"endpoint": "ipsum eiusmod",
"errors": [
{
"message": "consequat do"
}
],
"failed_requests": 56,
"id": "reprehenderit ut dolore",
"input_files": [
"occaecat dolor sit"
],
"status": "QUEUED",
"succeeded_requests": 91,
"total_requests": 32
}
{
"completed_requests": 87,
"created_at": 14,
"endpoint": "ipsum eiusmod",
"errors": [
{
"message": "consequat do"
}
],
"failed_requests": 56,
"id": "reprehenderit ut dolore",
"input_files": [
"occaecat dolor sit"
],
"status": "QUEUED",
"succeeded_requests": 91,
"total_requests": 32
}