victor HF Staff Claude commited on
Commit
38e4cc9
·
1 Parent(s): a79b092

Add Pelican Bicycle SVG Benchmark

Browse files

A comprehensive benchmark testing AI models' ability to generate SVG art of a pelican riding a bicycle.

Features:
- Tests 20 models from Hugging Face router API
- Each model tested with 3 temperature settings (0, 0.5, 1.0)
- Clean, minimalist UI showing temperature variations side by side
- Responsive SVG display
- Only shows models with successful generations
- 15 out of 20 models successfully generated at least one SVG

Created by Simon Willison
Powered by https://router.huggingface.co/v1/models

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <[email protected]>

Files changed (6) hide show
  1. benchmark_models.py +204 -0
  2. benchmark_results.json +0 -0
  3. benchmark_stats.json +30 -0
  4. fetch_models.py +14 -0
  5. index.html +211 -19
  6. models.json +1920 -0
benchmark_models.py ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ import time
4
+ import re
5
+ import requests
6
+ from concurrent.futures import ThreadPoolExecutor, as_completed
7
+ from datetime import datetime
8
+
9
# Get HF token from the environment; empty string means unauthenticated requests.
HF_TOKEN = os.environ.get("HF_TOKEN", "")

# Load the model catalog previously saved by fetch_models.py.
with open("models.json", "r") as f:
    models_data = json.load(f)

# Extract model IDs from the router's OpenAI-style {"data": [...]} listing.
model_ids = [model["id"] for model in models_data["data"]]

# Limit to first 20 models to keep the benchmark run small.
model_ids = model_ids[:20]
21
+
22
+
23
def extract_svg(text):
    """Pull the first complete <svg>...</svg> document out of a model reply.

    Fenced code blocks are checked first (```svg, ```xml, ```html, then a
    bare ``` fence) so that surrounding prose does not defeat extraction;
    if no fence yields an SVG element, the whole text is scanned directly
    (this also covers SVG wrapped in thinking tags or other wrappers).

    Returns the SVG markup as a string, or None when none is found.
    """
    svg_re = re.compile(r"<svg[^>]*>.*?</svg>", re.DOTALL | re.IGNORECASE)
    fence_patterns = (
        r"```svg\s*(.*?)\s*```",
        r"```xml\s*(.*?)\s*```",
        r"```html\s*(.*?)\s*```",
        r"```\s*(.*?)\s*```",
    )

    # Prefer SVG found inside a fenced block over anything in loose prose.
    for fence in fence_patterns:
        fenced = re.search(fence, text, re.DOTALL | re.IGNORECASE)
        if fenced is None:
            continue
        inner = fenced.group(1)
        if "<svg" in inner:
            found = svg_re.search(inner)
            if found:
                return found.group(0)

    # No usable fence: fall back to scanning the raw response text.
    found = svg_re.search(text)
    return found.group(0) if found else None
53
+
54
+
55
def test_model_with_temperature(model_id, temperature):
    """Ask one model to draw the pelican SVG at a given temperature.

    Sends a single chat-completion request to the HF router and returns a
    result dict carrying timing, the raw reply, the extracted SVG (when
    present), and an error message when the attempt failed. Never raises:
    every failure mode ends up in the "error" field instead.
    """
    print(f"Testing {model_id} with temperature {temperature}...")

    outcome = {
        "model_id": model_id,
        "temperature": temperature,
        "timestamp": datetime.now().isoformat(),
        "success": False,
        "response_time": None,
        "svg_content": None,
        "error": None,
        "raw_response": None,
    }

    prompt = """Create a pelican riding a bicycle using SVG. Return only the SVG code without any explanation or markdown formatting. The SVG should be a complete, valid SVG document starting with <svg> and ending with </svg>."""

    request_headers = {
        "Authorization": f"Bearer {HF_TOKEN}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": prompt}],
        "max_tokens": 2000,
        "temperature": temperature,
    }

    try:
        started = time.time()
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers=request_headers,
            json=payload,
            timeout=60,
        )
        outcome["response_time"] = time.time() - started

        # Guard clauses: record the failure reason and bail early.
        if response.status_code != 200:
            outcome["error"] = f"HTTP {response.status_code}: {response.text}"
            return outcome

        body = response.json()
        choices = body.get("choices")
        if not (choices and choices[0].get("message")):
            outcome["error"] = "Empty response from model"
            return outcome

        reply_text = choices[0]["message"]["content"]
        outcome["raw_response"] = reply_text

        svg = extract_svg(reply_text)
        if svg:
            outcome["svg_content"] = svg
            outcome["success"] = True
        else:
            outcome["error"] = "No valid SVG found in response"

    except Exception as e:
        # Network errors, timeouts, malformed JSON, etc. all land here.
        outcome["error"] = str(e)
        print(f"Error testing {model_id} with temperature {temperature}: {e}")

    return outcome
122
+
123
+
124
def main():
    """Run the full benchmark, then write results and summary stats to disk.

    Every (model, temperature) pair is one independent task, fanned out
    over a thread pool. Raw per-test results go to benchmark_results.json;
    aggregate counts and timing go to benchmark_stats.json.
    """
    temperatures = [0, 0.5, 1.0]
    print(f"Testing {len(model_ids)} models with {len(temperatures)} temperature settings...")
    results = []

    # One task per (model, temperature) combination.
    test_tasks = [(model_id, temp) for model_id in model_ids for temp in temperatures]

    # Fan the requests out concurrently; each worker handles one pair.
    with ThreadPoolExecutor(max_workers=10) as executor:
        future_to_task = {
            executor.submit(test_model_with_temperature, model_id, temp): (model_id, temp)
            for model_id, temp in test_tasks
        }

        for future in as_completed(future_to_task):
            model_id, temp = future_to_task[future]
            try:
                outcome = future.result()
                results.append(outcome)
                print(
                    f"Completed {model_id} (temp={temp}): {'Success' if outcome['success'] else 'Failed'}"
                )
            except Exception as e:
                # test_model_with_temperature catches its own errors, so this
                # only fires on unexpected executor-level failures.
                print(f"Exception for {model_id} (temp={temp}): {e}")
                results.append({
                    "model_id": model_id,
                    "temperature": temp,
                    "success": False,
                    "error": str(e)
                })

    # Persist the raw per-test results.
    with open("benchmark_results.json", "w") as f:
        json.dump(results, f, indent=2)

    # Aggregate statistics.
    total_tests = len(results)
    successful_tests = sum(1 for r in results if r.get("success", False))

    # dict.fromkeys preserves first-success order, matching insertion order
    # of the original grouping (a set would not).
    models_with_success = dict.fromkeys(
        r["model_id"] for r in results if r.get("success", False)
    )

    timed = [r["response_time"] for r in results if r.get("response_time")]

    stats = {
        "total_models": len(model_ids),
        "temperatures_tested": temperatures,
        "total_tests": total_tests,
        "successful_tests": successful_tests,
        "failed_tests": total_tests - successful_tests,
        "models_with_at_least_one_success": len(models_with_success),
        "average_response_time": sum(timed) / len(timed) if timed else 0,
        "successful_model_ids": list(models_with_success.keys()),
    }

    with open("benchmark_stats.json", "w") as f:
        json.dump(stats, f, indent=2)

    print("\nBenchmark complete!")
    print(f"Total models tested: {stats['total_models']}")
    print(f"Temperature settings: {stats['temperatures_tested']}")
    print(f"Total tests: {stats['total_tests']}")
    print(f"Successful tests: {stats['successful_tests']}")
    print(f"Failed tests: {stats['failed_tests']}")
    print(f"Models with at least one success: {stats['models_with_at_least_one_success']}")
    print(f"Average response time: {stats['average_response_time']:.2f}s")


if __name__ == "__main__":
    main()
benchmark_results.json ADDED
The diff for this file is too large to render. See raw diff
 
benchmark_stats.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "total_models": 20,
3
+ "temperatures_tested": [
4
+ 0,
5
+ 0.5,
6
+ 1.0
7
+ ],
8
+ "total_tests": 60,
9
+ "successful_tests": 35,
10
+ "failed_tests": 25,
11
+ "models_with_at_least_one_success": 15,
12
+ "average_response_time": 10.812262760965448,
13
+ "successful_model_ids": [
14
+ "moonshotai/Kimi-K2-Instruct",
15
+ "Qwen/Qwen3-Coder-480B-A35B-Instruct",
16
+ "Qwen/Qwen3-235B-A22B-Instruct-2507",
17
+ "meta-llama/Llama-3.1-8B-Instruct",
18
+ "zai-org/GLM-4.5",
19
+ "deepseek-ai/DeepSeek-R1",
20
+ "zai-org/GLM-4.1V-9B-Thinking",
21
+ "Qwen/Qwen3-235B-A22B",
22
+ "meta-llama/Llama-3.2-3B-Instruct",
23
+ "meta-llama/Meta-Llama-3-8B-Instruct",
24
+ "Qwen/Qwen3-32B",
25
+ "Qwen/Qwen3-4B",
26
+ "deepseek-ai/DeepSeek-R1-0528-Qwen3-8B",
27
+ "meta-llama/Llama-4-Scout-17B-16E-Instruct",
28
+ "Qwen/Qwen3-30B-A3B"
29
+ ]
30
+ }
fetch_models.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Fetch the model catalog from the Hugging Face router and cache it locally.

Writes the raw JSON listing to models.json for later use by the benchmark
script.
"""
import requests
import json

# Fetch available models from Hugging Face router.
# A timeout keeps the script from hanging forever on a stalled connection,
# and raise_for_status() fails loudly on an HTTP error instead of silently
# writing an error payload into models.json.
response = requests.get("https://router.huggingface.co/v1/models", timeout=30)
response.raise_for_status()
models = response.json()

print(f"Found {len(models['data'])} models:")
for model in models['data']:
    print(f"- {model['id']}")

# Save to file for later use
with open('models.json', 'w') as f:
    json.dump(models, f, indent=2)
index.html CHANGED
@@ -1,19 +1,211 @@
1
- <!doctype html>
2
- <html>
3
- <head>
4
- <meta charset="utf-8" />
5
- <meta name="viewport" content="width=device-width" />
6
- <title>My static Space</title>
7
- <link rel="stylesheet" href="style.css" />
8
- </head>
9
- <body>
10
- <div class="card">
11
- <h1>Welcome to your static Space!</h1>
12
- <p>You can modify this app directly by editing <i>index.html</i> in the Files and versions tab.</p>
13
- <p>
14
- Also don't forget to check the
15
- <a href="https://huggingface.co/docs/hub/spaces" target="_blank">Spaces documentation</a>.
16
- </p>
17
- </div>
18
- </body>
19
- </html>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Pelican Bicycle SVG Benchmark</title>
7
+ <style>
8
+ * {
9
+ margin: 0;
10
+ padding: 0;
11
+ box-sizing: border-box;
12
+ }
13
+
14
+ body {
15
+ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
16
+ background-color: #fff;
17
+ color: #333;
18
+ line-height: 1.6;
19
+ }
20
+
21
+ .container {
22
+ max-width: 1400px;
23
+ margin: 0 auto;
24
+ padding: 20px;
25
+ }
26
+
27
+ h1 {
28
+ text-align: center;
29
+ margin-bottom: 5px;
30
+ color: #000;
31
+ font-size: 1.8em;
32
+ font-weight: 500;
33
+ }
34
+
35
+ .subtitle {
36
+ text-align: center;
37
+ color: #666;
38
+ margin-bottom: 30px;
39
+ font-size: 0.9em;
40
+ }
41
+
42
+ .models-container {
43
+ margin-bottom: 40px;
44
+ }
45
+
46
+ .model-group {
47
+ margin-bottom: 30px;
48
+ background: white;
49
+ border: 1px solid #eee;
50
+ padding: 20px;
51
+ }
52
+
53
+ .model-group-header {
54
+ font-weight: bold;
55
+ font-size: 1.1em;
56
+ color: #333;
57
+ margin-bottom: 15px;
58
+ padding-bottom: 10px;
59
+ border-bottom: 2px solid #f0f0f0;
60
+ }
61
+
62
+ .temperature-grid {
63
+ display: grid;
64
+ grid-template-columns: repeat(3, 1fr);
65
+ gap: 15px;
66
+ }
67
+
68
+ .temp-card {
69
+ border: 1px solid #eee;
70
+ overflow: hidden;
71
+ }
72
+
73
+ .temp-header {
74
+ padding: 8px;
75
+ background: #fafafa;
76
+ border-bottom: 1px solid #f0f0f0;
77
+ font-size: 0.85em;
78
+ text-align: center;
79
+ color: #666;
80
+ }
81
+
82
+ .svg-preview {
83
+ background: white;
84
+ padding: 20px;
85
+ height: 300px;
86
+ display: flex;
87
+ align-items: center;
88
+ justify-content: center;
89
+ position: relative;
90
+ }
91
+
92
+ .svg-preview svg {
93
+ max-width: 100%;
94
+ max-height: 180px;
95
+ width: auto;
96
+ height: auto;
97
+ }
98
+
99
+ .no-svg {
100
+ color: #95a5a6;
101
+ font-style: italic;
102
+ text-align: center;
103
+ }
104
+
105
+
106
+ .loading {
107
+ text-align: center;
108
+ padding: 50px;
109
+ font-size: 1.2em;
110
+ color: #7f8c8d;
111
+ }
112
+
113
+
114
+ @media (max-width: 768px) {
115
+ .models-grid {
116
+ grid-template-columns: 1fr;
117
+ }
118
+
119
+ h1 {
120
+ font-size: 2em;
121
+ }
122
+
123
+ .stat-value {
124
+ font-size: 2em;
125
+ }
126
+ }
127
+ </style>
128
+ </head>
129
+ <body>
130
+ <div class="container">
131
+ <h1>Pelican Bicycle SVG Benchmark</h1>
132
+ <p class="subtitle">Testing AI models' ability to create SVG art of a pelican riding a bicycle<br>
133
+ Benchmark created by <a href="https://simonwillison.net/" target="_blank" style="color: #666;">Simon Willison</a><br>
134
+ Inference done with <a href="https://router.huggingface.co/v1/models" target="_blank" style="color: #666;">https://router.huggingface.co/v1/models</a></p>
135
+
136
+ <div class="models-container" id="modelsContainer">
137
+ <div class="loading">Loading model results...</div>
138
+ </div>
139
+ </div>
140
+
141
+ <script>
142
+ let allResults = [];
143
+
144
async function loadResults() {
    // Fetch the benchmark output and hand it to the renderer;
    // on any failure, replace the loading placeholder with an error message.
    try {
        const resp = await fetch('benchmark_results.json');
        allResults = await resp.json();
        displayModelGroups(allResults);
    } catch (error) {
        console.error('Error loading results:', error);
        document.getElementById('modelsContainer').innerHTML =
            '<div style="text-align: center; color: #999;">Error loading results</div>';
    }
}
156
+
157
// Render one section per model, with a three-column grid of temperature
// results (0, 0.5, 1.0). Models with zero successful generations are hidden.
function displayModelGroups(results) {
    // Group results by model: modelGroups[model_id][temperature] -> result.
    // Numeric temperatures coerce to the same string keys on both store
    // and lookup, so temps[0.5] and temps[1] resolve correctly.
    const modelGroups = {};
    results.forEach(result => {
        if (!modelGroups[result.model_id]) {
            modelGroups[result.model_id] = {};
        }
        modelGroups[result.model_id][result.temperature] = result;
    });

    // Create HTML for each model group
    const modelsHtml = Object.entries(modelGroups).map(([modelId, temps]) => {
        // Check if at least one temperature has a successful result
        const hasSuccess = Object.values(temps).some(t => t.success);
        if (!hasSuccess) return ''; // Skip models with no successful results

        const temperatureCards = [0, 0.5, 1].map(temp => {
            const result = temps[temp];
            // Missing or failed runs render a "Failed" placeholder card.
            if (!result || !result.success) {
                return `
                    <div class="temp-card">
                        <div class="temp-header">Temperature ${temp}</div>
                        <div class="svg-preview">
                            <div class="no-svg">Failed</div>
                        </div>
                    </div>
                `;
            }
            // NOTE(review): the SVG is injected via innerHTML; this is only
            // acceptable because the results file comes from our own
            // benchmark run, not untrusted input.
            return `
                <div class="temp-card">
                    <div class="temp-header">Temperature ${temp}</div>
                    <div class="svg-preview">${result.svg_content}</div>
                </div>
            `;
        }).join('');

        return `
            <div class="model-group">
                <div class="model-group-header">${modelId}</div>
                <div class="temperature-grid">
                    ${temperatureCards}
                </div>
            </div>
        `;
    }).filter(html => html !== '').join('');

    // Fall back to a placeholder when every model was filtered out.
    document.getElementById('modelsContainer').innerHTML = modelsHtml ||
        '<div style="text-align: center; color: #999;">No successful results to display</div>';
}
206
+
207
+ // Load results when page loads
208
+ loadResults();
209
+ </script>
210
+ </body>
211
+ </html>
models.json ADDED
@@ -0,0 +1,1920 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "object": "list",
3
+ "data": [
4
+ {
5
+ "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct",
6
+ "object": "model",
7
+ "created": 1753195958,
8
+ "owned_by": "Qwen",
9
+ "providers": [
10
+ {
11
+ "provider": "novita",
12
+ "status": "live",
13
+ "supports_tools": true,
14
+ "supports_structured_output": true
15
+ },
16
+ {
17
+ "provider": "together",
18
+ "status": "live",
19
+ "supports_tools": false,
20
+ "supports_structured_output": true
21
+ },
22
+ {
23
+ "provider": "hyperbolic",
24
+ "status": "live",
25
+ "supports_tools": true,
26
+ "supports_structured_output": false
27
+ }
28
+ ]
29
+ },
30
+ {
31
+ "id": "Qwen/Qwen3-235B-A22B-Instruct-2507",
32
+ "object": "model",
33
+ "created": 1753080416,
34
+ "owned_by": "Qwen",
35
+ "providers": [
36
+ {
37
+ "provider": "novita",
38
+ "status": "live",
39
+ "supports_tools": true,
40
+ "supports_structured_output": false
41
+ },
42
+ {
43
+ "provider": "fireworks-ai",
44
+ "status": "live",
45
+ "supports_tools": true,
46
+ "supports_structured_output": false
47
+ },
48
+ {
49
+ "provider": "together",
50
+ "status": "live",
51
+ "supports_tools": true,
52
+ "supports_structured_output": true
53
+ },
54
+ {
55
+ "provider": "hyperbolic",
56
+ "status": "live",
57
+ "supports_tools": true,
58
+ "supports_structured_output": false
59
+ }
60
+ ]
61
+ },
62
+ {
63
+ "id": "moonshotai/Kimi-K2-Instruct",
64
+ "object": "model",
65
+ "created": 1752195312,
66
+ "owned_by": "moonshotai",
67
+ "providers": [
68
+ {
69
+ "provider": "novita",
70
+ "status": "live",
71
+ "supports_tools": true,
72
+ "supports_structured_output": false,
73
+ "context_length": 163840,
74
+ "pricing": {
75
+ "input": 0.57,
76
+ "output": 2.3
77
+ }
78
+ },
79
+ {
80
+ "provider": "fireworks-ai",
81
+ "status": "live",
82
+ "supports_tools": true,
83
+ "supports_structured_output": false
84
+ },
85
+ {
86
+ "provider": "together",
87
+ "status": "live",
88
+ "supports_tools": true,
89
+ "supports_structured_output": true,
90
+ "context_length": 131072,
91
+ "pricing": {
92
+ "input": 1,
93
+ "output": 3
94
+ }
95
+ },
96
+ {
97
+ "provider": "groq",
98
+ "status": "live",
99
+ "supports_tools": true,
100
+ "supports_structured_output": false,
101
+ "context_length": 131072,
102
+ "pricing": {
103
+ "input": 1,
104
+ "output": 3
105
+ }
106
+ }
107
+ ]
108
+ },
109
+ {
110
+ "id": "Qwen/Qwen3-235B-A22B-Thinking-2507",
111
+ "object": "model",
112
+ "created": 1753419545,
113
+ "owned_by": "Qwen",
114
+ "providers": [
115
+ {
116
+ "provider": "novita",
117
+ "status": "live",
118
+ "supports_tools": true,
119
+ "supports_structured_output": false
120
+ }
121
+ ]
122
+ },
123
+ {
124
+ "id": "zai-org/GLM-4.5",
125
+ "object": "model",
126
+ "created": 1752981936,
127
+ "owned_by": "zai-org",
128
+ "providers": [
129
+ {
130
+ "provider": "novita",
131
+ "status": "live",
132
+ "supports_tools": true,
133
+ "supports_structured_output": false
134
+ }
135
+ ]
136
+ },
137
+ {
138
+ "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8",
139
+ "object": "model",
140
+ "created": 1753197829,
141
+ "owned_by": "Qwen",
142
+ "providers": [
143
+ {
144
+ "provider": "together",
145
+ "status": "live",
146
+ "supports_tools": false,
147
+ "supports_structured_output": true
148
+ }
149
+ ]
150
+ },
151
+ {
152
+ "id": "HuggingFaceTB/SmolLM3-3B",
153
+ "object": "model",
154
+ "created": 1751969505,
155
+ "owned_by": "HuggingFaceTB",
156
+ "providers": [
157
+ {
158
+ "provider": "hf-inference",
159
+ "status": "live"
160
+ }
161
+ ]
162
+ },
163
+ {
164
+ "id": "meta-llama/Llama-3.1-8B-Instruct",
165
+ "object": "model",
166
+ "created": 1721292960,
167
+ "owned_by": "meta-llama",
168
+ "providers": [
169
+ {
170
+ "provider": "novita",
171
+ "status": "live",
172
+ "supports_tools": false,
173
+ "supports_structured_output": false
174
+ },
175
+ {
176
+ "provider": "nebius",
177
+ "status": "live",
178
+ "supports_tools": true,
179
+ "supports_structured_output": true
180
+ },
181
+ {
182
+ "provider": "featherless-ai",
183
+ "status": "live"
184
+ },
185
+ {
186
+ "provider": "fireworks-ai",
187
+ "status": "live",
188
+ "supports_tools": false,
189
+ "supports_structured_output": false
190
+ },
191
+ {
192
+ "provider": "hyperbolic",
193
+ "status": "staging",
194
+ "supports_tools": false,
195
+ "supports_structured_output": false
196
+ },
197
+ {
198
+ "provider": "nscale",
199
+ "status": "live",
200
+ "supports_tools": false,
201
+ "supports_structured_output": true
202
+ },
203
+ {
204
+ "provider": "sambanova",
205
+ "status": "live",
206
+ "supports_tools": true,
207
+ "supports_structured_output": true
208
+ }
209
+ ]
210
+ },
211
+ {
212
+ "id": "deepseek-ai/DeepSeek-R1",
213
+ "object": "model",
214
+ "created": 1737344767,
215
+ "owned_by": "deepseek-ai",
216
+ "providers": [
217
+ {
218
+ "provider": "novita",
219
+ "status": "live",
220
+ "supports_tools": true,
221
+ "supports_structured_output": false
222
+ },
223
+ {
224
+ "provider": "nebius",
225
+ "status": "live",
226
+ "supports_tools": false,
227
+ "supports_structured_output": false
228
+ },
229
+ {
230
+ "provider": "fireworks-ai",
231
+ "status": "live",
232
+ "supports_tools": false,
233
+ "supports_structured_output": false
234
+ },
235
+ {
236
+ "provider": "together",
237
+ "status": "live",
238
+ "supports_tools": false,
239
+ "supports_structured_output": true
240
+ },
241
+ {
242
+ "provider": "hyperbolic",
243
+ "status": "live",
244
+ "supports_tools": false,
245
+ "supports_structured_output": false
246
+ },
247
+ {
248
+ "provider": "sambanova",
249
+ "status": "live",
250
+ "supports_tools": false,
251
+ "supports_structured_output": false
252
+ }
253
+ ]
254
+ },
255
+ {
256
+ "id": "zai-org/GLM-4.1V-9B-Thinking",
257
+ "object": "model",
258
+ "created": 1751120650,
259
+ "owned_by": "zai-org",
260
+ "providers": [
261
+ {
262
+ "provider": "novita",
263
+ "status": "live",
264
+ "supports_tools": false,
265
+ "supports_structured_output": false
266
+ }
267
+ ]
268
+ },
269
+ {
270
+ "id": "Qwen/Qwen3-235B-A22B",
271
+ "object": "model",
272
+ "created": 1745726026,
273
+ "owned_by": "Qwen",
274
+ "providers": [
275
+ {
276
+ "provider": "cerebras",
277
+ "status": "live",
278
+ "supports_tools": false,
279
+ "supports_structured_output": false
280
+ },
281
+ {
282
+ "provider": "novita",
283
+ "status": "live",
284
+ "supports_tools": false,
285
+ "supports_structured_output": false
286
+ },
287
+ {
288
+ "provider": "nebius",
289
+ "status": "live",
290
+ "supports_tools": true,
291
+ "supports_structured_output": true
292
+ },
293
+ {
294
+ "provider": "fireworks-ai",
295
+ "status": "live",
296
+ "supports_tools": true,
297
+ "supports_structured_output": false
298
+ },
299
+ {
300
+ "provider": "together",
301
+ "status": "live",
302
+ "supports_tools": true,
303
+ "supports_structured_output": true
304
+ },
305
+ {
306
+ "provider": "nscale",
307
+ "status": "live",
308
+ "supports_tools": false,
309
+ "supports_structured_output": true
310
+ }
311
+ ]
312
+ },
313
+ {
314
+ "id": "deepseek-ai/DeepSeek-R1-0528",
315
+ "object": "model",
316
+ "created": 1748425602,
317
+ "owned_by": "deepseek-ai",
318
+ "providers": [
319
+ {
320
+ "provider": "novita",
321
+ "status": "live",
322
+ "supports_tools": true,
323
+ "supports_structured_output": false
324
+ },
325
+ {
326
+ "provider": "nebius",
327
+ "status": "live",
328
+ "supports_tools": true,
329
+ "supports_structured_output": true
330
+ },
331
+ {
332
+ "provider": "fireworks-ai",
333
+ "status": "live",
334
+ "supports_tools": true,
335
+ "supports_structured_output": false
336
+ },
337
+ {
338
+ "provider": "together",
339
+ "status": "live",
340
+ "supports_tools": false,
341
+ "supports_structured_output": true
342
+ },
343
+ {
344
+ "provider": "hyperbolic",
345
+ "status": "live",
346
+ "supports_tools": false,
347
+ "supports_structured_output": false
348
+ },
349
+ {
350
+ "provider": "sambanova",
351
+ "status": "live",
352
+ "supports_tools": false,
353
+ "supports_structured_output": false
354
+ }
355
+ ]
356
+ },
357
+ {
358
+ "id": "meta-llama/Llama-3.2-3B-Instruct",
359
+ "object": "model",
360
+ "created": 1726672760,
361
+ "owned_by": "meta-llama",
362
+ "providers": [
363
+ {
364
+ "provider": "novita",
365
+ "status": "live",
366
+ "supports_tools": true,
367
+ "supports_structured_output": false
368
+ },
369
+ {
370
+ "provider": "together",
371
+ "status": "live",
372
+ "supports_tools": false,
373
+ "supports_structured_output": true
374
+ },
375
+ {
376
+ "provider": "hyperbolic",
377
+ "status": "live",
378
+ "supports_tools": false,
379
+ "supports_structured_output": false
380
+ }
381
+ ]
382
+ },
383
+ {
384
+ "id": "Qwen/Qwen3-4B",
385
+ "object": "model",
386
+ "created": 1745725289,
387
+ "owned_by": "Qwen",
388
+ "providers": [
389
+ {
390
+ "provider": "nebius",
391
+ "status": "live",
392
+ "supports_tools": true,
393
+ "supports_structured_output": true
394
+ }
395
+ ]
396
+ },
397
+ {
398
+ "id": "meta-llama/Meta-Llama-3-8B-Instruct",
399
+ "object": "model",
400
+ "created": 1713346512,
401
+ "owned_by": "meta-llama",
402
+ "providers": [
403
+ {
404
+ "provider": "novita",
405
+ "status": "live",
406
+ "supports_tools": false,
407
+ "supports_structured_output": false
408
+ },
409
+ {
410
+ "provider": "featherless-ai",
411
+ "status": "live"
412
+ },
413
+ {
414
+ "provider": "groq",
415
+ "status": "live",
416
+ "supports_tools": true,
417
+ "supports_structured_output": false
418
+ }
419
+ ]
420
+ },
421
+ {
422
+ "id": "Qwen/Qwen3-32B",
423
+ "object": "model",
424
+ "created": 1745725979,
425
+ "owned_by": "Qwen",
426
+ "providers": [
427
+ {
428
+ "provider": "cerebras",
429
+ "status": "live",
430
+ "supports_tools": false,
431
+ "supports_structured_output": false
432
+ },
433
+ {
434
+ "provider": "novita",
435
+ "status": "live",
436
+ "supports_tools": false,
437
+ "supports_structured_output": false
438
+ },
439
+ {
440
+ "provider": "nebius",
441
+ "status": "live",
442
+ "supports_tools": true,
443
+ "supports_structured_output": true
444
+ },
445
+ {
446
+ "provider": "featherless-ai",
447
+ "status": "live"
448
+ },
449
+ {
450
+ "provider": "groq",
451
+ "status": "live",
452
+ "supports_tools": true,
453
+ "supports_structured_output": false
454
+ },
455
+ {
456
+ "provider": "nscale",
457
+ "status": "live",
458
+ "supports_tools": false,
459
+ "supports_structured_output": true
460
+ },
461
+ {
462
+ "provider": "sambanova",
463
+ "status": "live",
464
+ "supports_tools": false,
465
+ "supports_structured_output": false
466
+ }
467
+ ]
468
+ },
469
+ {
470
+ "id": "deepseek-ai/DeepSeek-R1-0528-Qwen3-8B",
471
+ "object": "model",
472
+ "created": 1748516867,
473
+ "owned_by": "deepseek-ai",
474
+ "providers": [
475
+ {
476
+ "provider": "novita",
477
+ "status": "live",
478
+ "supports_tools": false,
479
+ "supports_structured_output": false
480
+ },
481
+ {
482
+ "provider": "featherless-ai",
483
+ "status": "live"
484
+ }
485
+ ]
486
+ },
487
+ {
488
+ "id": "Qwen/Qwen2.5-VL-7B-Instruct",
489
+ "object": "model",
490
+ "created": 1737883597,
491
+ "owned_by": "Qwen",
492
+ "providers": [
493
+ {
494
+ "provider": "hyperbolic",
495
+ "status": "live",
496
+ "supports_tools": false,
497
+ "supports_structured_output": false
498
+ }
499
+ ]
500
+ },
501
+ {
502
+ "id": "Qwen/Qwen3-30B-A3B",
503
+ "object": "model",
504
+ "created": 1745725385,
505
+ "owned_by": "Qwen",
506
+ "providers": [
507
+ {
508
+ "provider": "novita",
509
+ "status": "live",
510
+ "supports_tools": false,
511
+ "supports_structured_output": false
512
+ },
513
+ {
514
+ "provider": "nebius",
515
+ "status": "live",
516
+ "supports_tools": true,
517
+ "supports_structured_output": true
518
+ },
519
+ {
520
+ "provider": "fireworks-ai",
521
+ "status": "live",
522
+ "supports_tools": true,
523
+ "supports_structured_output": false
524
+ }
525
+ ]
526
+ },
527
+ {
528
+ "id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
529
+ "object": "model",
530
+ "created": 1743600857,
531
+ "owned_by": "meta-llama",
532
+ "providers": [
533
+ {
534
+ "provider": "cerebras",
535
+ "status": "live",
536
+ "supports_tools": true,
537
+ "supports_structured_output": false
538
+ },
539
+ {
540
+ "provider": "novita",
541
+ "status": "live",
542
+ "supports_tools": false,
543
+ "supports_structured_output": false
544
+ },
545
+ {
546
+ "provider": "fireworks-ai",
547
+ "status": "live",
548
+ "supports_tools": true,
549
+ "supports_structured_output": false
550
+ },
551
+ {
552
+ "provider": "together",
553
+ "status": "live",
554
+ "supports_tools": true,
555
+ "supports_structured_output": true
556
+ },
557
+ {
558
+ "provider": "groq",
559
+ "status": "live",
560
+ "supports_tools": true,
561
+ "supports_structured_output": false
562
+ },
563
+ {
564
+ "provider": "nscale",
565
+ "status": "live",
566
+ "supports_tools": false,
567
+ "supports_structured_output": false
568
+ }
569
+ ]
570
+ },
571
+ {
572
+ "id": "Qwen/Qwen3-8B",
573
+ "object": "model",
574
+ "created": 1745725341,
575
+ "owned_by": "Qwen",
576
+ "providers": [
577
+ {
578
+ "provider": "featherless-ai",
579
+ "status": "live"
580
+ },
581
+ {
582
+ "provider": "nscale",
583
+ "status": "live",
584
+ "supports_tools": false,
585
+ "supports_structured_output": true
586
+ }
587
+ ]
588
+ },
589
+ {
590
+ "id": "google/gemma-3-27b-it",
591
+ "object": "model",
592
+ "created": 1740856219,
593
+ "owned_by": "google",
594
+ "providers": [
595
+ {
596
+ "provider": "nebius",
597
+ "status": "live",
598
+ "supports_tools": false,
599
+ "supports_structured_output": true
600
+ },
601
+ {
602
+ "provider": "featherless-ai",
603
+ "status": "live"
604
+ }
605
+ ]
606
+ },
607
+ {
608
+ "id": "meta-llama/Llama-3.3-70B-Instruct",
609
+ "object": "model",
610
+ "created": 1732637327,
611
+ "owned_by": "meta-llama",
612
+ "providers": [
613
+ {
614
+ "provider": "cerebras",
615
+ "status": "live",
616
+ "supports_tools": true,
617
+ "supports_structured_output": false
618
+ },
619
+ {
620
+ "provider": "novita",
621
+ "status": "live",
622
+ "supports_tools": true,
623
+ "supports_structured_output": false
624
+ },
625
+ {
626
+ "provider": "nebius",
627
+ "status": "live",
628
+ "supports_tools": true,
629
+ "supports_structured_output": true
630
+ },
631
+ {
632
+ "provider": "fireworks-ai",
633
+ "status": "live",
634
+ "supports_tools": false,
635
+ "supports_structured_output": false
636
+ },
637
+ {
638
+ "provider": "together",
639
+ "status": "live",
640
+ "supports_tools": true,
641
+ "supports_structured_output": true
642
+ },
643
+ {
644
+ "provider": "groq",
645
+ "status": "live",
646
+ "supports_tools": true,
647
+ "supports_structured_output": false
648
+ },
649
+ {
650
+ "provider": "hyperbolic",
651
+ "status": "live",
652
+ "supports_tools": true,
653
+ "supports_structured_output": false
654
+ },
655
+ {
656
+ "provider": "nscale",
657
+ "status": "live",
658
+ "supports_tools": false,
659
+ "supports_structured_output": true
660
+ },
661
+ {
662
+ "provider": "sambanova",
663
+ "status": "live",
664
+ "supports_tools": true,
665
+ "supports_structured_output": true
666
+ }
667
+ ]
668
+ },
669
+ {
670
+ "id": "Qwen/Qwen2.5-Coder-32B-Instruct",
671
+ "object": "model",
672
+ "created": 1730879390,
673
+ "owned_by": "Qwen",
674
+ "providers": [
675
+ {
676
+ "provider": "nebius",
677
+ "status": "live",
678
+ "supports_tools": false,
679
+ "supports_structured_output": false
680
+ },
681
+ {
682
+ "provider": "featherless-ai",
683
+ "status": "live"
684
+ },
685
+ {
686
+ "provider": "together",
687
+ "status": "live",
688
+ "supports_tools": true,
689
+ "supports_structured_output": true
690
+ },
691
+ {
692
+ "provider": "hyperbolic",
693
+ "status": "live",
694
+ "supports_tools": false,
695
+ "supports_structured_output": false
696
+ },
697
+ {
698
+ "provider": "nscale",
699
+ "status": "live",
700
+ "supports_tools": false,
701
+ "supports_structured_output": true
702
+ }
703
+ ]
704
+ },
705
+ {
706
+ "id": "Qwen/Qwen3-14B",
707
+ "object": "model",
708
+ "created": 1745725365,
709
+ "owned_by": "Qwen",
710
+ "providers": [
711
+ {
712
+ "provider": "nebius",
713
+ "status": "live",
714
+ "supports_tools": true,
715
+ "supports_structured_output": true
716
+ },
717
+ {
718
+ "provider": "featherless-ai",
719
+ "status": "live"
720
+ }
721
+ ]
722
+ },
723
+ {
724
+ "id": "meta-llama/Llama-3.2-1B-Instruct",
725
+ "object": "model",
726
+ "created": 1726672367,
727
+ "owned_by": "meta-llama",
728
+ "providers": [
729
+ {
730
+ "provider": "novita",
731
+ "status": "live",
732
+ "supports_tools": false,
733
+ "supports_structured_output": false
734
+ }
735
+ ]
736
+ },
737
+ {
738
+ "id": "deepseek-ai/DeepSeek-V3-0324",
739
+ "object": "model",
740
+ "created": 1742808502,
741
+ "owned_by": "deepseek-ai",
742
+ "providers": [
743
+ {
744
+ "provider": "novita",
745
+ "status": "live",
746
+ "supports_tools": true,
747
+ "supports_structured_output": false
748
+ },
749
+ {
750
+ "provider": "nebius",
751
+ "status": "live",
752
+ "supports_tools": true,
753
+ "supports_structured_output": true
754
+ },
755
+ {
756
+ "provider": "fireworks-ai",
757
+ "status": "live",
758
+ "supports_tools": true,
759
+ "supports_structured_output": false
760
+ },
761
+ {
762
+ "provider": "together",
763
+ "status": "live",
764
+ "supports_tools": true,
765
+ "supports_structured_output": true
766
+ },
767
+ {
768
+ "provider": "hyperbolic",
769
+ "status": "live",
770
+ "supports_tools": true,
771
+ "supports_structured_output": false
772
+ },
773
+ {
774
+ "provider": "sambanova",
775
+ "status": "live",
776
+ "supports_tools": true,
777
+ "supports_structured_output": true
778
+ }
779
+ ]
780
+ },
781
+ {
782
+ "id": "Qwen/Qwen2.5-Coder-7B-Instruct",
783
+ "object": "model",
784
+ "created": 1726580329,
785
+ "owned_by": "Qwen",
786
+ "providers": [
787
+ {
788
+ "provider": "featherless-ai",
789
+ "status": "live"
790
+ },
791
+ {
792
+ "provider": "nscale",
793
+ "status": "live",
794
+ "supports_tools": false,
795
+ "supports_structured_output": true
796
+ }
797
+ ]
798
+ },
799
+ {
800
+ "id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
801
+ "object": "model",
802
+ "created": 1737363858,
803
+ "owned_by": "deepseek-ai",
804
+ "providers": [
805
+ {
806
+ "provider": "featherless-ai",
807
+ "status": "live"
808
+ },
809
+ {
810
+ "provider": "nscale",
811
+ "status": "live",
812
+ "supports_tools": false,
813
+ "supports_structured_output": false
814
+ }
815
+ ]
816
+ },
817
+ {
818
+ "id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
819
+ "object": "model",
820
+ "created": 1743545840,
821
+ "owned_by": "meta-llama",
822
+ "providers": [
823
+ {
824
+ "provider": "fireworks-ai",
825
+ "status": "live",
826
+ "supports_tools": true,
827
+ "supports_structured_output": false
828
+ },
829
+ {
830
+ "provider": "groq",
831
+ "status": "live",
832
+ "supports_tools": true,
833
+ "supports_structured_output": false
834
+ },
835
+ {
836
+ "provider": "sambanova",
837
+ "status": "live",
838
+ "supports_tools": true,
839
+ "supports_structured_output": true
840
+ }
841
+ ]
842
+ },
843
+ {
844
+ "id": "deepseek-ai/DeepSeek-V3",
845
+ "object": "model",
846
+ "created": 1735131143,
847
+ "owned_by": "deepseek-ai",
848
+ "providers": [
849
+ {
850
+ "provider": "novita",
851
+ "status": "live",
852
+ "supports_tools": true,
853
+ "supports_structured_output": false
854
+ },
855
+ {
856
+ "provider": "nebius",
857
+ "status": "live",
858
+ "supports_tools": false,
859
+ "supports_structured_output": true
860
+ },
861
+ {
862
+ "provider": "fireworks-ai",
863
+ "status": "live",
864
+ "supports_tools": true,
865
+ "supports_structured_output": false
866
+ },
867
+ {
868
+ "provider": "together",
869
+ "status": "live",
870
+ "supports_tools": true,
871
+ "supports_structured_output": false
872
+ }
873
+ ]
874
+ },
875
+ {
876
+ "id": "MiniMaxAI/MiniMax-M1-80k",
877
+ "object": "model",
878
+ "created": 1749802874,
879
+ "owned_by": "MiniMaxAI",
880
+ "providers": [
881
+ {
882
+ "provider": "novita",
883
+ "status": "live",
884
+ "supports_tools": false,
885
+ "supports_structured_output": false
886
+ }
887
+ ]
888
+ },
889
+ {
890
+ "id": "Qwen/Qwen2.5-7B-Instruct",
891
+ "object": "model",
892
+ "created": 1726487740,
893
+ "owned_by": "Qwen",
894
+ "providers": [
895
+ {
896
+ "provider": "featherless-ai",
897
+ "status": "live"
898
+ },
899
+ {
900
+ "provider": "together",
901
+ "status": "live",
902
+ "supports_tools": true,
903
+ "supports_structured_output": false
904
+ }
905
+ ]
906
+ },
907
+ {
908
+ "id": "microsoft/phi-4",
909
+ "object": "model",
910
+ "created": 1733917649,
911
+ "owned_by": "microsoft",
912
+ "providers": [
913
+ {
914
+ "provider": "nebius",
915
+ "status": "live",
916
+ "supports_tools": false,
917
+ "supports_structured_output": true
918
+ }
919
+ ]
920
+ },
921
+ {
922
+ "id": "Qwen/Qwen2.5-72B-Instruct",
923
+ "object": "model",
924
+ "created": 1726487791,
925
+ "owned_by": "Qwen",
926
+ "providers": [
927
+ {
928
+ "provider": "novita",
929
+ "status": "live",
930
+ "supports_tools": true,
931
+ "supports_structured_output": true
932
+ },
933
+ {
934
+ "provider": "nebius",
935
+ "status": "live",
936
+ "supports_tools": true,
937
+ "supports_structured_output": true
938
+ },
939
+ {
940
+ "provider": "featherless-ai",
941
+ "status": "live"
942
+ },
943
+ {
944
+ "provider": "together",
945
+ "status": "live",
946
+ "supports_tools": true,
947
+ "supports_structured_output": true
948
+ },
949
+ {
950
+ "provider": "hyperbolic",
951
+ "status": "live",
952
+ "supports_tools": false,
953
+ "supports_structured_output": false
954
+ }
955
+ ]
956
+ },
957
+ {
958
+ "id": "CohereLabs/c4ai-command-r-plus",
959
+ "object": "model",
960
+ "created": 1712150824,
961
+ "owned_by": "CohereLabs",
962
+ "providers": [
963
+ {
964
+ "provider": "cohere",
965
+ "status": "live",
966
+ "supports_tools": true,
967
+ "supports_structured_output": false
968
+ }
969
+ ]
970
+ },
971
+ {
972
+ "id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
973
+ "object": "model",
974
+ "created": 1737364707,
975
+ "owned_by": "deepseek-ai",
976
+ "providers": [
977
+ {
978
+ "provider": "novita",
979
+ "status": "live",
980
+ "supports_tools": false,
981
+ "supports_structured_output": true
982
+ },
983
+ {
984
+ "provider": "featherless-ai",
985
+ "status": "live"
986
+ },
987
+ {
988
+ "provider": "nscale",
989
+ "status": "live",
990
+ "supports_tools": false,
991
+ "supports_structured_output": true
992
+ }
993
+ ]
994
+ },
995
+ {
996
+ "id": "Sao10K/L3-8B-Stheno-v3.2",
997
+ "object": "model",
998
+ "created": 1717583457,
999
+ "owned_by": "Sao10K",
1000
+ "providers": [
1001
+ {
1002
+ "provider": "novita",
1003
+ "status": "live",
1004
+ "supports_tools": false,
1005
+ "supports_structured_output": false
1006
+ },
1007
+ {
1008
+ "provider": "featherless-ai",
1009
+ "status": "live"
1010
+ }
1011
+ ]
1012
+ },
1013
+ {
1014
+ "id": "Qwen/QwQ-32B",
1015
+ "object": "model",
1016
+ "created": 1741184219,
1017
+ "owned_by": "Qwen",
1018
+ "providers": [
1019
+ {
1020
+ "provider": "nebius",
1021
+ "status": "live",
1022
+ "supports_tools": false,
1023
+ "supports_structured_output": true
1024
+ },
1025
+ {
1026
+ "provider": "featherless-ai",
1027
+ "status": "live"
1028
+ },
1029
+ {
1030
+ "provider": "hyperbolic",
1031
+ "status": "live",
1032
+ "supports_tools": false,
1033
+ "supports_structured_output": false
1034
+ },
1035
+ {
1036
+ "provider": "nscale",
1037
+ "status": "live",
1038
+ "supports_tools": false,
1039
+ "supports_structured_output": true
1040
+ }
1041
+ ]
1042
+ },
1043
+ {
1044
+ "id": "zai-org/GLM-4-32B-0414",
1045
+ "object": "model",
1046
+ "created": 1744030421,
1047
+ "owned_by": "zai-org",
1048
+ "providers": [
1049
+ {
1050
+ "provider": "novita",
1051
+ "status": "live",
1052
+ "supports_tools": false,
1053
+ "supports_structured_output": true
1054
+ },
1055
+ {
1056
+ "provider": "featherless-ai",
1057
+ "status": "live"
1058
+ }
1059
+ ]
1060
+ },
1061
+ {
1062
+ "id": "Qwen/Qwen2.5-VL-32B-Instruct",
1063
+ "object": "model",
1064
+ "created": 1742570595,
1065
+ "owned_by": "Qwen",
1066
+ "providers": [
1067
+ {
1068
+ "provider": "fireworks-ai",
1069
+ "status": "live",
1070
+ "supports_tools": false,
1071
+ "supports_structured_output": false
1072
+ }
1073
+ ]
1074
+ },
1075
+ {
1076
+ "id": "CohereLabs/aya-expanse-32b",
1077
+ "object": "model",
1078
+ "created": 1729665764,
1079
+ "owned_by": "CohereLabs",
1080
+ "providers": [
1081
+ {
1082
+ "provider": "cohere",
1083
+ "status": "live",
1084
+ "supports_tools": false,
1085
+ "supports_structured_output": false
1086
+ }
1087
+ ]
1088
+ },
1089
+ {
1090
+ "id": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
1091
+ "object": "model",
1092
+ "created": 1742089188,
1093
+ "owned_by": "nvidia",
1094
+ "providers": [
1095
+ {
1096
+ "provider": "nebius",
1097
+ "status": "live",
1098
+ "supports_tools": false,
1099
+ "supports_structured_output": false
1100
+ }
1101
+ ]
1102
+ },
1103
+ {
1104
+ "id": "baidu/ERNIE-4.5-21B-A3B-PT",
1105
+ "object": "model",
1106
+ "created": 1751091210,
1107
+ "owned_by": "baidu",
1108
+ "providers": [
1109
+ {
1110
+ "provider": "novita",
1111
+ "status": "live",
1112
+ "supports_tools": false,
1113
+ "supports_structured_output": false
1114
+ }
1115
+ ]
1116
+ },
1117
+ {
1118
+ "id": "meta-llama/Llama-3.2-11B-Vision-Instruct",
1119
+ "object": "model",
1120
+ "created": 1726678048,
1121
+ "owned_by": "meta-llama",
1122
+ "providers": [
1123
+ {
1124
+ "provider": "hf-inference",
1125
+ "status": "live",
1126
+ "supports_tools": true,
1127
+ "supports_structured_output": false
1128
+ }
1129
+ ]
1130
+ },
1131
+ {
1132
+ "id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
1133
+ "object": "model",
1134
+ "created": 1737364574,
1135
+ "owned_by": "deepseek-ai",
1136
+ "providers": [
1137
+ {
1138
+ "provider": "featherless-ai",
1139
+ "status": "live"
1140
+ },
1141
+ {
1142
+ "provider": "nscale",
1143
+ "status": "live",
1144
+ "supports_tools": false,
1145
+ "supports_structured_output": false
1146
+ }
1147
+ ]
1148
+ },
1149
+ {
1150
+ "id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
1151
+ "object": "model",
1152
+ "created": 1737364740,
1153
+ "owned_by": "deepseek-ai",
1154
+ "providers": [
1155
+ {
1156
+ "provider": "novita",
1157
+ "status": "live",
1158
+ "supports_tools": false,
1159
+ "supports_structured_output": true
1160
+ },
1161
+ {
1162
+ "provider": "nscale",
1163
+ "status": "live",
1164
+ "supports_tools": false,
1165
+ "supports_structured_output": true
1166
+ }
1167
+ ]
1168
+ },
1169
+ {
1170
+ "id": "Qwen/Qwen2.5-VL-72B-Instruct",
1171
+ "object": "model",
1172
+ "created": 1737951124,
1173
+ "owned_by": "Qwen",
1174
+ "providers": [
1175
+ {
1176
+ "provider": "nebius",
1177
+ "status": "live",
1178
+ "supports_tools": false,
1179
+ "supports_structured_output": true
1180
+ },
1181
+ {
1182
+ "provider": "hyperbolic",
1183
+ "status": "live",
1184
+ "supports_tools": false,
1185
+ "supports_structured_output": false
1186
+ }
1187
+ ]
1188
+ },
1189
+ {
1190
+ "id": "Qwen/QwQ-32B-Preview",
1191
+ "object": "model",
1192
+ "created": 1732722655,
1193
+ "owned_by": "Qwen",
1194
+ "providers": [
1195
+ {
1196
+ "provider": "featherless-ai",
1197
+ "status": "live"
1198
+ },
1199
+ {
1200
+ "provider": "together",
1201
+ "status": "live",
1202
+ "supports_tools": true,
1203
+ "supports_structured_output": true
1204
+ }
1205
+ ]
1206
+ },
1207
+ {
1208
+ "id": "Sao10K/L3-8B-Lunaris-v1",
1209
+ "object": "model",
1210
+ "created": 1719362412,
1211
+ "owned_by": "Sao10K",
1212
+ "providers": [
1213
+ {
1214
+ "provider": "novita",
1215
+ "status": "live",
1216
+ "supports_tools": false,
1217
+ "supports_structured_output": false
1218
+ },
1219
+ {
1220
+ "provider": "featherless-ai",
1221
+ "status": "live"
1222
+ }
1223
+ ]
1224
+ },
1225
+ {
1226
+ "id": "google/gemma-2-9b-it",
1227
+ "object": "model",
1228
+ "created": 1719216341,
1229
+ "owned_by": "google",
1230
+ "providers": [
1231
+ {
1232
+ "provider": "nebius",
1233
+ "status": "live",
1234
+ "supports_tools": false,
1235
+ "supports_structured_output": true
1236
+ },
1237
+ {
1238
+ "provider": "featherless-ai",
1239
+ "status": "live"
1240
+ },
1241
+ {
1242
+ "provider": "groq",
1243
+ "status": "live",
1244
+ "supports_tools": true,
1245
+ "supports_structured_output": false
1246
+ }
1247
+ ]
1248
+ },
1249
+ {
1250
+ "id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
1251
+ "object": "model",
1252
+ "created": 1737364413,
1253
+ "owned_by": "deepseek-ai",
1254
+ "providers": [
1255
+ {
1256
+ "provider": "novita",
1257
+ "status": "live",
1258
+ "supports_tools": false,
1259
+ "supports_structured_output": false
1260
+ },
1261
+ {
1262
+ "provider": "nebius",
1263
+ "status": "live",
1264
+ "supports_tools": false,
1265
+ "supports_structured_output": true
1266
+ },
1267
+ {
1268
+ "provider": "groq",
1269
+ "status": "live",
1270
+ "supports_tools": true,
1271
+ "supports_structured_output": false
1272
+ },
1273
+ {
1274
+ "provider": "nscale",
1275
+ "status": "live",
1276
+ "supports_tools": false,
1277
+ "supports_structured_output": true
1278
+ },
1279
+ {
1280
+ "provider": "sambanova",
1281
+ "status": "live",
1282
+ "supports_tools": false,
1283
+ "supports_structured_output": false
1284
+ }
1285
+ ]
1286
+ },
1287
+ {
1288
+ "id": "baidu/ERNIE-4.5-VL-28B-A3B-PT",
1289
+ "object": "model",
1290
+ "created": 1751089833,
1291
+ "owned_by": "baidu",
1292
+ "providers": [
1293
+ {
1294
+ "provider": "novita",
1295
+ "status": "live",
1296
+ "supports_tools": false,
1297
+ "supports_structured_output": false
1298
+ }
1299
+ ]
1300
+ },
1301
+ {
1302
+ "id": "meta-llama/Llama-3.1-70B-Instruct",
1303
+ "object": "model",
1304
+ "created": 1721146066,
1305
+ "owned_by": "meta-llama",
1306
+ "providers": [
1307
+ {
1308
+ "provider": "nebius",
1309
+ "status": "live",
1310
+ "supports_tools": true,
1311
+ "supports_structured_output": true
1312
+ },
1313
+ {
1314
+ "provider": "fireworks-ai",
1315
+ "status": "live",
1316
+ "supports_tools": true,
1317
+ "supports_structured_output": false
1318
+ },
1319
+ {
1320
+ "provider": "hyperbolic",
1321
+ "status": "staging",
1322
+ "supports_tools": false,
1323
+ "supports_structured_output": false
1324
+ }
1325
+ ]
1326
+ },
1327
+ {
1328
+ "id": "NousResearch/Hermes-3-Llama-3.1-405B",
1329
+ "object": "model",
1330
+ "created": 1723525073,
1331
+ "owned_by": "NousResearch",
1332
+ "providers": [
1333
+ {
1334
+ "provider": "nebius",
1335
+ "status": "live",
1336
+ "supports_tools": false,
1337
+ "supports_structured_output": true
1338
+ }
1339
+ ]
1340
+ },
1341
+ {
1342
+ "id": "Qwen/Qwen2.5-Coder-3B-Instruct",
1343
+ "object": "model",
1344
+ "created": 1730879378,
1345
+ "owned_by": "Qwen",
1346
+ "providers": [
1347
+ {
1348
+ "provider": "nscale",
1349
+ "status": "live",
1350
+ "supports_tools": false,
1351
+ "supports_structured_output": true
1352
+ }
1353
+ ]
1354
+ },
1355
+ {
1356
+ "id": "alpindale/WizardLM-2-8x22B",
1357
+ "object": "model",
1358
+ "created": 1713235019,
1359
+ "owned_by": "alpindale",
1360
+ "providers": [
1361
+ {
1362
+ "provider": "novita",
1363
+ "status": "live",
1364
+ "supports_tools": false,
1365
+ "supports_structured_output": false
1366
+ },
1367
+ {
1368
+ "provider": "featherless-ai",
1369
+ "status": "live"
1370
+ }
1371
+ ]
1372
+ },
1373
+ {
1374
+ "id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
1375
+ "object": "model",
1376
+ "created": 1743540422,
1377
+ "owned_by": "meta-llama",
1378
+ "providers": [
1379
+ {
1380
+ "provider": "novita",
1381
+ "status": "live",
1382
+ "supports_tools": false,
1383
+ "supports_structured_output": false
1384
+ },
1385
+ {
1386
+ "provider": "together",
1387
+ "status": "live",
1388
+ "supports_tools": true,
1389
+ "supports_structured_output": false
1390
+ }
1391
+ ]
1392
+ },
1393
+ {
1394
+ "id": "google/gemma-2-2b-it",
1395
+ "object": "model",
1396
+ "created": 1721127099,
1397
+ "owned_by": "google",
1398
+ "providers": [
1399
+ {
1400
+ "provider": "nebius",
1401
+ "status": "live",
1402
+ "supports_tools": false,
1403
+ "supports_structured_output": true
1404
+ }
1405
+ ]
1406
+ },
1407
+ {
1408
+ "id": "meta-llama/Meta-Llama-3-70B-Instruct",
1409
+ "object": "model",
1410
+ "created": 1713346494,
1411
+ "owned_by": "meta-llama",
1412
+ "providers": [
1413
+ {
1414
+ "provider": "novita",
1415
+ "status": "live",
1416
+ "supports_tools": false,
1417
+ "supports_structured_output": true
1418
+ },
1419
+ {
1420
+ "provider": "together",
1421
+ "status": "live",
1422
+ "supports_tools": false,
1423
+ "supports_structured_output": true
1424
+ },
1425
+ {
1426
+ "provider": "groq",
1427
+ "status": "live",
1428
+ "supports_tools": true,
1429
+ "supports_structured_output": false
1430
+ },
1431
+ {
1432
+ "provider": "hyperbolic",
1433
+ "status": "live",
1434
+ "supports_tools": false,
1435
+ "supports_structured_output": false
1436
+ }
1437
+ ]
1438
+ },
1439
+ {
1440
+ "id": "Qwen/Qwen2.5-Coder-7B",
1441
+ "object": "model",
1442
+ "created": 1726487844,
1443
+ "owned_by": "Qwen",
1444
+ "providers": [
1445
+ {
1446
+ "provider": "nebius",
1447
+ "status": "live",
1448
+ "supports_tools": false,
1449
+ "supports_structured_output": true
1450
+ },
1451
+ {
1452
+ "provider": "featherless-ai",
1453
+ "status": "live"
1454
+ }
1455
+ ]
1456
+ },
1457
+ {
1458
+ "id": "CohereLabs/aya-vision-8b",
1459
+ "object": "model",
1460
+ "created": 1740938806,
1461
+ "owned_by": "CohereLabs",
1462
+ "providers": [
1463
+ {
1464
+ "provider": "cohere",
1465
+ "status": "live",
1466
+ "supports_tools": true,
1467
+ "supports_structured_output": false
1468
+ }
1469
+ ]
1470
+ },
1471
+ {
1472
+ "id": "Qwen/Qwen2.5-32B-Instruct",
1473
+ "object": "model",
1474
+ "created": 1726546675,
1475
+ "owned_by": "Qwen",
1476
+ "providers": [
1477
+ {
1478
+ "provider": "nebius",
1479
+ "status": "live",
1480
+ "supports_tools": true,
1481
+ "supports_structured_output": true
1482
+ },
1483
+ {
1484
+ "provider": "featherless-ai",
1485
+ "status": "live"
1486
+ }
1487
+ ]
1488
+ },
1489
+ {
1490
+ "id": "CohereLabs/c4ai-command-r-v01",
1491
+ "object": "model",
1492
+ "created": 1710172254,
1493
+ "owned_by": "CohereLabs",
1494
+ "providers": [
1495
+ {
1496
+ "provider": "cohere",
1497
+ "status": "live",
1498
+ "supports_tools": true,
1499
+ "supports_structured_output": false
1500
+ }
1501
+ ]
1502
+ },
1503
+ {
1504
+ "id": "nvidia/Llama-3_1-Nemotron-Ultra-253B-v1",
1505
+ "object": "model",
1506
+ "created": 1744051630,
1507
+ "owned_by": "nvidia",
1508
+ "providers": [
1509
+ {
1510
+ "provider": "nebius",
1511
+ "status": "live",
1512
+ "supports_tools": false,
1513
+ "supports_structured_output": true
1514
+ }
1515
+ ]
1516
+ },
1517
+ {
1518
+ "id": "CohereLabs/c4ai-command-r-08-2024",
1519
+ "object": "model",
1520
+ "created": 1724053211,
1521
+ "owned_by": "CohereLabs",
1522
+ "providers": [
1523
+ {
1524
+ "provider": "cohere",
1525
+ "status": "live",
1526
+ "supports_tools": true,
1527
+ "supports_structured_output": false
1528
+ }
1529
+ ]
1530
+ },
1531
+ {
1532
+ "id": "meta-llama/Llama-Guard-4-12B",
1533
+ "object": "model",
1534
+ "created": 1745407825,
1535
+ "owned_by": "meta-llama",
1536
+ "providers": [
1537
+ {
1538
+ "provider": "groq",
1539
+ "status": "live",
1540
+ "supports_tools": false,
1541
+ "supports_structured_output": false
1542
+ }
1543
+ ]
1544
+ },
1545
+ {
1546
+ "id": "CohereLabs/aya-expanse-8b",
1547
+ "object": "model",
1548
+ "created": 1729665253,
1549
+ "owned_by": "CohereLabs",
1550
+ "providers": [
1551
+ {
1552
+ "provider": "cohere",
1553
+ "status": "live",
1554
+ "supports_tools": false,
1555
+ "supports_structured_output": false
1556
+ }
1557
+ ]
1558
+ },
1559
+ {
1560
+ "id": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
1561
+ "object": "model",
1562
+ "created": 1704953687,
1563
+ "owned_by": "NousResearch",
1564
+ "providers": [
1565
+ {
1566
+ "provider": "together",
1567
+ "status": "live",
1568
+ "supports_tools": false,
1569
+ "supports_structured_output": false
1570
+ }
1571
+ ]
1572
+ },
1573
+ {
1574
+ "id": "CohereLabs/c4ai-command-r7b-arabic-02-2025",
1575
+ "object": "model",
1576
+ "created": 1740643869,
1577
+ "owned_by": "CohereLabs",
1578
+ "providers": [
1579
+ {
1580
+ "provider": "cohere",
1581
+ "status": "live",
1582
+ "supports_tools": true,
1583
+ "supports_structured_output": false
1584
+ }
1585
+ ]
1586
+ },
1587
+ {
1588
+ "id": "nvidia/Llama-3.1-Nemotron-70B-Instruct-HF",
1589
+ "object": "model",
1590
+ "created": 1728700633,
1591
+ "owned_by": "nvidia",
1592
+ "providers": [
1593
+ {
1594
+ "provider": "together",
1595
+ "status": "live",
1596
+ "supports_tools": true,
1597
+ "supports_structured_output": false
1598
+ }
1599
+ ]
1600
+ },
1601
+ {
1602
+ "id": "CohereLabs/c4ai-command-a-03-2025",
1603
+ "object": "model",
1604
+ "created": 1741684205,
1605
+ "owned_by": "CohereLabs",
1606
+ "providers": [
1607
+ {
1608
+ "provider": "cohere",
1609
+ "status": "live",
1610
+ "supports_tools": true,
1611
+ "supports_structured_output": false
1612
+ }
1613
+ ]
1614
+ },
1615
+ {
1616
+ "id": "Qwen/Qwen3-235B-A22B-FP8",
1617
+ "object": "model",
1618
+ "created": 1745850968,
1619
+ "owned_by": "Qwen",
1620
+ "providers": [
1621
+ {
1622
+ "provider": "together",
1623
+ "status": "live",
1624
+ "supports_tools": true,
1625
+ "supports_structured_output": true
1626
+ }
1627
+ ]
1628
+ },
1629
+ {
1630
+ "id": "Qwen/Qwen2-VL-72B-Instruct",
1631
+ "object": "model",
1632
+ "created": 1726547134,
1633
+ "owned_by": "Qwen",
1634
+ "providers": [
1635
+ {
1636
+ "provider": "nebius",
1637
+ "status": "live",
1638
+ "supports_tools": false,
1639
+ "supports_structured_output": true
1640
+ }
1641
+ ]
1642
+ },
1643
+ {
1644
+ "id": "SentientAGI/Dobby-Unhinged-Llama-3.3-70B",
1645
+ "object": "model",
1646
+ "created": 1739304442,
1647
+ "owned_by": "SentientAGI",
1648
+ "providers": [
1649
+ {
1650
+ "provider": "featherless-ai",
1651
+ "status": "live"
1652
+ },
1653
+ {
1654
+ "provider": "fireworks-ai",
1655
+ "status": "live",
1656
+ "supports_tools": false,
1657
+ "supports_structured_output": false
1658
+ }
1659
+ ]
1660
+ },
1661
+ {
1662
+ "id": "CohereLabs/c4ai-command-r7b-12-2024",
1663
+ "object": "model",
1664
+ "created": 1733932505,
1665
+ "owned_by": "CohereLabs",
1666
+ "providers": [
1667
+ {
1668
+ "provider": "cohere",
1669
+ "status": "live",
1670
+ "supports_tools": true,
1671
+ "supports_structured_output": false
1672
+ }
1673
+ ]
1674
+ },
1675
+ {
1676
+ "id": "NousResearch/Hermes-3-Llama-3.1-70B",
1677
+ "object": "model",
1678
+ "created": 1722263277,
1679
+ "owned_by": "NousResearch",
1680
+ "providers": [
1681
+ {
1682
+ "provider": "hyperbolic",
1683
+ "status": "live",
1684
+ "supports_tools": false,
1685
+ "supports_structured_output": false
1686
+ }
1687
+ ]
1688
+ },
1689
+ {
1690
+ "id": "aaditya/Llama3-OpenBioLLM-70B",
1691
+ "object": "model",
1692
+ "created": 1713925272,
1693
+ "owned_by": "aaditya",
1694
+ "providers": [
1695
+ {
1696
+ "provider": "nebius",
1697
+ "status": "live",
1698
+ "supports_tools": false,
1699
+ "supports_structured_output": true
1700
+ }
1701
+ ]
1702
+ },
1703
+ {
1704
+ "id": "CohereLabs/aya-vision-32b",
1705
+ "object": "model",
1706
+ "created": 1740951400,
1707
+ "owned_by": "CohereLabs",
1708
+ "providers": [
1709
+ {
1710
+ "provider": "cohere",
1711
+ "status": "live",
1712
+ "supports_tools": false,
1713
+ "supports_structured_output": false
1714
+ }
1715
+ ]
1716
+ },
1717
+ {
1718
+ "id": "NousResearch/Hermes-2-Pro-Llama-3-8B",
1719
+ "object": "model",
1720
+ "created": 1714436178,
1721
+ "owned_by": "NousResearch",
1722
+ "providers": [
1723
+ {
1724
+ "provider": "novita",
1725
+ "status": "live",
1726
+ "supports_tools": false,
1727
+ "supports_structured_output": false
1728
+ }
1729
+ ]
1730
+ },
1731
+ {
1732
+ "id": "Qwen/Qwen2-72B-Instruct",
1733
+ "object": "model",
1734
+ "created": 1716868129,
1735
+ "owned_by": "Qwen",
1736
+ "providers": [
1737
+ {
1738
+ "provider": "featherless-ai",
1739
+ "status": "live"
1740
+ },
1741
+ {
1742
+ "provider": "together",
1743
+ "status": "live",
1744
+ "supports_tools": false,
1745
+ "supports_structured_output": false
1746
+ }
1747
+ ]
1748
+ },
1749
+ {
1750
+ "id": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
1751
+ "object": "model",
1752
+ "created": 1737364182,
1753
+ "owned_by": "deepseek-ai",
1754
+ "providers": [
1755
+ {
1756
+ "provider": "novita",
1757
+ "status": "live",
1758
+ "supports_tools": false,
1759
+ "supports_structured_output": true
1760
+ },
1761
+ {
1762
+ "provider": "featherless-ai",
1763
+ "status": "live"
1764
+ },
1765
+ {
1766
+ "provider": "nscale",
1767
+ "status": "live",
1768
+ "supports_tools": false,
1769
+ "supports_structured_output": true
1770
+ }
1771
+ ]
1772
+ },
1773
+ {
1774
+ "id": "dphn/dolphin-2.9.2-mixtral-8x22b",
1775
+ "object": "model",
1776
+ "created": 1716934597,
1777
+ "owned_by": "dphn",
1778
+ "providers": [
1779
+ {
1780
+ "provider": "novita",
1781
+ "status": "live",
1782
+ "supports_tools": false,
1783
+ "supports_structured_output": false
1784
+ }
1785
+ ]
1786
+ },
1787
+ {
1788
+ "id": "marin-community/marin-8b-instruct",
1789
+ "object": "model",
1790
+ "created": 1747241066,
1791
+ "owned_by": "marin-community",
1792
+ "providers": [
1793
+ {
1794
+ "provider": "featherless-ai",
1795
+ "status": "live"
1796
+ },
1797
+ {
1798
+ "provider": "together",
1799
+ "status": "live",
1800
+ "supports_tools": false,
1801
+ "supports_structured_output": true
1802
+ }
1803
+ ]
1804
+ },
1805
+ {
1806
+ "id": "meta-llama/Llama-3.1-405B-Instruct",
1807
+ "object": "model",
1808
+ "created": 1721154284,
1809
+ "owned_by": "meta-llama",
1810
+ "providers": [
1811
+ {
1812
+ "provider": "nebius",
1813
+ "status": "live",
1814
+ "supports_tools": true,
1815
+ "supports_structured_output": true
1816
+ },
1817
+ {
1818
+ "provider": "fireworks-ai",
1819
+ "status": "live",
1820
+ "supports_tools": true,
1821
+ "supports_structured_output": false
1822
+ },
1823
+ {
1824
+ "provider": "sambanova",
1825
+ "status": "live",
1826
+ "supports_tools": true,
1827
+ "supports_structured_output": true
1828
+ }
1829
+ ]
1830
+ },
1831
+ {
1832
+ "id": "baidu/ERNIE-4.5-VL-424B-A47B-Base-PT",
1833
+ "object": "model",
1834
+ "created": 1751089776,
1835
+ "owned_by": "baidu",
1836
+ "providers": [
1837
+ {
1838
+ "provider": "novita",
1839
+ "status": "live",
1840
+ "supports_tools": false,
1841
+ "supports_structured_output": false
1842
+ }
1843
+ ]
1844
+ },
1845
+ {
1846
+ "id": "Sao10K/L3-70B-Euryale-v2.1",
1847
+ "object": "model",
1848
+ "created": 1718134069,
1849
+ "owned_by": "Sao10K",
1850
+ "providers": [
1851
+ {
1852
+ "provider": "novita",
1853
+ "status": "live",
1854
+ "supports_tools": false,
1855
+ "supports_structured_output": false
1856
+ },
1857
+ {
1858
+ "provider": "featherless-ai",
1859
+ "status": "live"
1860
+ }
1861
+ ]
1862
+ },
1863
+ {
1864
+ "id": "tokyotech-llm/Llama-3.3-Swallow-70B-Instruct-v0.4",
1865
+ "object": "model",
1866
+ "created": 1740976430,
1867
+ "owned_by": "tokyotech-llm",
1868
+ "providers": [
1869
+ {
1870
+ "provider": "sambanova",
1871
+ "status": "live",
1872
+ "supports_tools": false,
1873
+ "supports_structured_output": true
1874
+ }
1875
+ ]
1876
+ },
1877
+ {
1878
+ "id": "deepseek-ai/DeepSeek-Prover-V2-671B",
1879
+ "object": "model",
1880
+ "created": 1745993675,
1881
+ "owned_by": "deepseek-ai",
1882
+ "providers": [
1883
+ {
1884
+ "provider": "novita",
1885
+ "status": "live",
1886
+ "supports_tools": false,
1887
+ "supports_structured_output": false
1888
+ }
1889
+ ]
1890
+ },
1891
+ {
1892
+ "id": "baidu/ERNIE-4.5-300B-A47B-Base-PT",
1893
+ "object": "model",
1894
+ "created": 1751089026,
1895
+ "owned_by": "baidu",
1896
+ "providers": [
1897
+ {
1898
+ "provider": "novita",
1899
+ "status": "live",
1900
+ "supports_tools": false,
1901
+ "supports_structured_output": false
1902
+ }
1903
+ ]
1904
+ },
1905
+ {
1906
+ "id": "baidu/ERNIE-4.5-0.3B-PT",
1907
+ "object": "model",
1908
+ "created": 1751091102,
1909
+ "owned_by": "baidu",
1910
+ "providers": [
1911
+ {
1912
+ "provider": "novita",
1913
+ "status": "live",
1914
+ "supports_tools": false,
1915
+ "supports_structured_output": false
1916
+ }
1917
+ ]
1918
+ }
1919
+ ]
1920
+ }