| Model | Input price ($/1M tokens) | Output price ($/1M tokens) | Context length (tokens) | Latency (s) | Throughput (tokens/s) | Tool calling | Structured output |
|---|---|---|---|---|---|---|---|
| zai-org/GLM-5 | $1.00 | $3.20 | 202,752 | 0.63 | 34 | Yes | Yes |
| moonshotai/Kimi-K2.5 | $0.50 | $2.80 | 262,144 | 0.50 | 80 | Yes | Yes |
| zai-org/GLM-4.7-Flash | - | - | - | 2.65 | 60 | Yes | No |
| meta-llama/Llama-3.1-8B-Instruct | $0.10 | $0.10 | - | 0.25 | 1,305 | No | No |
| openai/gpt-oss-20b | $0.05 | $0.20 | 131,072 | 0.22 | 107 | Yes | Yes |
| MiniMaxAI/MiniMax-M2.1 | $0.30 | $1.20 | 204,800 | 0.97 | 37 | Yes | No |
| openai/gpt-oss-120b | $0.25 | $0.69 | - | 0.23 | 1,161 | Yes | No |
| zai-org/GLM-4.7 | - | - | - | 1.64 | 89 | Yes | No |
| Qwen/Qwen3-32B | $0.40 | $0.80 | - | 0.20 | 1,121 | Yes | No |
| zai-org/GLM-4.6V-Flash | $0.30 | $0.90 | 131,072 | 0.85 | 20 | Yes | No |
| meta-llama/Llama-3.3-70B-Instruct | $0.85 | $1.20 | - | 0.23 | 1,009 | Yes | No |
| meta-llama/Llama-3.3-70B-Instruct | $0.60 | $1.20 | 131,072 | 0.48 | 281 | Yes | Yes |
| meta-llama/Llama-3.3-70B-Instruct | $0.88 | $0.88 | 131,072 | 0.34 | 133 | Yes | Yes |
| XiaomiMiMo/MiMo-V2-Flash | $0.10 | $0.30 | 262,144 | 1.00 | 57 | No | No |
| HuggingFaceTB/SmolLM3-3B | - | - | - | 0.11 | 91 | Yes | Yes |
| Qwen/Qwen2.5-Coder-32B-Instruct | $0.20 | $0.20 | 32,768 | 0.42 | 41 | No | No |
| allenai/Olmo-3.1-32B-Think | $0.15 | $0.50 | - | 2.39 | 76 | No | No |
| zai-org/GLM-4.6V | - | - | - | 3.47 | 44 | Yes | No |
| katanemo/Arch-Router-1.5B | - | - | - | 0.19 | 69 | No | Yes |
| Qwen/Qwen3-235B-A22B-Instruct-2507 | $0.20 | $0.60 | 262,144 | 0.51 | 44 | Yes | Yes |
| meta-llama/Llama-4-Maverick-17B-128E-Instruct | $0.63 | $1.80 | 131,072 | 2.86 | 240 | Yes | Yes |
| allenai/Molmo2-8B | - | - | - | 2.19 | 127 | No | Yes |
| allenai/Olmo-3.1-32B-Instruct | $0.20 | $0.60 | - | 2.03 | 50 | Yes | Yes |
| zai-org/GLM-4.7-FP8 | - | - | - | 2.34 | 88 | Yes | No |
| Qwen/Qwen3-235B-A22B | $0.20 | $0.80 | 40,960 | 0.39 | 16 | No | No |
| zai-org/GLM-4.5 | - | - | - | 2.10 | 56 | Yes | No |
| swiss-ai/Apertus-70B-Instruct-2509 | $0.82 | $2.92 | - | 1.96 | 53 | No | Yes |
| CohereLabs/aya-expanse-32b | - | - | - | 0.18 | 42 | No | No |
| CohereLabs/aya-vision-32b | - | - | - | 0.66 | 36 | No | No |
| swiss-ai/Apertus-8B-Instruct-2509 | $0.10 | $0.20 | - | 1.79 | 95 | No | No |
| zai-org/GLM-4.5V-FP8 | - | - | - | 2.03 | 62 | Yes | No |
| CohereLabs/c4ai-command-r-08-2024 | - | - | - | 0.29 | 33 | Yes | No |
| deepseek-ai/DeepSeek-R1-0528 | $5.00 | $7.00 | 131,072 | 0.67 | 194 | Yes | Yes |
| CohereLabs/c4ai-command-r7b-12-2024 | - | - | - | 0.19 | 130 | Yes | No |
| CohereLabs/command-a-translate-08-2025 | - | - | - | 0.20 | 65 | Yes | No |
| CohereLabs/command-a-reasoning-08-2025 | - | - | - | 0.16 | 82 | Yes | No |
| CohereLabs/c4ai-command-a-03-2025 | - | - | - | 0.21 | 38 | Yes | No |
| CohereLabs/c4ai-command-r7b-arabic-02-2025 | - | - | - | 0.17 | 139 | Yes | No |