{"model": "auto", "provider": "OpenaiChat", "prompt_tokens": 8, "completion_tokens": 0, "total_tokens": 8} | |
{"model": "auto", "provider": "OpenaiChat", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13} | |
{"model": "o3-mini", "provider": "OpenaiChat", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13} | |
{"model": "deepseek-v3", "provider": "DeepSeekAPI", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13} | |
{"provider": "Gemini", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13} | |
{"model": "qwen-qwen2-72b-instruct", "provider": "HuggingSpace", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13} | |
{"model": "qwen-2.5-1m-demo", "provider": "HuggingSpace", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13} | |
{"model": "Qwen/QwQ-32B", "provider": "HuggingChat", "prompt_tokens": 13, "completion_tokens": 81, "total_tokens": 94} | |
{"model": "openai", "provider": "PollinationsAI", "completion_tokens": 9, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens": 25, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}, "total_tokens": 34} | |
{"model": "Claude-Sonnet-3.5 (Premium)", "provider": "Blackbox", "prompt_tokens": 20, "completion_tokens": 0, "total_tokens": 20} | |
{"model": "Claude-Sonnet-3.5 (Premium)", "provider": "Blackbox", "prompt_tokens": 604, "completion_tokens": 0, "total_tokens": 604} | |
{"model": "Claude-Sonnet-3.5 (Premium)", "provider": "Blackbox", "prompt_tokens": 1557, "completion_tokens": 0, "total_tokens": 1557} | |
{"model": "Claude-Sonnet-3.5 (Premium)", "provider": "Blackbox", "prompt_tokens": 2200, "completion_tokens": 0, "total_tokens": 2200} | |
{"model": "Claude-Sonnet-3.5 (Premium)", "provider": "Blackbox", "prompt_tokens": 2874, "completion_tokens": 0, "total_tokens": 2874} | |
{"model": "phi-4-multimodal", "provider": "Phi_4", "prompt_tokens": 8, "completion_tokens": 0, "total_tokens": 8} | |
{"model": "phi-4-multimodal", "provider": "Phi_4", "prompt_tokens": 13, "completion_tokens": 0, "total_tokens": 13} | |
{"model": "flux", "provider": "G4F", "prompt_tokens": 27, "completion_tokens": 0, "total_tokens": 27} | |