[
  {
    "name": "Anthropic",
    "id": "anthropic",
    "api_key": "$ANTHROPIC_API_KEY",
    "api_endpoint": "$ANTHROPIC_API_ENDPOINT",
    "type": "anthropic",
    "default_large_model_id": "claude-sonnet-4-6",
    "default_small_model_id": "claude-haiku-4-5-20251001",
    "models": [
      {
        "id": "claude-sonnet-4-6",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "max"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-5-20250929",
        "name": "Claude Sonnet 4.5",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-7",
        "name": "Claude Opus 4.7",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1000000,
        "default_max_tokens": 126000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "max"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-6",
        "name": "Claude Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1000000,
        "default_max_tokens": 126000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "max"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-5-20251101",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-haiku-4-5-20251001",
        "name": "Claude 4.5 Haiku",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 1.25,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-1-20250805",
        "name": "Claude Opus 4.1",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 18.75,
        "cost_per_1m_out_cached": 1.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-20250514",
        "name": "Claude Opus 4",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 18.75,
        "cost_per_1m_out_cached": 1.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-20250514",
        "name": "Claude Sonnet 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "OpenAI",
    "id": "openai",
    "api_key": "$OPENAI_API_KEY",
    "api_endpoint": "$OPENAI_API_ENDPOINT",
    "type": "openai",
    "default_large_model_id": "gpt-5.4",
    "default_small_model_id": "gpt-5.4-nano",
    "models": [
      {
        "id": "gpt-5.4",
        "name": "GPT-5.4",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.25,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-pro",
        "name": "GPT-5.4 Pro",
        "cost_per_1m_in": 30,
        "cost_per_1m_out": 180,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-mini",
        "name": "GPT-5.4 Mini",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 180,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-nano",
        "name": "GPT-5.4 Nano",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0.02,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.3-codex",
        "name": "GPT-5.3 Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2",
        "name": "GPT-5.2",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-codex",
        "name": "GPT-5.2 Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1",
        "name": "GPT-5.1",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex",
        "name": "GPT-5.1 Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex-max",
        "name": "GPT-5.1 Codex Max",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex-mini",
        "name": "GPT-5.1 Codex Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-codex",
        "name": "GPT-5 Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5",
        "name": "GPT-5",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-mini",
        "name": "GPT-5 Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-nano",
        "name": "GPT-5 Nano",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.005,
        "cost_per_1m_out_cached": 0.005,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o4-mini",
        "name": "o4 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.275,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o3",
        "name": "o3",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1",
        "name": "GPT-4.1",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1047576,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-mini",
        "name": "GPT-4.1 Mini",
        "cost_per_1m_in": 0.39999999999999997,
        "cost_per_1m_out": 1.5999999999999999,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.09999999999999999,
        "context_window": 1047576,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-nano",
        "name": "GPT-4.1 Nano",
        "cost_per_1m_in": 0.09999999999999999,
        "cost_per_1m_out": 0.39999999999999997,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.024999999999999998,
        "context_window": 1047576,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "o3-mini",
        "name": "o3 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.55,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-4o",
        "name": "GPT-4o",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-mini",
        "name": "GPT-4o-mini",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "Google Gemini",
    "id": "gemini",
    "api_key": "$GEMINI_API_KEY",
    "api_endpoint": "$GEMINI_API_ENDPOINT",
    "type": "google",
    "default_large_model_id": "gemini-3.1-pro-preview-customtools",
    "default_small_model_id": "gemini-3-flash-preview",
    "models": [
      {
        "id": "gemini-3.1-pro-preview",
        "name": "Gemini 3.1 Pro (Regular)",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-pro-preview-customtools",
        "name": "Gemini 3.1 Pro (Optimized for Coding Agents)",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-pro-preview",
        "name": "Gemini 3 Pro",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "high"
        ],
        "default_reasoning_effort": "high",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash-preview",
        "name": "Gemini 3 Flash",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 1048576,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "minimal",
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro",
        "name": "Gemini 2.5 Pro",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 50000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash",
        "name": "Gemini 2.5 Flash",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 50000,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "xAI",
    "id": "xai",
    "api_key": "$XAI_API_KEY",
    "api_endpoint": "https://api.x.ai/v1",
    "type": "openai-compat",
    "default_large_model_id": "grok-4.20",
    "default_small_model_id": "grok-4-1-fast",
    "models": [
      {
        "id": "grok-3",
        "name": "Grok 3",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.75,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "grok-3-mini",
        "name": "Grok 3 Mini",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "grok-4",
        "name": "Grok 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.75,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "grok-4-1-fast",
        "name": "Grok 4.1 Fast",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "grok-4-1-fast-non-reasoning",
        "name": "Grok 4.1 Fast Non-Reasoning",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "grok-4-fast",
        "name": "Grok 4 Fast",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "grok-4-fast-non-reasoning",
        "name": "Grok 4 Fast Non-Reasoning",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "grok-4.20",
        "name": "Grok 4.20",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "grok-4.20-non-reasoning",
        "name": "Grok 4.20 Non-Reasoning",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "grok-code-fast",
        "name": "Grok Code Fast",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Z.AI",
    "id": "zai",
    "api_key": "$ZAI_API_KEY",
    "api_endpoint": "https://api.z.ai/api/coding/paas/v4",
    "type": "openai-compat",
    "default_large_model_id": "glm-4.7",
    "default_small_model_id": "glm-4.7-flash",
    "models": [
      {
        "id": "glm-5.1",
        "name": "GLM-5.1",
        "cost_per_1m_in": 1.4,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-5-turbo",
        "name": "GLM-5-Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-5",
        "name": "GLM-5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.7",
        "name": "GLM-4.7",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 98000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.7-flash",
        "name": "GLM-4.7 Flash",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.01,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 65550,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.6",
        "name": "GLM-4.6",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 102400,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.6v",
        "name": "GLM-4.6V",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "glm-4.5",
        "name": "GLM-4.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 49152,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.5-air",
        "name": "GLM-4.5-Air",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 49152,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.5v",
        "name": "GLM-4.5V",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 65536,
        "default_max_tokens": 8192,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "Kimi Coding",
    "id": "kimi-coding",
    "api_key": "$KIMI_CODING_API_KEY",
    "api_endpoint": "https://api.kimi.com/coding",
    "type": "anthropic",
    "default_large_model_id": "kimi-for-coding",
    "default_small_model_id": "kimi-for-coding",
    "models": [
      {
        "id": "kimi-for-coding",
        "name": "Kimi for Coding",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "MiniMax",
    "id": "minimax",
    "api_key": "$MINIMAX_API_KEY",
    "api_endpoint": "https://api.minimax.io/anthropic",
    "type": "anthropic",
    "default_large_model_id": "MiniMax-M2.7",
    "default_small_model_id": "MiniMax-M2.7",
    "models": [
      {
        "id": "MiniMax-M2.7-highspeed",
        "name": "MiniMax-M2.7-highspeed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.7",
        "name": "MiniMax-M2.7",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.5-highspeed",
        "name": "MiniMax-M2.5-highspeed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.5",
        "name": "MiniMax-M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.1-highspeed",
        "name": "MiniMax-M2.1-highspeed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.1",
        "name": "MiniMax-M2.1",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2",
        "name": "MiniMax-M2",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": false,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "MiniMax China",
    "id": "minimax-china",
    "api_key": "$MINIMAX_API_KEY",
    "api_endpoint": "https://api.minimaxi.com/anthropic",
    "type": "anthropic",
    "default_large_model_id": "MiniMax-M2.7",
    "default_small_model_id": "MiniMax-M2.7",
    "models": [
      {
        "id": "MiniMax-M2.7-highspeed",
        "name": "MiniMax-M2.7-highspeed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.7",
        "name": "MiniMax-M2.7",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.5-highspeed",
        "name": "MiniMax-M2.5-highspeed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.5",
        "name": "MiniMax-M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.1-highspeed",
        "name": "MiniMax-M2.1-highspeed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2.1",
        "name": "MiniMax-M2.1",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMax-M2",
        "name": "MiniMax-M2",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 196608,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Synthetic",
    "id": "synthetic",
    "api_key": "$SYNTHETIC_API_KEY",
    "api_endpoint": "https://api.synthetic.new/openai/v1",
    "type": "openai-compat",
    "default_large_model_id": "hf:moonshotai/Kimi-K2.5",
    "default_small_model_id": "hf:deepseek-ai/DeepSeek-V3.2",
    "models": [
      {
        "id": "hf:deepseek-ai/DeepSeek-R1-0528",
        "name": "DeepSeek R1 0528",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 3,
        "cost_per_1m_out_cached": 3,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:deepseek-ai/DeepSeek-V3",
        "name": "DeepSeek V3",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 1.25,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "hf:deepseek-ai/DeepSeek-V3.2",
        "name": "DeepSeek V3.2",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 1.68,
        "cost_per_1m_in_cached": 0.56,
        "cost_per_1m_out_cached": 0.56,
        "context_window": 162816,
        "default_max_tokens": 16281,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:zai-org/GLM-4.7",
        "name": "GLM 4.7",
        "cost_per_1m_in": 0.45,
        "cost_per_1m_out": 2.19,
        "cost_per_1m_in_cached": 0.45,
        "cost_per_1m_out_cached": 0.45,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:zai-org/GLM-4.7-Flash",
        "name": "GLM 4.7 Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:zai-org/GLM-5",
        "name": "GLM 5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 1,
        "cost_per_1m_out_cached": 1,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:zai-org/GLM-5.1",
        "name": "GLM 5.1",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 1,
        "cost_per_1m_out_cached": 1,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:moonshotai/Kimi-K2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.45,
        "cost_per_1m_out": 3.4,
        "cost_per_1m_in_cached": 0.45,
        "cost_per_1m_out_cached": 0.45,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "hf:nvidia/Kimi-K2.5-NVFP4",
        "name": "Kimi K2.5 NVFP4",
        "cost_per_1m_in": 0.45,
        "cost_per_1m_out": 3.4,
        "cost_per_1m_in_cached": 0.45,
        "cost_per_1m_out_cached": 0.45,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "hf:meta-llama/Llama-3.3-70B-Instruct",
        "name": "Llama 3.3 70B Instruct",
        "cost_per_1m_in": 0.88,
        "cost_per_1m_out": 0.88,
        "cost_per_1m_in_cached": 0.88,
        "cost_per_1m_out_cached": 0.88,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "hf:MiniMaxAI/MiniMax-M2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.4,
        "cost_per_1m_out_cached": 0.4,
        "context_window": 191488,
        "default_max_tokens": 19148,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:nvidia/NVIDIA-Nemotron-3-Super-120B-A12B-NVFP4",
        "name": "NVIDIA Nemotron 3 Super 120B A12B NVFP4",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:Qwen/Qwen3-235B-A22B-Thinking-2507",
        "name": "Qwen3 235B A22B Thinking 2507",
        "cost_per_1m_in": 0.65,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.65,
        "cost_per_1m_out_cached": 0.65,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hf:Qwen/Qwen3-Coder-480B-A35B-Instruct",
        "name": "Qwen3 Coder 480B A35B Instruct",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 2,
        "cost_per_1m_out_cached": 2,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "hf:Qwen/Qwen3.5-397B-A17B",
        "name": "Qwen3.5 397B A17B",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3.6,
        "cost_per_1m_in_cached": 0.6,
        "cost_per_1m_out_cached": 0.6,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "hf:openai/gpt-oss-120b",
        "name": "gpt oss 120b",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "AIHubMix",
    "id": "aihubmix",
    "api_key": "$AIHUBMIX_API_KEY",
    "api_endpoint": "https://aihubmix.com/v1",
    "type": "openai-compat",
    "default_large_model_id": "gpt-5",
    "default_small_model_id": "gpt-5-nano",
    "models": [
      {
        "id": "AiHubmix-Phi-4-mini-reasoning",
        "name": "Aihubmix Phi 4 Mini (reasoning)",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "AiHubmix-Phi-4-reasoning",
        "name": "Aihubmix Phi 4 (reasoning)",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "ByteDance-Seed/Seed-OSS-36B-Instruct",
        "name": "Seed Oss 36B Instruct",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.534,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "DeepSeek-R1",
        "name": "DeepSeek R1",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1638000,
        "default_max_tokens": 163800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "DeepSeek-V3",
        "name": "DeepSeek V3",
        "cost_per_1m_in": 0.272,
        "cost_per_1m_out": 1.088,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1638000,
        "default_max_tokens": 163800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "DeepSeek-V3-Fast",
        "name": "DeepSeek V3 Fast",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 2.24,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32000,
        "default_max_tokens": 3200,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "DeepSeek-V3.1-Fast",
        "name": "DeepSeek V3.1 Fast",
        "cost_per_1m_in": 1.096,
        "cost_per_1m_out": 3.288,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 163000,
        "default_max_tokens": 16300,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "DeepSeek-V3.1-Terminus",
        "name": "DeepSeek V3.1 Terminus",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 1.68,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 160000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "DeepSeek-V3.1-Think",
        "name": "DeepSeek V3.1 Thinking",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 1.68,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "DeepSeek-V3.2-Exp",
        "name": "DeepSeek V3.2 Exp",
        "cost_per_1m_in": 0.274,
        "cost_per_1m_out": 0.411,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0274,
        "context_window": 163000,
        "default_max_tokens": 16300,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "DeepSeek-V3.2-Exp-Think",
        "name": "DeepSeek V3.2 Exp Thinking",
        "cost_per_1m_in": 0.274,
        "cost_per_1m_out": 0.411,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0274,
        "context_window": 131000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "ERNIE-X1.1-Preview",
        "name": "ERNIE X1.1 Preview",
        "cost_per_1m_in": 0.136,
        "cost_per_1m_out": 0.544,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 119000,
        "default_max_tokens": 11900,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Kimi-K2-0905",
        "name": "Kimi K2 0905",
        "cost_per_1m_in": 0.548,
        "cost_per_1m_out": 2.192,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "aihub-Phi-4-mini-instruct",
        "name": "Aihub Phi 4 Mini Instruct",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.48,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "aihub-Phi-4-multimodal-instruct",
        "name": "Aihub Phi 4 Multimodal Instruct",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.48,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "anthropic-opus-4-6",
        "name": "Anthropic Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-3-5-haiku",
        "name": "Claude 3.5 Haiku",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 5.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-3-5-sonnet",
        "name": "Claude 3.5 Sonnet",
        "cost_per_1m_in": 3.3,
        "cost_per_1m_out": 16.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-3-5-sonnet-20240620",
        "name": "Claude 3.5 Sonnet 20240620",
        "cost_per_1m_in": 3.3,
        "cost_per_1m_out": 16.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-3-7-sonnet",
        "name": "Claude 3.7 Sonnet",
        "cost_per_1m_in": 3.3,
        "cost_per_1m_out": 16.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-haiku-4-5",
        "name": "Claude Haiku 4.5",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 5.5,
        "cost_per_1m_in_cached": 1.375,
        "cost_per_1m_out_cached": 0.11,
        "context_window": 204800,
        "default_max_tokens": 20480,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-0",
        "name": "Claude Opus 4.0",
        "cost_per_1m_in": 16.5,
        "cost_per_1m_out": 82.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-1",
        "name": "Claude Opus 4.1",
        "cost_per_1m_in": 16.5,
        "cost_per_1m_out": 82.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-5",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-5-think",
        "name": "Claude Opus 4.5 Thinking",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-6",
        "name": "Claude Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-6-think",
        "name": "Claude Opus 4.6 Thinking",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-7",
        "name": "Claude Opus 4.7",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-7-think",
        "name": "Claude Opus 4.7 Thinking",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-0",
        "name": "Claude Sonnet 4.0",
        "cost_per_1m_in": 3.3,
        "cost_per_1m_out": 16.5,
        "cost_per_1m_in_cached": 4.125,
        "cost_per_1m_out_cached": 0.33,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-5",
        "name": "Claude Sonnet 4.5",
        "cost_per_1m_in": 3.3,
        "cost_per_1m_out": 16.5,
        "cost_per_1m_in_cached": 4.125,
        "cost_per_1m_out_cached": 0.33,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-5-think",
        "name": "Claude Sonnet 4.5 Thinking",
        "cost_per_1m_in": 3.3,
        "cost_per_1m_out": 16.5,
        "cost_per_1m_in_cached": 4.125,
        "cost_per_1m_out_cached": 0.33,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-6",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-6-think",
        "name": "Claude Sonnet 4.6 Thinking",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "coding-glm-4.6-free",
        "name": "Coding GLM 4.6 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2",
        "name": "Coding MiniMax M2",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2-free",
        "name": "Coding MiniMax M2 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.1",
        "name": "Coding MiniMax M2.1",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.1-free",
        "name": "Coding MiniMax M2.1 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.5",
        "name": "Coding MiniMax M2.5",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.5-free",
        "name": "Coding MiniMax M2.5 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.5-highspeed",
        "name": "Coding MiniMax M2.5 Highspeed",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.7",
        "name": "Coding MiniMax M2.7",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.7-free",
        "name": "Coding MiniMax M2.7 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "coding-minimax-m2.7-highspeed",
        "name": "Coding MiniMax M2.7 Highspeed",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-math-v2",
        "name": "DeepSeek Math V2",
        "cost_per_1m_in": 0.492,
        "cost_per_1m_out": 1.968,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0984,
        "context_window": 163000,
        "default_max_tokens": 16300,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-v3.2",
        "name": "DeepSeek V3.2",
        "cost_per_1m_in": 0.302,
        "cost_per_1m_out": 0.453,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0302,
        "context_window": 128000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-v3.2-fast",
        "name": "DeepSeek V3.2 Fast",
        "cost_per_1m_in": 1.096,
        "cost_per_1m_out": 3.288,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.096,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-v3.2-speciale",
        "name": "DeepSeek V3.2 Speciale",
        "cost_per_1m_in": 0.58,
        "cost_per_1m_out": 1.68003,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-v3.2-think",
        "name": "DeepSeek V3.2 Thinking",
        "cost_per_1m_in": 0.302,
        "cost_per_1m_out": 0.453,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0302,
        "context_window": 128000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "doubao-seed-1-6",
        "name": "Doubao Seed 1.6",
        "cost_per_1m_in": 0.18,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.036,
        "context_window": 256000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-1-6-flash",
        "name": "Doubao Seed 1.6 Flash",
        "cost_per_1m_in": 0.044,
        "cost_per_1m_out": 0.44,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0088,
        "context_window": 256000,
        "default_max_tokens": 33000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-1-6-lite",
        "name": "Doubao Seed 1.6 Lite",
        "cost_per_1m_in": 0.082,
        "cost_per_1m_out": 0.656,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0164,
        "context_window": 256000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-1-6-thinking",
        "name": "Doubao Seed 1.6 Thinking",
        "cost_per_1m_in": 0.18,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.036,
        "context_window": 256000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-1-8",
        "name": "Doubao Seed 1.8",
        "cost_per_1m_in": 0.10959,
        "cost_per_1m_out": 0.27398,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02192,
        "context_window": 256000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-2-0-code-preview",
        "name": "Doubao Seed 2.0 Code Preview",
        "cost_per_1m_in": 0.4822,
        "cost_per_1m_out": 2.411,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.09644,
        "context_window": 256000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-2-0-lite",
        "name": "Doubao Seed 2.0 Lite",
        "cost_per_1m_in": 0.09041,
        "cost_per_1m_out": 0.54246,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01808,
        "context_window": 256000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-2-0-mini",
        "name": "Doubao Seed 2.0 Mini",
        "cost_per_1m_in": 0.03014,
        "cost_per_1m_out": 0.30136,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.00603,
        "context_window": 256000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-2-0-pro",
        "name": "Doubao Seed 2.0 Pro",
        "cost_per_1m_in": 0.4822,
        "cost_per_1m_out": 2.411,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.09644,
        "context_window": 256000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "ernie-4.5",
        "name": "ERNIE 4.5",
        "cost_per_1m_in": 0.068,
        "cost_per_1m_out": 0.272,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 160000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "ernie-4.5-turbo-latest",
        "name": "ERNIE 4.5 Turbo",
        "cost_per_1m_in": 0.11,
        "cost_per_1m_out": 0.44,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 135000,
        "default_max_tokens": 12000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "ernie-4.5-turbo-vl",
        "name": "ERNIE 4.5 Turbo VL",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 139000,
        "default_max_tokens": 16000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "ernie-5.0-thinking-exp",
        "name": "ERNIE 5.0 Thinking Exp",
        "cost_per_1m_in": 0.82192,
        "cost_per_1m_out": 3.28768,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.82192,
        "context_window": 119000,
        "default_max_tokens": 11900,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "ernie-5.0-thinking-preview",
        "name": "ERNIE 5.0 Thinking Preview",
        "cost_per_1m_in": 0.822,
        "cost_per_1m_out": 3.288,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.822,
        "context_window": 183000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "ernie-x1-turbo",
        "name": "ERNIE X1 Turbo",
        "cost_per_1m_in": 0.136,
        "cost_per_1m_out": 0.544,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 50500,
        "default_max_tokens": 5050,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gemini-2.0-flash",
        "name": "Gemini 2.0 Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 1048576,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash",
        "name": "Gemini 2.5 Flash",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.499,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-image",
        "name": "Gemini 2.5 Flash Image",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.499,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 32800,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-lite",
        "name": "Gemini 2.5 Flash Lite",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-lite-nothink",
        "name": "Gemini 2.5 Flash Lite (no think)",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-lite-preview-09-2025",
        "name": "Gemini 2.5 Flash Lite Preview 09 2025",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-lite-preview-09-2025-nothink",
        "name": "Gemini 2.5 Flash Lite Preview 09 2025 (no think)",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-nothink",
        "name": "Gemini 2.5 Flash (no think)",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.499,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1047576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-preview-05-20-nothink",
        "name": "Gemini 2.5 Flash Preview 05-20 (no think)",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.499,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-preview-05-20-search",
        "name": "Gemini 2.5 Flash Preview 05-20 Search",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.499,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-preview-09-2025",
        "name": "Gemini 2.5 Flash Preview 09 2025",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.499,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash-search",
        "name": "Gemini 2.5 Flash Search",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.499,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro",
        "name": "Gemini 2.5 Pro",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro-preview-05-06",
        "name": "Gemini 2.5 Pro Preview 05-06",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro-preview-06-05",
        "name": "Gemini 2.5 Pro Preview 06-05",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro-search",
        "name": "Gemini 2.5 Pro Search",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash-preview",
        "name": "Gemini 3 Flash Preview",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 1048576,
        "default_max_tokens": 104857,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash-preview-free",
        "name": "Gemini 3 Flash Preview (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash-preview-search",
        "name": "Gemini 3 Flash Preview Search",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 1048576,
        "default_max_tokens": 104857,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-flash-lite-preview",
        "name": "Gemini 3.1 Flash Lite Preview",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-flash-lite-preview-nothink",
        "name": "Gemini 3.1 Flash Lite Preview (no think)",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-pro-preview",
        "name": "Gemini 3.1 Pro Preview",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-pro-preview-customtools",
        "name": "Gemini 3.1 Pro Preview Customtools",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "glm-4.5v",
        "name": "GLM 4.5 Vision",
        "cost_per_1m_in": 0.274,
        "cost_per_1m_out": 0.822,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.274,
        "context_window": 64000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "glm-4.6",
        "name": "GLM 4.6",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 20480,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-4.6v",
        "name": "GLM 4.6 Vision",
        "cost_per_1m_in": 0.137,
        "cost_per_1m_out": 0.411,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0274,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "glm-4.7",
        "name": "GLM 4.7",
        "cost_per_1m_in": 0.27397,
        "cost_per_1m_out": 1.0959,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0548,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5",
        "name": "GLM 5",
        "cost_per_1m_in": 0.88,
        "cost_per_1m_out": 2.816,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.176,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5-turbo",
        "name": "GLM 5 Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 3.9996,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.24,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5.1",
        "name": "GLM 5.1",
        "cost_per_1m_in": 0.845,
        "cost_per_1m_out": 3.38,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.18311,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5v-turbo",
        "name": "GLM 5 Vision Turbo",
        "cost_per_1m_in": 0.7042,
        "cost_per_1m_out": 3.09848,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.16901,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1",
        "name": "GPT 4.1",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1047576,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-free",
        "name": "GPT 4.1 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-mini",
        "name": "GPT 4.1 Mini",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 1.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 1047576,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-mini-free",
        "name": "GPT 4.1 Mini (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-nano",
        "name": "GPT 4.1 Nano",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 1047576,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-nano-free",
        "name": "GPT 4.1 Nano (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o",
        "name": "GPT 4o",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-2024-11-20",
        "name": "GPT 4o 2024 11-20",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-audio-preview",
        "name": "GPT 4o Audio Preview",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "gpt-4o-free",
        "name": "GPT 4o (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-mini",
        "name": "GPT 4o Mini",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-mini-search-preview",
        "name": "GPT 4o Mini Search Preview",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-search-preview",
        "name": "GPT 4o Search Preview",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5",
        "name": "GPT 5",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-chat-latest",
        "name": "GPT 5 Chat",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5-codex",
        "name": "GPT-5-Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-mini",
        "name": "GPT 5 Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-nano",
        "name": "GPT 5 Nano",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.005,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-pro",
        "name": "GPT 5 Pro",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 120,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1",
        "name": "GPT 5.1",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-chat-latest",
        "name": "GPT 5.1 Chat",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex",
        "name": "GPT-5.1-Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex-max",
        "name": "GPT-5.1-Codex Max",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex-mini",
        "name": "GPT-5.1-Codex Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2",
        "name": "GPT 5.2",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-chat-latest",
        "name": "GPT 5.2 Chat",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-codex",
        "name": "GPT-5.2-Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-high",
        "name": "GPT 5.2 High",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-low",
        "name": "GPT 5.2 Low",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-pro",
        "name": "GPT 5.2 Pro",
        "cost_per_1m_in": 21,
        "cost_per_1m_out": 168,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 2.1,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.3-chat-latest",
        "name": "GPT 5.3 Chat",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.3-codex",
        "name": "GPT-5.3-Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4",
        "name": "GPT 5.4",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-high",
        "name": "GPT 5.4 High",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-low",
        "name": "GPT 5.4 Low",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-mini",
        "name": "GPT 5.4 Mini",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 4.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-nano",
        "name": "GPT 5.4 Nano",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-pro",
        "name": "GPT 5.4 Pro",
        "cost_per_1m_in": 30,
        "cost_per_1m_out": 180,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 30,
        "context_window": 1050000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-oss-120b",
        "name": "gpt-oss-120b",
        "cost_per_1m_in": 0.18,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-oss-20b",
        "name": "gpt-oss-20b",
        "cost_per_1m_in": 0.11,
        "cost_per_1m_out": 0.55,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "grok-4",
        "name": "Grok 4",
        "cost_per_1m_in": 3.3,
        "cost_per_1m_out": 16.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.825,
        "context_window": 256000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4-1-fast-non-reasoning",
        "name": "Grok 4.1 Fast",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "grok-4-1-fast-reasoning",
        "name": "Grok 4.1 Fast (reasoning)",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4-20-non-reasoning",
        "name": "Grok 4 20",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4-20-reasoning",
        "name": "Grok 4 20 (reasoning)",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4-fast-non-reasoning",
        "name": "Grok 4 Fast",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 2000000,
        "default_max_tokens": 30000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "grok-4-fast-reasoning",
        "name": "Grok 4 Fast (reasoning)",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 2000000,
        "default_max_tokens": 30000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4.20-beta-0309-non-reasoning",
        "name": "Grok 4.20 Beta 0309",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4.20-beta-0309-reasoning",
        "name": "Grok 4.20 Beta 0309 (reasoning)",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4.20-multi-agent-0309",
        "name": "Grok 4.20 Multi Agent 0309",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-4.20-multi-agent-beta-0309",
        "name": "Grok 4.20 Multi Agent Beta 0309",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "grok-code-fast-1",
        "name": "Grok Code Fast 1",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 256000,
        "default_max_tokens": 10000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "jina-deepsearch-v1",
        "name": "Jina Deepsearch V1",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.05,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "k2.6-code-preview-free",
        "name": "K2.6 Code Preview (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "kat-dev",
        "name": "Kat Dev",
        "cost_per_1m_in": 0.137,
        "cost_per_1m_out": 0.548,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "kimi-for-coding-free",
        "name": "Kimi For Coding (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "kimi-k2-0711",
        "name": "Kimi K2 0711",
        "cost_per_1m_in": 0.54,
        "cost_per_1m_out": 2.16,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "kimi-k2-thinking",
        "name": "Kimi K2 Thinking",
        "cost_per_1m_in": 0.548,
        "cost_per_1m_out": 2.192,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.137,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "kimi-k2-turbo-preview",
        "name": "Kimi K2 Turbo Preview",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "kimi-k2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.105,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "kimi-k2.6",
        "name": "Kimi K2.6",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 3.9995,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.16084,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "ling-2.6-flash-free",
        "name": "Ling 2.6 Flash (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262100,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "llama-4-maverick",
        "name": "Llama 4 Maverick",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "llama-4-scout",
        "name": "Llama 4 Scout",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mimo-v2-flash-free",
        "name": "MiMo V2 Flash (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mimo-v2-omni",
        "name": "MiMo V2 Omni",
        "cost_per_1m_in": 0.44,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.088,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mimo-v2-pro",
        "name": "MiMo V2 Pro",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 3.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.22,
        "context_window": 1000000,
        "default_max_tokens": 100000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "minimax-m2",
        "name": "MiniMax M2",
        "cost_per_1m_in": 0.288,
        "cost_per_1m_out": 1.152,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 20480,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.1",
        "name": "MiniMax M2.1",
        "cost_per_1m_in": 0.288,
        "cost_per_1m_out": 1.152,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 20480,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.288,
        "cost_per_1m_out": 1.152,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 20480,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.5-highspeed",
        "name": "MiniMax M2.5 Highspeed",
        "cost_per_1m_in": 0.288,
        "cost_per_1m_out": 1.152,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 20480,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.7",
        "name": "MiniMax M2.7",
        "cost_per_1m_in": 0.2958,
        "cost_per_1m_out": 1.1832,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05916,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral-large-3",
        "name": "Mistral Large 3",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "nvidia-nemotron-3-super-120b-a12b",
        "name": "Nvidia Nemotron 3 Super 120B A12B",
        "cost_per_1m_in": 0.11,
        "cost_per_1m_out": 0.55,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0275,
        "context_window": 1000000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "o3",
        "name": "O3",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o3-mini",
        "name": "O3 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.55,
        "context_window": 200000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o3-pro",
        "name": "O3 Pro",
        "cost_per_1m_in": 20,
        "cost_per_1m_out": 80,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 20,
        "context_window": 200000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o4-mini",
        "name": "O4 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.275,
        "context_window": 200000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3-235b-a22b",
        "name": "Qwen3 235B A22B",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 1.12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131100,
        "default_max_tokens": 13110,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-235b-a22b-instruct-2507",
        "name": "Qwen3 235B A22B Instruct 2507",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 1.12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-235b-a22b-thinking-2507",
        "name": "Qwen3 235B A22B Thinking 2507",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 2.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3-coder-30b-a3b-instruct",
        "name": "Qwen3 Coder 30B A3B Instruct",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 262000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-480b-a35b-instruct",
        "name": "Qwen3 Coder 480B A35B Instruct",
        "cost_per_1m_in": 0.82,
        "cost_per_1m_out": 3.28,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.82,
        "context_window": 262000,
        "default_max_tokens": 26200,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-flash",
        "name": "Qwen3 Coder Flash",
        "cost_per_1m_in": 0.136,
        "cost_per_1m_out": 0.544,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.136,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-next",
        "name": "Qwen3 Coder Next",
        "cost_per_1m_in": 0.137,
        "cost_per_1m_out": 0.548,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.137,
        "context_window": 2000000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-plus",
        "name": "Qwen3 Coder Plus",
        "cost_per_1m_in": 0.54,
        "cost_per_1m_out": 2.16,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.108,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-plus-2025-07-22",
        "name": "Qwen3 Coder Plus 2025 07-22",
        "cost_per_1m_in": 0.54,
        "cost_per_1m_out": 2.16,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.54,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-max",
        "name": "Qwen3 Max",
        "cost_per_1m_in": 0.34246,
        "cost_per_1m_out": 1.36984,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.34246,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-max-2026-01-23",
        "name": "Qwen3 Max 2026 01-23",
        "cost_per_1m_in": 0.34246,
        "cost_per_1m_out": 1.36984,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.34246,
        "context_window": 252000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3-next-80b-a3b-instruct",
        "name": "Qwen3 Next 80B A3B Instruct",
        "cost_per_1m_in": 0.138,
        "cost_per_1m_out": 0.552,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-next-80b-a3b-thinking",
        "name": "Qwen3 Next 80B A3B Thinking",
        "cost_per_1m_in": 0.138,
        "cost_per_1m_out": 1.38,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3-vl-235b-a22b-instruct",
        "name": "Qwen3 VL 235B A22B Instruct",
        "cost_per_1m_in": 0.274,
        "cost_per_1m_out": 1.096,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 33000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-vl-235b-a22b-thinking",
        "name": "Qwen3 VL 235B A22B Thinking",
        "cost_per_1m_in": 0.274,
        "cost_per_1m_out": 2.74,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 33000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3-vl-30b-a3b-instruct",
        "name": "Qwen3 VL 30B A3B Instruct",
        "cost_per_1m_in": 0.1028,
        "cost_per_1m_out": 0.4112,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-vl-30b-a3b-thinking",
        "name": "Qwen3 VL 30B A3B Thinking",
        "cost_per_1m_in": 0.1028,
        "cost_per_1m_out": 1.028,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3-vl-flash",
        "name": "Qwen3 VL Flash",
        "cost_per_1m_in": 0.0206,
        "cost_per_1m_out": 0.206,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.00412,
        "context_window": 254000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-vl-flash-2026-01-22",
        "name": "Qwen3 VL Flash 2026 01-22",
        "cost_per_1m_in": 0.0206,
        "cost_per_1m_out": 0.206,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0206,
        "context_window": 254000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-vl-plus",
        "name": "Qwen3 VL Plus",
        "cost_per_1m_in": 0.137,
        "cost_per_1m_out": 1.37,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0274,
        "context_window": 256000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3.5-122b-a10b",
        "name": "Qwen3.5 122B A10B",
        "cost_per_1m_in": 0.1126,
        "cost_per_1m_out": 0.9008,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.1126,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.5-27b",
        "name": "Qwen3.5 27B",
        "cost_per_1m_in": 0.0846,
        "cost_per_1m_out": 0.6768,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0846,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.5-35b-a3b",
        "name": "Qwen3.5 35B A3B",
        "cost_per_1m_in": 0.0564,
        "cost_per_1m_out": 0.4512,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0564,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.5-397b-a17b",
        "name": "Qwen3.5 397B A17B",
        "cost_per_1m_in": 0.1644,
        "cost_per_1m_out": 0.9864,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.1644,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.5-flash",
        "name": "Qwen3.5 Flash",
        "cost_per_1m_in": 0.0282,
        "cost_per_1m_out": 0.282,
        "cost_per_1m_in_cached": 0.03525,
        "cost_per_1m_out_cached": 0.00282,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.5-plus",
        "name": "Qwen3.5 Plus",
        "cost_per_1m_in": 0.1096,
        "cost_per_1m_out": 0.6576,
        "cost_per_1m_in_cached": 0.137,
        "cost_per_1m_out_cached": 0.01096,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.6-35b-a3b",
        "name": "Qwen3.6 35B A3B",
        "cost_per_1m_in": 0.254,
        "cost_per_1m_out": 1.524,
        "cost_per_1m_in_cached": 0.3175,
        "cost_per_1m_out_cached": 0.0254,
        "context_window": 254000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.6-flash",
        "name": "Qwen3.6 Flash",
        "cost_per_1m_in": 0.169,
        "cost_per_1m_out": 1.014,
        "cost_per_1m_in_cached": 0.21125,
        "cost_per_1m_out_cached": 0.0169,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.6-max-preview",
        "name": "Qwen3.6 Max Preview",
        "cost_per_1m_in": 1.268,
        "cost_per_1m_out": 7.608,
        "cost_per_1m_in_cached": 1.585,
        "cost_per_1m_out_cached": 0.1268,
        "context_window": 240000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.6-plus",
        "name": "Qwen3.6 Plus",
        "cost_per_1m_in": 0.282,
        "cost_per_1m_out": 1.692,
        "cost_per_1m_in_cached": 0.3525,
        "cost_per_1m_out_cached": 0.0282,
        "context_window": 991000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.6-plus-preview-free",
        "name": "Qwen3.6 Plus Preview (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "sophnet-kimi-k2.6",
        "name": "Sophnet Kimi K2.6",
        "cost_per_1m_in": 0.9154,
        "cost_per_1m_out": 3.80257,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.15498,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "step-3.5-flash",
        "name": "Step 3.5 Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "step-3.5-flash-free",
        "name": "Step 3.5 Flash (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      }
    ],
    "default_headers": {
      "APP-Code": "IUFF7106"
    }
  },
  {
    "name": "Avian",
    "id": "avian",
    "api_key": "$AVIAN_API_KEY",
    "api_endpoint": "https://api.avian.io/v1",
    "type": "openai-compat",
    "default_large_model_id": "moonshotai/kimi-k2.5",
    "default_small_model_id": "deepseek/deepseek-v3.2",
    "models": [
      {
        "id": "deepseek/deepseek-v3.2",
        "name": "DeepSeek V3.2",
        "cost_per_1m_in": 0.23,
        "cost_per_1m_out": 0.33,
        "cost_per_1m_in_cached": 0.012,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-5",
        "name": "GLM-5",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 2.55,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-5.1",
        "name": "GLM-5.1",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 202752,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.45,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.225,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 262144,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2.6",
        "name": "Kimi K2.6",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.16,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 262144,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.27,
        "cost_per_1m_out": 1.08,
        "cost_per_1m_in_cached": 0.15,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Azure OpenAI",
    "id": "azure",
    "api_key": "$AZURE_OPENAI_API_KEY",
    "api_endpoint": "$AZURE_OPENAI_API_ENDPOINT",
    "type": "azure",
    "default_large_model_id": "gpt-5",
    "default_small_model_id": "gpt-5-mini",
    "models": [
      {
        "id": "gpt-5",
        "name": "GPT-5",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.25,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "minimal",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-mini",
        "name": "GPT-5 Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "low",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-nano",
        "name": "GPT-5 Nano",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.005,
        "cost_per_1m_out_cached": 0.005,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "low",
        "supports_attachments": true
      },
      {
        "id": "codex-mini-latest",
        "name": "Codex Mini",
        "cost_per_1m_in": 1.5,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o4-mini",
        "name": "o4 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.275,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o3",
        "name": "o3",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "o3-pro",
        "name": "o3 Pro",
        "cost_per_1m_in": 20,
        "cost_per_1m_out": 80,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1",
        "name": "GPT-4.1",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1047576,
        "default_max_tokens": 50000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-mini",
        "name": "GPT-4.1 Mini",
        "cost_per_1m_in": 0.39999999999999997,
        "cost_per_1m_out": 1.5999999999999999,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.09999999999999999,
        "context_window": 1047576,
        "default_max_tokens": 50000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-nano",
        "name": "GPT-4.1 Nano",
        "cost_per_1m_in": 0.09999999999999999,
        "cost_per_1m_out": 0.39999999999999997,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.024999999999999998,
        "context_window": 1047576,
        "default_max_tokens": 50000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4.5-preview",
        "name": "GPT-4.5 (Preview)",
        "cost_per_1m_in": 75,
        "cost_per_1m_out": 150,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 37.5,
        "context_window": 128000,
        "default_max_tokens": 50000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "o3-mini",
        "name": "o3 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.55,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-4o",
        "name": "GPT-4o",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 128000,
        "default_max_tokens": 20000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-mini",
        "name": "GPT-4o-mini",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 128000,
        "default_max_tokens": 20000,
        "can_reason": false,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "AWS Bedrock",
    "id": "bedrock",
    "type": "bedrock",
    "default_large_model_id": "anthropic.claude-sonnet-4-6",
    "default_small_model_id": "anthropic.claude-haiku-4-5-20251001-v1:0",
    "models": [
      {
        "id": "anthropic.claude-sonnet-4-6",
        "name": "AWS Claude Sonnet 4.6",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "anthropic.claude-sonnet-4-5-20250929-v1:0",
        "name": "AWS Claude Sonnet 4.5",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "anthropic.claude-haiku-4-5-20251001-v1:0",
        "name": "AWS Claude Haiku 4.5",
        "cost_per_1m_in": 0.8,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 1,
        "cost_per_1m_out_cached": 0.08,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "anthropic.claude-opus-4-6-v1",
        "name": "AWS Claude Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1000000,
        "default_max_tokens": 126000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "anthropic.claude-opus-4-5-20251101-v1:0",
        "name": "AWS Claude Opus 4.5",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "anthropic.claude-opus-4-1-20250805-v1:0",
        "name": "AWS Claude Opus 4.1",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 18.75,
        "cost_per_1m_out_cached": 1.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "anthropic.claude-opus-4-20250514-v1:0",
        "name": "AWS Claude Opus 4",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 18.75,
        "cost_per_1m_out_cached": 1.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "anthropic.claude-sonnet-4-20250514-v1:0",
        "name": "AWS Claude Sonnet 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "Cerebras",
    "id": "cerebras",
    "api_key": "$CEREBRAS_API_KEY",
    "api_endpoint": "https://api.cerebras.ai/v1",
    "type": "openai-compat",
    "default_large_model_id": "gpt-oss-120b",
    "default_small_model_id": "qwen-3-235b-a22b-instruct-2507",
    "models": [
      {
        "id": "gpt-oss-120b",
        "name": "OpenAI GPT OSS",
        "cost_per_1m_in": 0.35,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 25000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen-3-235b-a22b-instruct-2507",
        "name": "Qwen 3 235B Instruct",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 25000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai-glm-4.7",
        "name": "Z.ai GLM 4.7",
        "cost_per_1m_in": 2.25,
        "cost_per_1m_out": 2.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 25000,
        "can_reason": false,
        "supports_attachments": false
      }
    ],
    "default_headers": {
      "X-Cerebras-3rd-Party-Integration": "crush"
    }
  },
  {
    "name": "Chutes",
    "id": "chutes",
    "api_key": "$CHUTES_API_KEY",
    "api_endpoint": "https://llm.chutes.ai/v1",
    "type": "openai-compat",
    "default_large_model_id": "zai-org/GLM-5-TEE",
    "default_small_model_id": "zai-org/GLM-5-Turbo",
    "models": [
      {
        "id": "NousResearch/DeepHermes-3-Mistral-24B-Preview",
        "name": "DeepHermes-3-Mistral-24B-Preview",
        "cost_per_1m_in": 0.0245,
        "cost_per_1m_out": 0.0978,
        "cost_per_1m_in_cached": 0.01225,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek-ai/DeepSeek-R1-0528-TEE",
        "name": "DeepSeek-R1-0528-TEE",
        "cost_per_1m_in": 0.45,
        "cost_per_1m_out": 2.15,
        "cost_per_1m_in_cached": 0.225,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
        "name": "DeepSeek-R1-Distill-Llama-70B",
        "cost_per_1m_in": 0.0272,
        "cost_per_1m_out": 0.1087,
        "cost_per_1m_in_cached": 0.0136,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "tngtech/DeepSeek-TNG-R1T2-Chimera-TEE",
        "name": "DeepSeek-TNG-R1T2-Chimera-TEE",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0.15,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 163840,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-ai/DeepSeek-V3-0324-TEE",
        "name": "DeepSeek-V3-0324-TEE",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek-ai/DeepSeek-V3.1-TEE",
        "name": "DeepSeek-V3.1-TEE",
        "cost_per_1m_in": 0.27,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0.135,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-ai/DeepSeek-V3.2-TEE",
        "name": "DeepSeek-V3.2-TEE",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 0.42,
        "cost_per_1m_in_cached": 0.14,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-4.6V",
        "name": "GLM-4.6V",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0.15,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "zai-org/GLM-4.7-FP8",
        "name": "GLM-4.7-FP8",
        "cost_per_1m_in": 0.2989,
        "cost_per_1m_out": 1.1957,
        "cost_per_1m_in_cached": 0.14945,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-4.7-TEE",
        "name": "GLM-4.7-TEE",
        "cost_per_1m_in": 0.39,
        "cost_per_1m_out": 1.75,
        "cost_per_1m_in_cached": 0.195,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5-TEE",
        "name": "GLM-5-TEE",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 2.55,
        "cost_per_1m_in_cached": 0.475,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5-Turbo",
        "name": "GLM-5-Turbo",
        "cost_per_1m_in": 0.4891,
        "cost_per_1m_out": 1.9565,
        "cost_per_1m_in_cached": 0.24455,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5.1-TEE",
        "name": "GLM-5.1-TEE",
        "cost_per_1m_in": 1.05,
        "cost_per_1m_out": 3.5,
        "cost_per_1m_in_cached": 0.525,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "NousResearch/Hermes-4-14B",
        "name": "Hermes-4-14B",
        "cost_per_1m_in": 0.0136,
        "cost_per_1m_out": 0.0543,
        "cost_per_1m_in_cached": 0.0068,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 40960,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2.5-TEE",
        "name": "Kimi-K2.5-TEE",
        "cost_per_1m_in": 0.44,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.22,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "moonshotai/Kimi-K2.6-TEE",
        "name": "Kimi-K2.6-TEE",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.475,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65535,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "XiaomiMiMo/MiMo-V2-Flash-TEE",
        "name": "MiMo-V2-Flash-TEE",
        "cost_per_1m_in": 0.09,
        "cost_per_1m_out": 0.29,
        "cost_per_1m_in_cached": 0.045,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMaxAI/MiniMax-M2.5-TEE",
        "name": "MiniMax-M2.5-TEE",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.075,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen2.5-72B-Instruct",
        "name": "Qwen2.5-72B-Instruct",
        "cost_per_1m_in": 0.2989,
        "cost_per_1m_out": 1.1957,
        "cost_per_1m_in_cached": 0.14945,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-235B-A22B-Instruct-2507-TEE",
        "name": "Qwen3-235B-A22B-Instruct-2507-TEE",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-235B-A22B-Thinking-2507",
        "name": "Qwen3-235B-A22B-Thinking-2507",
        "cost_per_1m_in": 0.11,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0.055,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 262144,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-30B-A3B",
        "name": "Qwen3-30B-A3B",
        "cost_per_1m_in": 0.06,
        "cost_per_1m_out": 0.22,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 40960,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-32B-TEE",
        "name": "Qwen3-32B-TEE",
        "cost_per_1m_in": 0.08,
        "cost_per_1m_out": 0.24,
        "cost_per_1m_in_cached": 0.04,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 40960,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-Coder-Next-TEE",
        "name": "Qwen3-Coder-Next-TEE",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-Next-80B-A3B-Instruct",
        "name": "Qwen3-Next-80B-A3B-Instruct",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.8,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 262144,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3.5-397B-A17B-TEE",
        "name": "Qwen3.5-397B-A17B-TEE",
        "cost_per_1m_in": 0.39,
        "cost_per_1m_out": 2.34,
        "cost_per_1m_in_cached": 0.195,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "unsloth/gemma-3-27b-it",
        "name": "gemma-3-27b-it",
        "cost_per_1m_in": 0.0272,
        "cost_per_1m_out": 0.1087,
        "cost_per_1m_in_cached": 0.0136,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-oss-120b-TEE",
        "name": "gpt-oss-120b-TEE",
        "cost_per_1m_in": 0.09,
        "cost_per_1m_out": 0.36,
        "cost_per_1m_in_cached": 0.045,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "GitHub Copilot",
    "id": "copilot",
    "api_endpoint": "https://api.githubcopilot.com",
    "type": "openai-compat",
    "default_large_model_id": "claude-sonnet-4.6",
    "default_small_model_id": "claude-haiku-4.5",
    "models": [
      {
        "id": "claude-haiku-4.5",
        "name": "Claude Haiku 4.5",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4.5",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4.7",
        "name": "Claude Opus 4.7",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4",
        "name": "Claude Sonnet 4",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 216000,
        "default_max_tokens": 16000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4.5",
        "name": "Claude Sonnet 4.5",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4.6",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro",
        "name": "Gemini 2.5 Pro",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash-preview",
        "name": "Gemini 3 Flash (Preview)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-pro-preview",
        "name": "Gemini 3.1 Pro",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "goldeneye-free-auto",
        "name": "Goldeneye",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-3.5-turbo",
        "name": "GPT 3.5 Turbo",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 16384,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "gpt-4",
        "name": "GPT 4",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "gpt-4-0125-preview",
        "name": "GPT 4 Turbo",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "gpt-4.1",
        "name": "GPT-4.1",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o",
        "name": "GPT-4o",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-4o-mini",
        "name": "GPT-4o mini",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "gpt-5-mini",
        "name": "GPT-5 mini",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 264000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2",
        "name": "GPT-5.2",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 264000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-codex",
        "name": "GPT-5.2-Codex",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.3-codex",
        "name": "GPT-5.3-Codex",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4",
        "name": "GPT-5.4",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-mini",
        "name": "GPT-5.4 mini",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "grok-code-fast-1",
        "name": "Grok Code Fast 1",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.5",
        "name": "MiniMax M2.5 (Fast)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 40000,
        "can_reason": false,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Cortecs",
    "id": "cortecs",
    "api_key": "$CORTECS_API_KEY",
    "api_endpoint": "https://api.cortecs.ai/v1",
    "type": "openai",
    "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
    "default_small_model_id": "glm-4.7-flash",
    "models": [
      {
        "id": "claude-opus4-7",
        "name": "Claude Opus 4.7",
        "cost_per_1m_in": 4.7685,
        "cost_per_1m_out": 23.8425,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "minimax-m2.7",
        "name": "MiniMax M2.7",
        "cost_per_1m_in": 0.26622,
        "cost_per_1m_out": 1.06488,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5.1",
        "name": "GLM 5.1",
        "cost_per_1m_in": 1.24236,
        "cost_per_1m_out": 3.90336,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3.5-122b-a10b",
        "name": "Qwen3.5 122B A10B",
        "cost_per_1m_in": 0.4437,
        "cost_per_1m_out": 3.1059,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3.5-9b",
        "name": "Qwen3.5 9B",
        "cost_per_1m_in": 0.13311,
        "cost_per_1m_out": 0.17748,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nemotron-3-super-120b-a12b",
        "name": "Nemotron 3 Super 120B A12B",
        "cost_per_1m_in": 0.15606,
        "cost_per_1m_out": 0.67626,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-next",
        "name": "Qwen3 Coder Next",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5",
        "name": "GLM 5",
        "cost_per_1m_in": 0.8874,
        "cost_per_1m_out": 2.83968,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-4.6",
        "name": "GLM 4.6",
        "cost_per_1m_in": 0.35496,
        "cost_per_1m_out": 1.55295,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 203000,
        "default_max_tokens": 20300,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-chat-v3.1",
        "name": "DeepSeek Chat V3.1",
        "cost_per_1m_in": 0.17748,
        "cost_per_1m_out": 0.70992,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 164000,
        "default_max_tokens": 16400,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen-2.5-72b-instruct",
        "name": "Qwen2.5 72B Instruct",
        "cost_per_1m_in": 0.062118,
        "cost_per_1m_out": 0.230724,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 33000,
        "default_max_tokens": 3300,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3.5-397b-a17b",
        "name": "Qwen3.5 397B A17B ",
        "cost_per_1m_in": 0.53244,
        "cost_per_1m_out": 3.19464,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 25000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-v3.2",
        "name": "DeepSeek V3.2",
        "cost_per_1m_in": 0.26622,
        "cost_per_1m_out": 0.4437,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral-small-2603",
        "name": "Mistral Small 4 2603",
        "cost_per_1m_in": 0.1275,
        "cost_per_1m_out": 0.51,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "minimax-m2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.26622,
        "cost_per_1m_out": 0.97614,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "claude-4-6-sonnet",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 2.8691,
        "cost_per_1m_out": 14.3095,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "glm-4.7-flash",
        "name": "GLM 4.7 Flash",
        "cost_per_1m_in": 0.0716,
        "cost_per_1m_out": 0.4293,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 203000,
        "default_max_tokens": 20300,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "kimi-k2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.4437,
        "cost_per_1m_out": 2.12976,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "claude-opus4-6",
        "name": "Claude Opus 4.6",
        "cost_per_1m_in": 4.7685,
        "cost_per_1m_out": 23.8425,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "minimax-m2",
        "name": "MiniMax M2",
        "cost_per_1m_in": 0.22185,
        "cost_per_1m_out": 0.8874,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-4.7",
        "name": "GLM 4.7",
        "cost_per_1m_in": 0.53244,
        "cost_per_1m_out": 1.95228,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.1",
        "name": "MiniMax M2.1",
        "cost_per_1m_in": 0.322,
        "cost_per_1m_out": 1.2879,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196000,
        "default_max_tokens": 19600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3-vl-235b-a22b",
        "name": "Qwen3 VL 235B A22B",
        "cost_per_1m_in": 0.186354,
        "cost_per_1m_out": 1.68606,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mistral-small-creative",
        "name": "Mistral Small Creative",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32000,
        "default_max_tokens": 3200,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "nvidia-nemotron-3-nano-30b-a3b",
        "name": "Nemotron 3 Nano 30B A3B",
        "cost_per_1m_in": 0.0537,
        "cost_per_1m_out": 0.215,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "claude-opus4-5",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 4.7695,
        "cost_per_1m_out": 23.8485,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3-next-80b-a3b-thinking",
        "name": "Qwen3 Next 80B A3B Thinking",
        "cost_per_1m_in": 0.13311,
        "cost_per_1m_out": 1.06488,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "holo2-30b-a3b",
        "name": "Holo2 30B A3B",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.7,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 22000,
        "default_max_tokens": 2200,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "devstral-2512",
        "name": "Devstral 2 2512",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 26200,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "nova-2-lite",
        "name": "Nova 2 Lite",
        "cost_per_1m_in": 0.335,
        "cost_per_1m_out": 2.822,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-oss-safeguard-120b",
        "name": "GPT OSS Safeguard 120B",
        "cost_per_1m_in": 0.161,
        "cost_per_1m_out": 0.626,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral-large-2512",
        "name": "Mistral Large 3 2512",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "ministral-8b-2512",
        "name": "Ministral 3 8b 2512",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "ministral-3b-2512",
        "name": "Ministral 3 3b 2512",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "ministral-14b-2512",
        "name": "Ministral 3 14b 2512",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "intellect-3",
        "name": "INTELLECT-3",
        "cost_per_1m_in": 0.179,
        "cost_per_1m_out": 0.984,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-5.1",
        "name": "GPT 5.1",
        "cost_per_1m_in": 1.234,
        "cost_per_1m_out": 9.838,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 40000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nemotron-nano-v2-12b",
        "name": "Nemotron Nano V2 12b",
        "cost_per_1m_in": 0.215,
        "cost_per_1m_out": 0.635,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-haiku-4-5",
        "name": "Claude Haiku 4.5",
        "cost_per_1m_in": 0.894,
        "cost_per_1m_out": 4.472,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-4-5-sonnet",
        "name": "Claude 4.5 Sonnet",
        "cost_per_1m_in": 2.683,
        "cost_per_1m_out": 13.416,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "magistral-medium-2509",
        "name": "Magistral Medium 2509",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "magistral-small-2509",
        "name": "Magistral Small 2509",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "hermes-4-70b",
        "name": "Hermes 4 70B",
        "cost_per_1m_in": 0.116,
        "cost_per_1m_out": 0.358,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "gpt-5",
        "name": "GPT 5",
        "cost_per_1m_in": 1.234,
        "cost_per_1m_out": 9.838,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 40000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-oss-120b",
        "name": "GPT Oss 120b",
        "cost_per_1m_in": 0.035496,
        "cost_per_1m_out": 0.17748,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3-30b-a3b-instruct-2507",
        "name": "Qwen3 30B A3B Instruct 2507",
        "cost_per_1m_in": 0.089,
        "cost_per_1m_out": 0.268,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 26200,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-oss-20b",
        "name": "GPT Oss 20b",
        "cost_per_1m_in": 0.026622,
        "cost_per_1m_out": 0.124236,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral-7b-instruct-v0.3",
        "name": "Mistral 7B Instruct v0.3",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 127000,
        "default_max_tokens": 12700,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral-large-2402",
        "name": "Mistral Large 2402",
        "cost_per_1m_in": 3.846,
        "cost_per_1m_out": 11.627,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32000,
        "default_max_tokens": 3200,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "pixtral-large-2502",
        "name": "Pixtral Large 25.02",
        "cost_per_1m_in": 1.789,
        "cost_per_1m_out": 5.366,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mistral-small-3.2-24b-instruct-2506",
        "name": "Mistral Small 3.2 24B Instruct 2506",
        "cost_per_1m_in": 0.09,
        "cost_per_1m_out": 0.28,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-32b",
        "name": "Qwen3 32B",
        "cost_per_1m_in": 0.089,
        "cost_per_1m_out": 0.268,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 40000,
        "default_max_tokens": 4000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3-235b-a22b-instruct-2507",
        "name": "Qwen3 235B A22B Instruct 2507",
        "cost_per_1m_in": 0.062118,
        "cost_per_1m_out": 0.408204,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-30b-a3b-instruct",
        "name": "Qwen3 Coder 30b a3b Instruct",
        "cost_per_1m_in": 0.053244,
        "cost_per_1m_out": 0.22185,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 26200,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-4.1",
        "name": "GPT 4.1",
        "cost_per_1m_in": 1.968,
        "cost_per_1m_out": 7.872,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 104757,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-mini",
        "name": "GPT 4.1 mini",
        "cost_per_1m_in": 0.39,
        "cost_per_1m_out": 1.53,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 104757,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-4.1-nano",
        "name": "GPT 4.1 nano",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.39,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 104757,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nova-micro-v1",
        "name": "Nova Micro 1.0",
        "cost_per_1m_in": 0.036,
        "cost_per_1m_out": 0.143,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nova-lite-v1",
        "name": "Nova Lite 1.0",
        "cost_per_1m_in": 0.062,
        "cost_per_1m_out": 0.247,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 300000,
        "default_max_tokens": 30000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nova-pro-v1",
        "name": "Nova Pro 1.0",
        "cost_per_1m_in": 0.824,
        "cost_per_1m_out": 3.295,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 300000,
        "default_max_tokens": 30000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4",
        "name": "Claude Sonnet 4",
        "cost_per_1m_in": 2.601,
        "cost_per_1m_out": 13.01,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "llama-3.1-nemotron-ultra-253b-v1",
        "name": "Llama 3.1 Nemotron Ultra 253B v1",
        "cost_per_1m_in": 0.537,
        "cost_per_1m_out": 1.61,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "llama-4-maverick",
        "name": "Llama 4 Maverick",
        "cost_per_1m_in": 0.124236,
        "cost_per_1m_out": 0.602832,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 105000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek-v3-0324",
        "name": "DeepSeek V3 0324",
        "cost_per_1m_in": 0.26622,
        "cost_per_1m_out": 0.8874,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral-small-2503",
        "name": "Mistral Small 2503",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistral-small-2506",
        "name": "Mistral Small 2506",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.0-flash-001",
        "name": "Gemini 2.0 Flash",
        "cost_per_1m_in": 0.13416,
        "cost_per_1m_out": 0.53664,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 104857,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.0-flash-lite-001",
        "name": "Gemini 2.0 Flash Lite",
        "cost_per_1m_in": 0.06708,
        "cost_per_1m_out": 0.26832,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 104857,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash",
        "name": "Gemini 2.5 Flash",
        "cost_per_1m_in": 0.26832,
        "cost_per_1m_out": 2.236,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 104857,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro",
        "name": "Gemini 2.5 Pro",
        "cost_per_1m_in": 1.3416,
        "cost_per_1m_out": 8.944,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 104857,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemma-3-27b-it",
        "name": "Gemma 3 27b it",
        "cost_per_1m_in": 0.089,
        "cost_per_1m_out": 0.268,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "deepseek-r1-0528",
        "name": "DeepSeek R1 0528",
        "cost_per_1m_in": 0.585084,
        "cost_per_1m_out": 2.30724,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 164000,
        "default_max_tokens": 16400,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "codestral-2508",
        "name": "Codestral 25.08",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "llama-3.3-70b-instruct",
        "name": "Llama 3.3 70B Instruct",
        "cost_per_1m_in": 0.08874,
        "cost_per_1m_out": 0.274994,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-4o",
        "name": "GPT 4o",
        "cost_per_1m_in": 2.38664,
        "cost_per_1m_out": 9.5466,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-mini",
        "name": "GPT 5 mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.968,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 40000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-nano",
        "name": "GPT 5 nano",
        "cost_per_1m_in": 0.054,
        "cost_per_1m_out": 0.394,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 40000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mistral-large-2411",
        "name": "Mistral Large 2411",
        "cost_per_1m_in": 1.8,
        "cost_per_1m_out": 5.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "hermes-4-405b",
        "name": "Hermes 4 405B",
        "cost_per_1m_in": 0.894,
        "cost_per_1m_out": 2.683,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral-nemo-instruct-2407",
        "name": "Mistral Nemo 2407",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 0.13,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "devstral-medium-2507",
        "name": "Devstral Medium 2507",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "devstral-small-2507",
        "name": "Devstral Small 2507",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral-medium-2508",
        "name": "Mistral Medium 2508",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "llama-3.1-405b-instruct",
        "name": "Llama 3.1 405B Instruct",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 1.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-4o-mini",
        "name": "GPT 4o mini",
        "cost_per_1m_in": 0.1432,
        "cost_per_1m_out": 0.5728,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "llama-3.1-8b-instruct",
        "name": "Llama 3.1 8B Instruct",
        "cost_per_1m_in": 0.018,
        "cost_per_1m_out": 0.054,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "DeepSeek",
    "id": "deepseek",
    "api_key": "$DEEPSEEK_API_KEY",
    "api_endpoint": "https://api.deepseek.com/v1",
    "type": "openai-compat",
    "default_large_model_id": "deepseek-reasoner",
    "default_small_model_id": "deepseek-chat",
    "models": [
      {
        "id": "deepseek-chat",
        "name": "DeepSeek-V3.2 (Non-thinking Mode)",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 0.42,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.028,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek-reasoner",
        "name": "DeepSeek-V3.2 (Thinking Mode)",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 0.42,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.028,
        "context_window": 128000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Groq",
    "id": "groq",
    "api_key": "$GROQ_API_KEY",
    "api_endpoint": "https://api.groq.com/openai/v1",
    "type": "openai-compat",
    "default_large_model_id": "moonshotai/kimi-k2-instruct-0905",
    "default_small_model_id": "qwen/qwen3-32b",
    "models": [
      {
        "id": "moonshotai/kimi-k2-instruct-0905",
        "name": "Kimi K2 0905",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 131072,
        "default_max_tokens": 10000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-32b",
        "name": "Qwen3 32B",
        "cost_per_1m_in": 0.29,
        "cost_per_1m_out": 0.59,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 10000,
        "can_reason": false,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Hugging Face",
    "id": "huggingface",
    "api_key": "$HF_TOKEN",
    "api_endpoint": "https://router.huggingface.co/v1",
    "type": "openai-compat",
    "default_large_model_id": "moonshotai/Kimi-K2.5:fireworks-ai",
    "default_small_model_id": "openai/gpt-oss-20b:groq",
    "models": [
      {
        "id": "MiniMaxAI/MiniMax-M2.5:fireworks-ai",
        "name": "MiniMaxAI/MiniMax-M2.5 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMaxAI/MiniMax-M2.7:fireworks-ai",
        "name": "MiniMaxAI/MiniMax-M2.7 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-235B-A22B-Instruct-2507:cerebras",
        "name": "Qwen/Qwen3-235B-A22B-Instruct-2507 (cerebras)",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-32B:groq",
        "name": "Qwen/Qwen3-32B (groq)",
        "cost_per_1m_in": 0.29,
        "cost_per_1m_out": 0.59,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-8B:fireworks-ai",
        "name": "Qwen/Qwen3-8B (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-VL-30B-A3B-Instruct:fireworks-ai",
        "name": "Qwen/Qwen3-VL-30B-A3B-Instruct (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-VL-30B-A3B-Thinking:fireworks-ai",
        "name": "Qwen/Qwen3-VL-30B-A3B-Thinking (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta-llama/Llama-4-Scout-17B-16E-Instruct:groq",
        "name": "meta-llama/Llama-4-Scout-17B-16E-Instruct (groq)",
        "cost_per_1m_in": 0.11,
        "cost_per_1m_out": 0.34,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2.5:fireworks-ai",
        "name": "moonshotai/Kimi-K2.5 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2.6:fireworks-ai",
        "name": "moonshotai/Kimi-K2.6 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-120b:cerebras",
        "name": "openai/gpt-oss-120b (cerebras)",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 0.69,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-120b:fireworks-ai",
        "name": "openai/gpt-oss-120b (fireworks-ai)",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-120b:groq",
        "name": "openai/gpt-oss-120b (groq)",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-20b:fireworks-ai",
        "name": "openai/gpt-oss-20b (fireworks-ai)",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-20b:groq",
        "name": "openai/gpt-oss-20b (groq)",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-safeguard-20b:groq",
        "name": "openai/gpt-oss-safeguard-20b (groq)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-4.7:cerebras",
        "name": "zai-org/GLM-4.7 (cerebras)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-4.7:fireworks-ai",
        "name": "zai-org/GLM-4.7 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5:fireworks-ai",
        "name": "zai-org/GLM-5 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5.1:fireworks-ai",
        "name": "zai-org/GLM-5.1 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5.1-FP8:fireworks-ai",
        "name": "zai-org/GLM-5.1-FP8 (fireworks-ai)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      }
    ],
    "default_headers": {
      "HTTP-Referer": "https://charm.land",
      "X-Title": "Crush"
    }
  },
  {
    "name": "io.net",
    "id": "ionet",
    "api_key": "$IONET_API_KEY",
    "api_endpoint": "https://api.intelligence.io.solutions/api/v1",
    "type": "openai-compat",
    "default_large_model_id": "moonshotai/Kimi-K2.5",
    "default_small_model_id": "zai-org/GLM-4.7-Flash",
    "models": [
      {
        "id": "google/gemma-4-26b-a4b-it",
        "name": "Google: Gemma 4 26B A4B",
        "cost_per_1m_in": 0.145,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0.08,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "Intel/Qwen3-Coder-480B-A35B-Instruct-int4-mixed-ar",
        "name": "Intel: Qwen3 Coder 480B A35B Instruct INT4 Mixed AR",
        "cost_per_1m_in": 0.22,
        "cost_per_1m_out": 0.95,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0.44,
        "context_window": 106000,
        "default_max_tokens": 10600,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta-llama/Llama-3.3-70B-Instruct",
        "name": "Meta: Llama 3.3 70B Instruct",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.32,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "MiniMaxAI/MiniMax-M2.5",
        "name": "MiniMaxAI/MiniMax-M2.5",
        "cost_per_1m_in": 0.118,
        "cost_per_1m_out": 0.99,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 0,
        "context_window": 196600,
        "default_max_tokens": 19660,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/Mistral-Large-Instruct-2411",
        "name": "Mistral: Mistral Large Instruct 2411",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 1,
        "cost_per_1m_out_cached": 4,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "moonshotai/Kimi-K2-Instruct-0905",
        "name": "MoonshotAI: Kimi K2 Instruct 0905",
        "cost_per_1m_in": 0.39,
        "cost_per_1m_out": 1.9,
        "cost_per_1m_in_cached": 0.195,
        "cost_per_1m_out_cached": 0.78,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2-Thinking",
        "name": "MoonshotAI: Kimi K2 Thinking",
        "cost_per_1m_in": 0.32,
        "cost_per_1m_out": 0.48,
        "cost_per_1m_in_cached": 0.16,
        "cost_per_1m_out_cached": 0.64,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2.5",
        "name": "MoonshotAI: Kimi K2.5",
        "cost_per_1m_in": 0.445,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.225,
        "cost_per_1m_out_cached": 1.1,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "moonshotai/Kimi-K2.6",
        "name": "MoonshotAI: Kimi K2.6",
        "cost_per_1m_in": 0.8,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-oss-120b",
        "name": "OpenAI: gpt-oss-120b",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.01,
        "cost_per_1m_out_cached": 0.04,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-20b",
        "name": "OpenAI: gpt-oss-20b",
        "cost_per_1m_in": 0.016,
        "cost_per_1m_out": 0.06,
        "cost_per_1m_in_cached": 0.008,
        "cost_per_1m_out_cached": 0.032,
        "context_window": 64000,
        "default_max_tokens": 6400,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-Next-80B-A3B-Instruct",
        "name": "Qwen: Qwen3 Next 80B A3B Instruct",
        "cost_per_1m_in": 0.06,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.12,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-4.6",
        "name": "Z.ai: GLM 4.6",
        "cost_per_1m_in": 0.35,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0.7,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-4.7",
        "name": "Z.ai: GLM 4.7",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.4,
        "cost_per_1m_in_cached": 0.15,
        "cost_per_1m_out_cached": 0.6,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-4.7-Flash",
        "name": "Z.ai: GLM 4.7 Flash",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.035,
        "cost_per_1m_out_cached": 0.14,
        "context_window": 200000,
        "default_max_tokens": 20000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5",
        "name": "Z.ai: GLM 5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5.1",
        "name": "Z.ai: GLM 5.1",
        "cost_per_1m_in": 1.06,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Nebius Token Factory",
    "id": "nebius",
    "api_key": "$NEBIUS_API_KEY",
    "api_endpoint": "https://api.tokenfactory.nebius.com/v1",
    "type": "openai-compat",
    "default_large_model_id": "moonshotai/Kimi-K2.5",
    "default_small_model_id": "nvidia/NVIDIA-Nemotron-3-Nano-30B-A3B",
    "models": [
      {
        "id": "deepseek-ai/DeepSeek-V3.2",
        "name": "DeepSeek-V3.2",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.45,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 163000,
        "default_max_tokens": 16300,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek-ai/DeepSeek-V3.2-fast",
        "name": "DeepSeek-V3.2 (fast)",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 8000,
        "default_max_tokens": 800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai-org/GLM-5",
        "name": "GLM-5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "NousResearch/Hermes-4-405B",
        "name": "Hermes-4-405B",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "NousResearch/Hermes-4-70B",
        "name": "Hermes-4-70B",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "PrimeIntellect/INTELLECT-3",
        "name": "INTELLECT-3",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2.5",
        "name": "Kimi-K2.5",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2.5-fast",
        "name": "Kimi-K2.5 (fast)",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 8000,
        "default_max_tokens": 800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "meta-llama/Llama-3.3-70B-Instruct",
        "name": "Llama-3.3-70B-Instruct",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta-llama/Meta-Llama-3.1-8B-Instruct",
        "name": "Meta-Llama-3.1-8B-Instruct",
        "cost_per_1m_in": 0.02,
        "cost_per_1m_out": 0.06,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "MiniMaxAI/MiniMax-M2.5",
        "name": "MiniMax-M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "MiniMaxAI/MiniMax-M2.5-fast",
        "name": "MiniMax-M2.5 (fast)",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 8000,
        "default_max_tokens": 800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/NVIDIA-Nemotron-3-Nano-30B-A3B",
        "name": "Nemotron-3-Nano-30B-A3B",
        "cost_per_1m_in": 0.06,
        "cost_per_1m_out": 0.24,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/nemotron-3-super-120b-a12b",
        "name": "Nemotron-3-Super-120b-a12b",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-235B-A22B-Instruct-2507",
        "name": "Qwen3-235B-A22B-Instruct-2507",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-235B-A22B-Thinking-2507-fast",
        "name": "Qwen3-235B-A22B-Thinking-2507 (fast)",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 8000,
        "default_max_tokens": 800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-30B-A3B-Instruct-2507",
        "name": "Qwen3-30B-A3B-Instruct-2507",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-32B",
        "name": "Qwen3-32B",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 4096,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-Next-80B-A3B-Thinking",
        "name": "Qwen3-Next-80B-A3B-Thinking",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3-Next-80B-A3B-Thinking-fast",
        "name": "Qwen3-Next-80B-A3B-Thinking (fast)",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 8000,
        "default_max_tokens": 800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3.5-397B-A17B",
        "name": "Qwen3.5-397B-A17B",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3.5-397B-A17B-fast",
        "name": "Qwen3.5-397B-A17B (fast)",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 8000,
        "default_max_tokens": 800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-120b",
        "name": "gpt-oss-120b",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-120b-fast",
        "name": "gpt-oss-120b (fast)",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 8000,
        "default_max_tokens": 800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Neuralwatt",
    "id": "neuralwatt",
    "api_key": "$NEURALWATT_API_KEY",
    "api_endpoint": "https://api.neuralwatt.com/v1",
    "type": "openai-compat",
    "default_large_model_id": "zai-org/GLM-5.1-FP8",
    "default_small_model_id": "mistralai/Devstral-Small-2-24B-Instruct-2512",
    "models": [
      {
        "id": "mistralai/Devstral-Small-2-24B-Instruct-2512",
        "name": "Devstral Small 2 24B Instruct 2512",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "zai-org/GLM-5.1-FP8",
        "name": "GLM 5.1 FP8",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 3.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5.1-fast",
        "name": "GLM 5.1 Fast",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 3.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/Kimi-K2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 2.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "kimi-k2.5-fast",
        "name": "Kimi K2.5 Fast",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 2.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "MiniMaxAI/MiniMax-M2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 19660,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "Qwen/Qwen3.5-397B-A17B-FP8",
        "name": "Qwen3.5 397B A17B FP8",
        "cost_per_1m_in": 0.7,
        "cost_per_1m_out": 4.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3.5-397b-fast",
        "name": "Qwen3.5 397B Fast",
        "cost_per_1m_in": 0.7,
        "cost_per_1m_out": 4.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "OpenCode Go",
    "id": "opencode-go",
    "api_key": "$OPENCODE_API_KEY",
    "api_endpoint": "https://opencode.ai/zen/go/v1",
    "type": "openai-compat",
    "default_large_model_id": "minimax-m2.7",
    "default_small_model_id": "minimax-m2.7",
    "models": [
      {
        "id": "glm-5",
        "name": "GLM-5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5.1",
        "name": "GLM-5.1",
        "cost_per_1m_in": 1.4,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "kimi-k2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "kimi-k2.6",
        "name": "Kimi K2.6 (3x limits)",
        "cost_per_1m_in": 0.32,
        "cost_per_1m_out": 1.34,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mimo-v2-omni",
        "name": "MiMo V2 Omni",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.08,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mimo-v2-pro",
        "name": "MiMo V2 Pro",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mimo-v2.5",
        "name": "MiMo V2.5",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.08,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mimo-v2.5-pro",
        "name": "MiMo V2.5 Pro",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "minimax-m2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.7",
        "name": "MiniMax M2.7",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3.5-plus",
        "name": "Qwen3.5 Plus",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.02,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.6-plus",
        "name": "Qwen3.6 Plus",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "OpenCode Zen",
    "id": "opencode-zen",
    "api_key": "$OPENCODE_API_KEY",
    "api_endpoint": "https://opencode.ai/zen/v1",
    "type": "openai-compat",
    "default_large_model_id": "minimax-m2.5-free",
    "default_small_model_id": "minimax-m2.5-free",
    "models": [
      {
        "id": "big-pickle",
        "name": "Big Pickle",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "claude-3-5-haiku",
        "name": "Claude Haiku 3.5",
        "cost_per_1m_in": 0.8,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.08,
        "cost_per_1m_out_cached": 1,
        "context_window": 200000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "claude-haiku-4-5",
        "name": "Claude Haiku 4.5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-1",
        "name": "Claude Opus 4.1",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 1.5,
        "cost_per_1m_out_cached": 18.75,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-5",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 6.25,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-6",
        "name": "Claude Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 6.25,
        "context_window": 1000000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-7",
        "name": "Claude Opus 4.7",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 6.25,
        "context_window": 1000000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4",
        "name": "Claude Sonnet 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 3.75,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-5",
        "name": "Claude Sonnet 4.5",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 3.75,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-6",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 3.75,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "glm-5",
        "name": "GLM-5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "glm-5.1",
        "name": "GLM-5.1",
        "cost_per_1m_in": 1.4,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-5",
        "name": "GPT-5",
        "cost_per_1m_in": 1.07,
        "cost_per_1m_out": 8.5,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-codex",
        "name": "GPT-5 Codex",
        "cost_per_1m_in": 1.07,
        "cost_per_1m_out": 8.5,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5-nano",
        "name": "GPT-5 Nano",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1",
        "name": "GPT-5.1",
        "cost_per_1m_in": 1.07,
        "cost_per_1m_out": 8.5,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex",
        "name": "GPT-5.1 Codex",
        "cost_per_1m_in": 1.07,
        "cost_per_1m_out": 8.5,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex-max",
        "name": "GPT-5.1 Codex Max",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.13,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.1-codex-mini",
        "name": "GPT-5.1 Codex Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2",
        "name": "GPT-5.2",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.18,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.2-codex",
        "name": "GPT-5.2 Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.18,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.3-codex",
        "name": "GPT-5.3 Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.18,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.3-codex-spark",
        "name": "GPT-5.3 Codex Spark",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.18,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "gpt-5.4",
        "name": "GPT-5.4",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.25,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-mini",
        "name": "GPT-5.4 Mini",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 4.5,
        "cost_per_1m_in_cached": 0.08,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-nano",
        "name": "GPT-5.4 Nano",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0.02,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gpt-5.4-pro",
        "name": "GPT-5.4 Pro",
        "cost_per_1m_in": 30,
        "cost_per_1m_out": 180,
        "cost_per_1m_in_cached": 30,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash",
        "name": "Gemini 3 Flash",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-pro",
        "name": "Gemini 3 Pro",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-pro",
        "name": "Gemini 3.1 Pro Preview",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "kimi-k2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.08,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "kimi-k2.6",
        "name": "Kimi K2.6",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.16,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "ling-2.6-flash-free",
        "name": "Ling 2.6 Flash Free",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262100,
        "default_max_tokens": 32800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.5-free",
        "name": "MiniMax M2.5 Free",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m2.7",
        "name": "MiniMax M2.7",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nemotron-3-super-free",
        "name": "Nemotron 3 Super Free",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen3.5-plus",
        "name": "Qwen3.5 Plus",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.02,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3.6-plus",
        "name": "Qwen3.6 Plus",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0.63,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "trinity-large-preview-free",
        "name": "Trinity Large Preview",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "OpenRouter",
    "id": "openrouter",
    "api_key": "$OPENROUTER_API_KEY",
    "api_endpoint": "https://openrouter.ai/api/v1",
    "type": "openrouter",
    "default_large_model_id": "anthropic/claude-sonnet-4",
    "default_small_model_id": "anthropic/claude-3.5-haiku",
    "models": [
      {
        "id": "ai21/jamba-large-1.7",
        "name": "AI21: Jamba Large 1.7",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 2048,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "allenai/olmo-3.1-32b-instruct",
        "name": "AllenAI: Olmo 3.1 32B Instruct",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 65536,
        "default_max_tokens": 6553,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "amazon/nova-2-lite-v1",
        "name": "Amazon: Nova 2 Lite",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 32767,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "amazon/nova-lite-v1",
        "name": "Amazon: Nova Lite 1.0",
        "cost_per_1m_in": 0.06,
        "cost_per_1m_out": 0.24,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 300000,
        "default_max_tokens": 2560,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "amazon/nova-micro-v1",
        "name": "Amazon: Nova Micro 1.0",
        "cost_per_1m_in": 0.035,
        "cost_per_1m_out": 0.14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 2560,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "amazon/nova-premier-v1",
        "name": "Amazon: Nova Premier 1.0",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 12.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.625,
        "context_window": 1000000,
        "default_max_tokens": 16000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "amazon/nova-pro-v1",
        "name": "Amazon: Nova Pro 1.0",
        "cost_per_1m_in": 0.8,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 300000,
        "default_max_tokens": 2560,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-3-haiku",
        "name": "Anthropic: Claude 3 Haiku",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 200000,
        "default_max_tokens": 2048,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-3.5-haiku",
        "name": "Anthropic: Claude 3.5 Haiku",
        "cost_per_1m_in": 0.8,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 1,
        "cost_per_1m_out_cached": 0.08,
        "context_window": 200000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-3.7-sonnet",
        "name": "Anthropic: Claude 3.7 Sonnet",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-3.7-sonnet:thinking",
        "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-haiku-4.5",
        "name": "Anthropic: Claude Haiku 4.5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 1.25,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4",
        "name": "Anthropic: Claude Opus 4",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 18.75,
        "cost_per_1m_out_cached": 1.5,
        "context_window": 200000,
        "default_max_tokens": 16000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.1",
        "name": "Anthropic: Claude Opus 4.1",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 18.75,
        "cost_per_1m_out_cached": 1.5,
        "context_window": 200000,
        "default_max_tokens": 16000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.5",
        "name": "Anthropic: Claude Opus 4.5",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.6",
        "name": "Anthropic: Claude Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.6-fast",
        "name": "Anthropic: Claude Opus 4.6 (Fast)",
        "cost_per_1m_in": 30,
        "cost_per_1m_out": 150,
        "cost_per_1m_in_cached": 37.5,
        "cost_per_1m_out_cached": 3,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.7",
        "name": "Anthropic: Claude Opus 4.7",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-sonnet-4",
        "name": "Anthropic: Claude Sonnet 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 1000000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-sonnet-4.5",
        "name": "Anthropic: Claude Sonnet 4.5",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 1000000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-sonnet-4.6",
        "name": "Anthropic: Claude Sonnet 4.6",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "arcee-ai/trinity-large-preview",
        "name": "Arcee AI: Trinity Large Preview",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.45,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 13100,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "arcee-ai/trinity-large-thinking",
        "name": "Arcee AI: Trinity Large Thinking",
        "cost_per_1m_in": 0.22,
        "cost_per_1m_out": 0.85,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.06,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "arcee-ai/trinity-mini",
        "name": "Arcee AI: Trinity Mini",
        "cost_per_1m_in": 0.045,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "arcee-ai/virtuoso-large",
        "name": "Arcee AI: Virtuoso Large",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "baidu/ernie-4.5-21b-a3b",
        "name": "Baidu: ERNIE 4.5 21B A3B",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.28,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 120000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "baidu/ernie-4.5-vl-28b-a3b",
        "name": "Baidu: ERNIE 4.5 VL 28B A3B",
        "cost_per_1m_in": 0.14,
        "cost_per_1m_out": 0.56,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 30000,
        "default_max_tokens": 4000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "bytedance-seed/seed-1.6",
        "name": "ByteDance Seed: Seed 1.6",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "bytedance-seed/seed-1.6-flash",
        "name": "ByteDance Seed: Seed 1.6 Flash",
        "cost_per_1m_in": 0.075,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "bytedance-seed/seed-2.0-lite",
        "name": "ByteDance Seed: Seed-2.0-Lite",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "bytedance-seed/seed-2.0-mini",
        "name": "ByteDance Seed: Seed-2.0-Mini",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "cohere/command-r-08-2024",
        "name": "Cohere: Command R (08-2024)",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 2000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "cohere/command-r-plus-08-2024",
        "name": "Cohere: Command R+ (08-2024)",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 2000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-chat",
        "name": "DeepSeek: DeepSeek V3",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 1.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 64000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-chat-v3-0324",
        "name": "DeepSeek: DeepSeek V3 0324",
        "cost_per_1m_in": 0.27,
        "cost_per_1m_out": 1.12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.135,
        "context_window": 163840,
        "default_max_tokens": 81920,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-chat-v3.1",
        "name": "DeepSeek: DeepSeek V3.1",
        "cost_per_1m_in": 0.21,
        "cost_per_1m_out": 0.79,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.13,
        "context_window": 163840,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.1-terminus",
        "name": "DeepSeek: DeepSeek V3.1 Terminus",
        "cost_per_1m_in": 0.21,
        "cost_per_1m_out": 0.79,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.13,
        "context_window": 163840,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.2",
        "name": "DeepSeek: DeepSeek V3.2",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 163840,
        "default_max_tokens": 81920,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.2-exp",
        "name": "DeepSeek: DeepSeek V3.2 Exp",
        "cost_per_1m_in": 0.27,
        "cost_per_1m_out": 0.41,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-r1",
        "name": "DeepSeek: R1",
        "cost_per_1m_in": 0.7,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 64000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-r1-0528",
        "name": "DeepSeek: R1 0528",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 2.18,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 81920,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "essentialai/rnj-1-instruct",
        "name": "EssentialAI: Rnj 1 Instruct",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 3276,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "google/gemini-2.0-flash-001",
        "name": "Google: Gemini 2.0 Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.08333,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 1048576,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.0-flash-lite-001",
        "name": "Google: Gemini 2.0 Flash Lite",
        "cost_per_1m_in": 0.075,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-flash",
        "name": "Google: Gemini 2.5 Flash",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0.08333,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 32767,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-flash-lite",
        "name": "Google: Gemini 2.5 Flash Lite",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.08333,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 1048576,
        "default_max_tokens": 32767,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-flash-lite-preview-09-2025",
        "name": "Google: Gemini 2.5 Flash Lite Preview 09-2025",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.08333,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 1048576,
        "default_max_tokens": 32767,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-pro",
        "name": "Google: Gemini 2.5 Pro",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.375,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-pro-preview-05-06",
        "name": "Google: Gemini 2.5 Pro Preview 05-06",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.375,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-pro-preview",
        "name": "Google: Gemini 2.5 Pro Preview 06-05",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.375,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3-flash-preview",
        "name": "Google: Gemini 3 Flash Preview",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.08333,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 1048576,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3.1-flash-lite-preview",
        "name": "Google: Gemini 3.1 Flash Lite Preview",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0.08333,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 1048576,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3.1-pro-preview",
        "name": "Google: Gemini 3.1 Pro Preview",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0.375,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3.1-pro-preview-customtools",
        "name": "Google: Gemini 3.1 Pro Preview Custom Tools",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0.375,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemma-4-26b-a4b-it",
        "name": "Google: Gemma 4 26B A4B ",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemma-4-26b-a4b-it:free",
        "name": "Google: Gemma 4 26B A4B  (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemma-4-31b-it",
        "name": "Google: Gemma 4 31B",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 0.38,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemma-4-31b-it:free",
        "name": "Google: Gemma 4 31B (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "inception/mercury-2",
        "name": "Inception: Mercury 2",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 128000,
        "default_max_tokens": 25000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "kwaipilot/kat-coder-pro-v2",
        "name": "Kwaipilot: KAT-Coder-Pro V2",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.06,
        "context_window": 256000,
        "default_max_tokens": 40000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta-llama/llama-3.1-70b-instruct",
        "name": "Meta: Llama 3.1 70B Instruct",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta-llama/llama-3.3-70b-instruct",
        "name": "Meta: Llama 3.3 70B Instruct",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.6,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta-llama/llama-3.3-70b-instruct:free",
        "name": "Meta: Llama 3.3 70B Instruct (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 65536,
        "default_max_tokens": 6553,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta-llama/llama-4-scout",
        "name": "Meta: Llama 4 Scout",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 0.7,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1310720,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "minimax/minimax-m2",
        "name": "MiniMax: MiniMax M2",
        "cost_per_1m_in": 0.255,
        "cost_per_1m_out": 1.02,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.1",
        "name": "MiniMax: MiniMax M2.1",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.5",
        "name": "MiniMax: MiniMax M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.5:free",
        "name": "MiniMax: MiniMax M2.5 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 196608,
        "default_max_tokens": 4096,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.7",
        "name": "MiniMax: MiniMax M2.7",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.06,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistralai/mistral-large",
        "name": "Mistral Large",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/mistral-large-2407",
        "name": "Mistral Large 2407",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/mistral-large-2411",
        "name": "Mistral Large 2411",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/codestral-2508",
        "name": "Mistral: Codestral 2508",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/devstral-2512",
        "name": "Mistral: Devstral 2 2512",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.04,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/devstral-medium",
        "name": "Mistral: Devstral Medium",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.04,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/devstral-small",
        "name": "Mistral: Devstral Small 1.1",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/ministral-14b-2512",
        "name": "Mistral: Ministral 3 14B 2512",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/ministral-3b-2512",
        "name": "Mistral: Ministral 3 3B 2512",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/ministral-8b-2512",
        "name": "Mistral: Ministral 3 8B 2512",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.015,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/mistral-large-2512",
        "name": "Mistral: Mistral Large 3 2512",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/mistral-medium-3",
        "name": "Mistral: Mistral Medium 3",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.04,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/mistral-medium-3.1",
        "name": "Mistral: Mistral Medium 3.1",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.04,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/mistral-nemo",
        "name": "Mistral: Mistral Nemo",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.015,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/mistral-small-3.2-24b-instruct",
        "name": "Mistral: Mistral Small 3.2 24B",
        "cost_per_1m_in": 0.09375,
        "cost_per_1m_out": 0.25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/mistral-small-2603",
        "name": "Mistral: Mistral Small 4",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.015,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mistralai/mistral-small-creative",
        "name": "Mistral: Mistral Small Creative",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 32768,
        "default_max_tokens": 3276,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/mixtral-8x22b-instruct",
        "name": "Mistral: Mixtral 8x22B Instruct",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 65536,
        "default_max_tokens": 6553,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/mixtral-8x7b-instruct",
        "name": "Mistral: Mixtral 8x7B Instruct",
        "cost_per_1m_in": 0.54,
        "cost_per_1m_out": 0.54,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/pixtral-large-2411",
        "name": "Mistral: Pixtral Large 2411",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistralai/mistral-saba",
        "name": "Mistral: Saba",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02,
        "context_window": 32768,
        "default_max_tokens": 3276,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistralai/voxtral-small-24b-2507",
        "name": "Mistral: Voxtral Small 24B 2507",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 32000,
        "default_max_tokens": 3200,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2",
        "name": "MoonshotAI: Kimi K2 0711",
        "cost_per_1m_in": 0.57,
        "cost_per_1m_out": 2.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-0905",
        "name": "MoonshotAI: Kimi K2 0905",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.6,
        "context_window": 262144,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-thinking",
        "name": "MoonshotAI: Kimi K2 Thinking",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.6,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2.5",
        "name": "MoonshotAI: Kimi K2.5",
        "cost_per_1m_in": 0.45,
        "cost_per_1m_out": 2.25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.07,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "moonshotai/kimi-k2.6",
        "name": "MoonshotAI: Kimi K2.6",
        "cost_per_1m_in": 0.8,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nvidia/llama-3.1-nemotron-70b-instruct",
        "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "nvidia/llama-3.3-nemotron-super-49b-v1.5",
        "name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/nemotron-3-nano-30b-a3b",
        "name": "NVIDIA: Nemotron 3 Nano 30B A3B",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/nemotron-3-nano-30b-a3b:free",
        "name": "NVIDIA: Nemotron 3 Nano 30B A3B (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/nemotron-3-super-120b-a12b",
        "name": "NVIDIA: Nemotron 3 Super",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/nemotron-3-super-120b-a12b:free",
        "name": "NVIDIA: Nemotron 3 Super (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/nemotron-nano-12b-v2-vl:free",
        "name": "NVIDIA: Nemotron Nano 12B 2 VL (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nvidia/nemotron-nano-9b-v2",
        "name": "NVIDIA: Nemotron Nano 9B V2",
        "cost_per_1m_in": 0.04,
        "cost_per_1m_out": 0.16,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nvidia/nemotron-nano-9b-v2:free",
        "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "nex-agi/deepseek-v3.1-nex-n1",
        "name": "Nex AGI: DeepSeek V3.1 Nex N1",
        "cost_per_1m_in": 0.135,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 81920,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-audio",
        "name": "OpenAI: GPT Audio",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-audio-mini",
        "name": "OpenAI: GPT Audio Mini",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-4-turbo",
        "name": "OpenAI: GPT-4 Turbo",
        "cost_per_1m_in": 10,
        "cost_per_1m_out": 30,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 2048,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4-1106-preview",
        "name": "OpenAI: GPT-4 Turbo (older v1106)",
        "cost_per_1m_in": 10,
        "cost_per_1m_out": 30,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 2048,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-4-turbo-preview",
        "name": "OpenAI: GPT-4 Turbo Preview",
        "cost_per_1m_in": 10,
        "cost_per_1m_out": 30,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 2048,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-4.1",
        "name": "OpenAI: GPT-4.1",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1047576,
        "default_max_tokens": 104757,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4.1-mini",
        "name": "OpenAI: GPT-4.1 Mini",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 1.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 1047576,
        "default_max_tokens": 104757,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4.1-nano",
        "name": "OpenAI: GPT-4.1 Nano",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1047576,
        "default_max_tokens": 104757,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o",
        "name": "OpenAI: GPT-4o",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o-2024-05-13",
        "name": "OpenAI: GPT-4o (2024-05-13)",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 2048,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o-2024-08-06",
        "name": "OpenAI: GPT-4o (2024-08-06)",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o-2024-11-20",
        "name": "OpenAI: GPT-4o (2024-11-20)",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o-audio-preview",
        "name": "OpenAI: GPT-4o Audio",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-4o-mini",
        "name": "OpenAI: GPT-4o-mini",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o-mini-2024-07-18",
        "name": "OpenAI: GPT-4o-mini (2024-07-18)",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5",
        "name": "OpenAI: GPT-5",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-codex",
        "name": "OpenAI: GPT-5 Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-mini",
        "name": "OpenAI: GPT-5 Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-nano",
        "name": "OpenAI: GPT-5 Nano",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 400000,
        "default_max_tokens": 40000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-pro",
        "name": "OpenAI: GPT-5 Pro",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 120,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1",
        "name": "OpenAI: GPT-5.1",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.13,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-chat",
        "name": "OpenAI: GPT-5.1 Chat",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-codex",
        "name": "OpenAI: GPT-5.1-Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-codex-max",
        "name": "OpenAI: GPT-5.1-Codex-Max",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-codex-mini",
        "name": "OpenAI: GPT-5.1-Codex-Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 400000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2",
        "name": "OpenAI: GPT-5.2",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2-chat",
        "name": "OpenAI: GPT-5.2 Chat",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2-pro",
        "name": "OpenAI: GPT-5.2 Pro",
        "cost_per_1m_in": 21,
        "cost_per_1m_out": 168,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2-codex",
        "name": "OpenAI: GPT-5.2-Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.3-chat",
        "name": "OpenAI: GPT-5.3 Chat",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.3-codex",
        "name": "OpenAI: GPT-5.3-Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.175,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4",
        "name": "OpenAI: GPT-5.4",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.25,
        "context_window": 1050000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4-mini",
        "name": "OpenAI: GPT-5.4 Mini",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 4.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4-nano",
        "name": "OpenAI: GPT-5.4 Nano",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02,
        "context_window": 400000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4-pro",
        "name": "OpenAI: GPT-5.4 Pro",
        "cost_per_1m_in": 30,
        "cost_per_1m_out": 180,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-oss-120b",
        "name": "OpenAI: gpt-oss-120b",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-120b:free",
        "name": "OpenAI: gpt-oss-120b (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-20b",
        "name": "OpenAI: gpt-oss-20b",
        "cost_per_1m_in": 0.03,
        "cost_per_1m_out": 0.14,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-20b:free",
        "name": "OpenAI: gpt-oss-20b (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 4096,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-safeguard-20b",
        "name": "OpenAI: gpt-oss-safeguard-20b",
        "cost_per_1m_in": 0.075,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.037,
        "context_window": 131072,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/o1",
        "name": "OpenAI: o1",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 60,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 7.5,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o3",
        "name": "OpenAI: o3",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o3-deep-research",
        "name": "OpenAI: o3 Deep Research",
        "cost_per_1m_in": 10,
        "cost_per_1m_out": 40,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 2.5,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o3-mini",
        "name": "OpenAI: o3 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.55,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/o3-mini-high",
        "name": "OpenAI: o3 Mini High",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.55,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/o3-pro",
        "name": "OpenAI: o3 Pro",
        "cost_per_1m_in": 20,
        "cost_per_1m_out": 80,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o4-mini",
        "name": "OpenAI: o4 Mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.275,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o4-mini-deep-research",
        "name": "OpenAI: o4 Mini Deep Research",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o4-mini-high",
        "name": "OpenAI: o4 Mini High",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.275,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "prime-intellect/intellect-3",
        "name": "Prime Intellect: INTELLECT-3",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen-2.5-72b-instruct",
        "name": "Qwen2.5 72B Instruct",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.39,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwq-32b",
        "name": "Qwen: QwQ 32B",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.58,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen-plus-2025-07-28",
        "name": "Qwen: Qwen Plus 0728",
        "cost_per_1m_in": 0.26,
        "cost_per_1m_out": 0.78,
        "cost_per_1m_in_cached": 0.325,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen-plus-2025-07-28:thinking",
        "name": "Qwen: Qwen Plus 0728 (thinking)",
        "cost_per_1m_in": 0.26,
        "cost_per_1m_out": 0.78,
        "cost_per_1m_in_cached": 0.325,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen-vl-max",
        "name": "Qwen: Qwen VL Max",
        "cost_per_1m_in": 0.52,
        "cost_per_1m_out": 2.08,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen-max",
        "name": "Qwen: Qwen-Max ",
        "cost_per_1m_in": 1.04,
        "cost_per_1m_out": 4.16,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.208,
        "context_window": 32768,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen-plus",
        "name": "Qwen: Qwen-Plus",
        "cost_per_1m_in": 0.26,
        "cost_per_1m_out": 0.78,
        "cost_per_1m_in_cached": 0.325,
        "cost_per_1m_out_cached": 0.052,
        "context_window": 1000000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen-turbo",
        "name": "Qwen: Qwen-Turbo",
        "cost_per_1m_in": 0.0325,
        "cost_per_1m_out": 0.13,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.0065,
        "context_window": 131072,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen-2.5-7b-instruct",
        "name": "Qwen: Qwen2.5 7B Instruct",
        "cost_per_1m_in": 0.04,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.04,
        "context_window": 32768,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-14b",
        "name": "Qwen: Qwen3 14B",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.24,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 20480,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-235b-a22b",
        "name": "Qwen: Qwen3 235B A22B",
        "cost_per_1m_in": 0.455,
        "cost_per_1m_out": 1.82,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 4096,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-235b-a22b-2507",
        "name": "Qwen: Qwen3 235B A22B Instruct 2507",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-235b-a22b-thinking-2507",
        "name": "Qwen: Qwen3 235B A22B Thinking 2507",
        "cost_per_1m_in": 0.1495,
        "cost_per_1m_out": 1.495,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-30b-a3b",
        "name": "Qwen: Qwen3 30B A3B",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 0.52,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 4096,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-30b-a3b-instruct-2507",
        "name": "Qwen: Qwen3 30B A3B Instruct 2507",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-30b-a3b-thinking-2507",
        "name": "Qwen: Qwen3 30B A3B Thinking 2507",
        "cost_per_1m_in": 0.08,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.08,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-32b",
        "name": "Qwen: Qwen3 32B",
        "cost_per_1m_in": 0.104,
        "cost_per_1m_out": 0.416,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 4096,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-8b",
        "name": "Qwen: Qwen3 8B",
        "cost_per_1m_in": 0.117,
        "cost_per_1m_out": 0.455,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 4096,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-coder-30b-a3b-instruct",
        "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.27,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 160000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-coder",
        "name": "Qwen: Qwen3 Coder 480B A35B",
        "cost_per_1m_in": 0.22,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-coder:free",
        "name": "Qwen: Qwen3 Coder 480B A35B (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 131000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-coder-flash",
        "name": "Qwen: Qwen3 Coder Flash",
        "cost_per_1m_in": 0.195,
        "cost_per_1m_out": 0.975,
        "cost_per_1m_in_cached": 0.24375,
        "cost_per_1m_out_cached": 0.039,
        "context_window": 1000000,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-coder-next",
        "name": "Qwen: Qwen3 Coder Next",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.11,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-coder-plus",
        "name": "Qwen: Qwen3 Coder Plus",
        "cost_per_1m_in": 0.65,
        "cost_per_1m_out": 3.25,
        "cost_per_1m_in_cached": 0.8125,
        "cost_per_1m_out_cached": 0.13,
        "context_window": 1000000,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-max",
        "name": "Qwen: Qwen3 Max",
        "cost_per_1m_in": 0.78,
        "cost_per_1m_out": 3.9,
        "cost_per_1m_in_cached": 0.975,
        "cost_per_1m_out_cached": 0.156,
        "context_window": 262144,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-max-thinking",
        "name": "Qwen: Qwen3 Max Thinking",
        "cost_per_1m_in": 0.78,
        "cost_per_1m_out": 3.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-next-80b-a3b-instruct",
        "name": "Qwen: Qwen3 Next 80B A3B Instruct",
        "cost_per_1m_in": 0.09,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-next-80b-a3b-instruct:free",
        "name": "Qwen: Qwen3 Next 80B A3B Instruct (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-next-80b-a3b-thinking",
        "name": "Qwen: Qwen3 Next 80B A3B Thinking",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "qwen/qwen3-vl-235b-a22b-instruct",
        "name": "Qwen: Qwen3 VL 235B A22B Instruct",
        "cost_per_1m_in": 0.26,
        "cost_per_1m_out": 1.04,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3-vl-235b-a22b-thinking",
        "name": "Qwen: Qwen3 VL 235B A22B Thinking",
        "cost_per_1m_in": 0.26,
        "cost_per_1m_out": 2.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3-vl-30b-a3b-instruct",
        "name": "Qwen: Qwen3 VL 30B A3B Instruct",
        "cost_per_1m_in": 0.29,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3-vl-30b-a3b-thinking",
        "name": "Qwen: Qwen3 VL 30B A3B Thinking",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 1.56,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3-vl-32b-instruct",
        "name": "Qwen: Qwen3 VL 32B Instruct",
        "cost_per_1m_in": 0.104,
        "cost_per_1m_out": 0.416,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3-vl-8b-instruct",
        "name": "Qwen: Qwen3 VL 8B Instruct",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.12,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3-vl-8b-thinking",
        "name": "Qwen: Qwen3 VL 8B Thinking",
        "cost_per_1m_in": 0.117,
        "cost_per_1m_out": 1.365,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.5-397b-a17b",
        "name": "Qwen: Qwen3.5 397B A17B",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.5-plus-02-15",
        "name": "Qwen: Qwen3.5 Plus 2026-02-15",
        "cost_per_1m_in": 0.26,
        "cost_per_1m_out": 1.56,
        "cost_per_1m_in_cached": 0.325,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.5-122b-a10b",
        "name": "Qwen: Qwen3.5-122B-A10B",
        "cost_per_1m_in": 0.26,
        "cost_per_1m_out": 2.08,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.5-27b",
        "name": "Qwen: Qwen3.5-27B",
        "cost_per_1m_in": 0.27,
        "cost_per_1m_out": 2.16,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.27,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.5-35b-a3b",
        "name": "Qwen: Qwen3.5-35B-A3B",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.1,
        "context_window": 262144,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.5-9b",
        "name": "Qwen: Qwen3.5-9B",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 26214,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.5-flash-02-23",
        "name": "Qwen: Qwen3.5-Flash",
        "cost_per_1m_in": 0.065,
        "cost_per_1m_out": 0.26,
        "cost_per_1m_in_cached": 0.08125,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen/qwen3.6-plus",
        "name": "Qwen: Qwen3.6 Plus",
        "cost_per_1m_in": 0.325,
        "cost_per_1m_out": 1.95,
        "cost_per_1m_in_cached": 0.40625,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "relace/relace-search",
        "name": "Relace: Relace Search",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 64000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "stepfun/step-3.5-flash",
        "name": "StepFun: Step 3.5 Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "tngtech/deepseek-r1t2-chimera",
        "name": "TNG: DeepSeek R1T2 Chimera",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.15,
        "context_window": 163840,
        "default_max_tokens": 81920,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "thedrummer/rocinante-12b",
        "name": "TheDrummer: Rocinante 12B",
        "cost_per_1m_in": 0.17,
        "cost_per_1m_out": 0.43,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "thedrummer/unslopnemo-12b",
        "name": "TheDrummer: UnslopNemo 12B",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32768,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "alibaba/tongyi-deepresearch-30b-a3b",
        "name": "Tongyi DeepResearch 30B A3B",
        "cost_per_1m_in": 0.09,
        "cost_per_1m_out": 0.45,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.09,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "upstage/solar-pro-3",
        "name": "Upstage: Solar Pro 3",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.015,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "xiaomi/mimo-v2-flash",
        "name": "Xiaomi: MiMo-V2-Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02,
        "context_window": 262144,
        "default_max_tokens": 16000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "xiaomi/mimo-v2-omni",
        "name": "Xiaomi: MiMo-V2-Omni",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.08,
        "context_window": 262144,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xiaomi/mimo-v2-pro",
        "name": "Xiaomi: MiMo-V2-Pro",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "xiaomi/mimo-v2.5",
        "name": "Xiaomi: MiMo-V2.5",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.08,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xiaomi/mimo-v2.5-pro",
        "name": "Xiaomi: MiMo-V2.5-Pro",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4-32b",
        "name": "Z.ai: GLM 4 32B ",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 12800,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.5",
        "name": "Z.ai: GLM 4.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.11,
        "context_window": 131072,
        "default_max_tokens": 48000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.5-air",
        "name": "Z.ai: GLM 4.5 Air",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 0.85,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.025,
        "context_window": 131072,
        "default_max_tokens": 49152,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.5-air:free",
        "name": "Z.ai: GLM 4.5 Air (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 48000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.5v",
        "name": "Z.ai: GLM 4.5V",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.11,
        "context_window": 65536,
        "default_max_tokens": 8192,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "z-ai/glm-4.6",
        "name": "Z.ai: GLM 4.6",
        "cost_per_1m_in": 0.39,
        "cost_per_1m_out": 1.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 102400,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.6v",
        "name": "Z.ai: GLM 4.6V",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "z-ai/glm-4.7",
        "name": "Z.ai: GLM 4.7",
        "cost_per_1m_in": 0.38,
        "cost_per_1m_out": 1.74,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.7-flash",
        "name": "Z.ai: GLM 4.7 Flash",
        "cost_per_1m_in": 0.06,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.01,
        "context_window": 202752,
        "default_max_tokens": 20275,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-5",
        "name": "Z.ai: GLM 5",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 2.55,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-5-turbo",
        "name": "Z.ai: GLM 5 Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.24,
        "context_window": 262144,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-5.1",
        "name": "Z.ai: GLM 5.1",
        "cost_per_1m_in": 1.4,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.26,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-5v-turbo",
        "name": "Z.ai: GLM 5V Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.24,
        "context_window": 202752,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "inclusionai/ling-2.6-flash:free",
        "name": "inclusionAI: Ling-2.6-flash (free)",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "x-ai/grok-3",
        "name": "xAI: Grok 3",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "x-ai/grok-3-beta",
        "name": "xAI: Grok 3 Beta",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "x-ai/grok-3-mini",
        "name": "xAI: Grok 3 Mini",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "x-ai/grok-3-mini-beta",
        "name": "xAI: Grok 3 Mini Beta",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.075,
        "context_window": 131072,
        "default_max_tokens": 13107,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "x-ai/grok-4",
        "name": "xAI: Grok 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.75,
        "context_window": 256000,
        "default_max_tokens": 25600,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "x-ai/grok-4-fast",
        "name": "xAI: Grok 4 Fast",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 2000000,
        "default_max_tokens": 15000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "x-ai/grok-4.1-fast",
        "name": "xAI: Grok 4.1 Fast",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 2000000,
        "default_max_tokens": 15000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "x-ai/grok-4.20",
        "name": "xAI: Grok 4.20",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 2000000,
        "default_max_tokens": 200000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "x-ai/grok-code-fast-1",
        "name": "xAI: Grok Code Fast 1",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.02,
        "context_window": 256000,
        "default_max_tokens": 5000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      }
    ],
    "default_headers": {
      "HTTP-Referer": "https://charm.land",
      "X-Title": "Crush"
    }
  },
  {
    "name": "QiniuCloud",
    "id": "qiniucloud",
    "api_key": "$QINIUCLOUD_API_KEY",
    "api_endpoint": "https://api.qnaigc.com/v1",
    "type": "openai-compat",
    "default_large_model_id": "minimax/minimax-m2.5",
    "default_small_model_id": "glm-4.5",
    "models": [
      {
        "id": "minimax/minimax-m2.5",
        "name": "Minimax/Minimax-M2.5",
        "cost_per_1m_in": 0.29,
        "cost_per_1m_out": 1.17,
        "cost_per_1m_in_cached": 0.29,
        "cost_per_1m_out_cached": 1.17,
        "context_window": 204800,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-5",
        "name": "Z-Ai/GLM 5",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0.56,
        "cost_per_1m_out_cached": 2.5,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.1",
        "name": "Minimax/Minimax-M2.1",
        "cost_per_1m_in": 0.29,
        "cost_per_1m_out": 1.17,
        "cost_per_1m_in_cached": 0.29,
        "cost_per_1m_out_cached": 1.17,
        "context_window": 204800,
        "default_max_tokens": 4096,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-thinking",
        "name": "Kimi K2 Thinking",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 2.22,
        "cost_per_1m_in_cached": 0.56,
        "cost_per_1m_out_cached": 2.22,
        "context_window": 256000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.7",
        "name": "Z-Ai/GLM 4.7",
        "cost_per_1m_in": 0.44,
        "cost_per_1m_out": 1.74,
        "cost_per_1m_in_cached": 0.44,
        "cost_per_1m_out_cached": 1.74,
        "context_window": 200000,
        "default_max_tokens": 4096,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2",
        "name": "Minimax/Minimax-M2",
        "cost_per_1m_in": 0.29,
        "cost_per_1m_out": 1.17,
        "cost_per_1m_in_cached": 0.29,
        "cost_per_1m_out_cached": 1.17,
        "context_window": 200000,
        "default_max_tokens": 4096,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "z-ai/glm-4.6",
        "name": "Z-AI/GLM 4.6",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 1.75,
        "cost_per_1m_in_cached": 1,
        "cost_per_1m_out_cached": 1.75,
        "context_window": 200000,
        "default_max_tokens": 4096,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.1-terminus",
        "name": "DeepSeek/DeepSeek-V3.1-Terminus",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 1.67,
        "cost_per_1m_in_cached": 0.56,
        "cost_per_1m_out_cached": 1.67,
        "context_window": 128000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek-v3.1",
        "name": "DeepSeek-V3.1",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 1.67,
        "cost_per_1m_in_cached": 0.56,
        "cost_per_1m_out_cached": 1.67,
        "context_window": 128000,
        "default_max_tokens": 32000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "doubao-seed-2.0-pro",
        "name": "Doubao Seed 2.0 Pro",
        "cost_per_1m_in": 0.44,
        "cost_per_1m_out": 2.22,
        "cost_per_1m_in_cached": 0.44,
        "cost_per_1m_out_cached": 2.22,
        "context_window": 256000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "doubao-seed-2.0-code",
        "name": "Doubao Seed 2.0 Code",
        "cost_per_1m_in": 0.44,
        "cost_per_1m_out": 2.22,
        "cost_per_1m_in_cached": 0.44,
        "cost_per_1m_out_cached": 2.22,
        "context_window": 256000,
        "default_max_tokens": 128000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "qwen3-coder-480b-a35b-instruct",
        "name": "Qwen3 Coder 480B A35B Instruct",
        "cost_per_1m_in": 0.83,
        "cost_per_1m_out": 3.33,
        "cost_per_1m_in_cached": 0.83,
        "cost_per_1m_out_cached": 3.33,
        "context_window": 262000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "glm-4.5",
        "name": "GLM 4.5",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 2.22,
        "cost_per_1m_in_cached": 0.56,
        "cost_per_1m_out_cached": 2.22,
        "context_window": 131072,
        "default_max_tokens": 98304,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-0905",
        "name": "Kimi K2 0905",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 2.22,
        "cost_per_1m_in_cached": 0.56,
        "cost_per_1m_out_cached": 2.22,
        "context_window": 256000,
        "default_max_tokens": 100000,
        "can_reason": true,
        "supports_attachments": false
      }
    ]
  },
  {
    "name": "Vercel",
    "id": "vercel",
    "api_key": "$VERCEL_API_KEY",
    "api_endpoint": "https://ai-gateway.vercel.sh/v1",
    "type": "vercel",
    "default_large_model_id": "anthropic/claude-sonnet-4",
    "default_small_model_id": "anthropic/claude-haiku-4.5",
    "models": [
      {
        "id": "anthropic/claude-3-haiku",
        "name": "Claude 3 Haiku",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-3.5-haiku",
        "name": "Claude 3.5 Haiku",
        "cost_per_1m_in": 0.8,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.08,
        "cost_per_1m_out_cached": 1,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-3.7-sonnet",
        "name": "Claude 3.7 Sonnet",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 3.75,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-haiku-4.5",
        "name": "Claude Haiku 4.5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 1.25,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4",
        "name": "Claude Opus 4",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 1.5,
        "cost_per_1m_out_cached": 18.75,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.1",
        "name": "Claude Opus 4.1",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 75,
        "cost_per_1m_in_cached": 1.5,
        "cost_per_1m_out_cached": 18.75,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.5",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 6.25,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.6",
        "name": "Claude Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 6.25,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-opus-4.7",
        "name": "Claude Opus 4.7",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 6.25,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-sonnet-4",
        "name": "Claude Sonnet 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 3.75,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-sonnet-4.5",
        "name": "Claude Sonnet 4.5",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 3.75,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "anthropic/claude-sonnet-4.6",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 3.75,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "none",
          "minimal",
          "low",
          "medium",
          "high",
          "xhigh"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "cohere/command-a",
        "name": "Command A",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3",
        "name": "DeepSeek V3 0324",
        "cost_per_1m_in": 0.77,
        "cost_per_1m_out": 0.77,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.1-terminus",
        "name": "DeepSeek V3.1 Terminus",
        "cost_per_1m_in": 0.27,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0.135,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.2",
        "name": "DeepSeek V3.2",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 0.42,
        "cost_per_1m_in_cached": 0.028,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.2-thinking",
        "name": "DeepSeek V3.2 Thinking",
        "cost_per_1m_in": 0.28,
        "cost_per_1m_out": 0.42,
        "cost_per_1m_in_cached": 0.028,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-r1",
        "name": "DeepSeek-R1",
        "cost_per_1m_in": 1.35,
        "cost_per_1m_out": 5.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "deepseek/deepseek-v3.1",
        "name": "DeepSeek-V3.1",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 1.68,
        "cost_per_1m_in_cached": 0.28,
        "cost_per_1m_out_cached": 0,
        "context_window": 163840,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral/devstral-2",
        "name": "Devstral 2",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral/devstral-small",
        "name": "Devstral Small 1.1",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral/devstral-small-2",
        "name": "Devstral Small 2",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "zai/glm-4.5-air",
        "name": "GLM 4.5 Air",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-4.5v",
        "name": "GLM 4.5V",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 66000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "zai/glm-4.6",
        "name": "GLM 4.6",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-4.7",
        "name": "GLM 4.7",
        "cost_per_1m_in": 2.25,
        "cost_per_1m_out": 2.75,
        "cost_per_1m_in_cached": 2.25,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-4.7-flash",
        "name": "GLM 4.7 Flash",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-4.7-flashx",
        "name": "GLM 4.7 FlashX",
        "cost_per_1m_in": 0.06,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.01,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-5",
        "name": "GLM 5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 202800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-5-turbo",
        "name": "GLM 5 Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 202800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-5.1",
        "name": "GLM 5.1",
        "cost_per_1m_in": 1.4,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 0,
        "context_window": 202800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-5v-turbo",
        "name": "GLM 5V Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "zai/glm-4.5",
        "name": "GLM-4.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai/glm-4.6v",
        "name": "GLM-4.6V",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "zai/glm-4.6v-flash",
        "name": "GLM-4.6V-Flash",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-chat",
        "name": "GPT 5 Chat",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-codex-max",
        "name": "GPT 5.1 Codex Max",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-codex-mini",
        "name": "GPT 5.1 Codex Mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-thinking",
        "name": "GPT 5.1 Thinking",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2",
        "name": "GPT 5.2",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2-pro",
        "name": "GPT 5.2 ",
        "cost_per_1m_in": 21,
        "cost_per_1m_out": 168,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2-chat",
        "name": "GPT 5.2 Chat",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.2-codex",
        "name": "GPT 5.2 Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.3-codex",
        "name": "GPT 5.3 Codex",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4",
        "name": "GPT 5.4",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.25,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4-mini",
        "name": "GPT 5.4 Mini",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 4.5,
        "cost_per_1m_in_cached": 0.075,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4-nano",
        "name": "GPT 5.4 Nano",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0.02,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.4-pro",
        "name": "GPT 5.4 Pro",
        "cost_per_1m_in": 30,
        "cost_per_1m_out": 180,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1050000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-oss-20b",
        "name": "GPT OSS 120B",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-oss-safeguard-20b",
        "name": "GPT OSS Safeguard 20B",
        "cost_per_1m_in": 0.075,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0.037,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-4-turbo",
        "name": "GPT-4 Turbo",
        "cost_per_1m_in": 10,
        "cost_per_1m_out": 30,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4.1",
        "name": "GPT-4.1",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4.1-mini",
        "name": "GPT-4.1 mini",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 1.6,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4.1-nano",
        "name": "GPT-4.1 nano",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0,
        "context_window": 1047576,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o",
        "name": "GPT-4o",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 1.25,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-4o-mini",
        "name": "GPT-4o mini",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0.075,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5",
        "name": "GPT-5",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-mini",
        "name": "GPT-5 mini",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-nano",
        "name": "GPT-5 nano",
        "cost_per_1m_in": 0.05,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.005,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-pro",
        "name": "GPT-5 pro",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 120,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5-codex",
        "name": "GPT-5-Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/gpt-5.1-instant",
        "name": "GPT-5.1 Instant",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.1-codex",
        "name": "GPT-5.1-Codex",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/gpt-5.3-chat",
        "name": "GPT-5.3 Chat",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 14,
        "cost_per_1m_in_cached": 0.175,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.0-flash",
        "name": "Gemini 2.0 Flash",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.0-flash-lite",
        "name": "Gemini 2.0 Flash Lite",
        "cost_per_1m_in": 0.075,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0.02,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-flash",
        "name": "Gemini 2.5 Flash",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-flash-lite",
        "name": "Gemini 2.5 Flash Lite",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.01,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-2.5-pro",
        "name": "Gemini 2.5 Pro",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0.125,
        "cost_per_1m_out_cached": 0,
        "context_window": 1048576,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3-flash",
        "name": "Gemini 3 Flash",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3-pro-preview",
        "name": "Gemini 3 Pro Preview",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3.1-flash-lite-preview",
        "name": "Gemini 3.1 Flash Lite Preview",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemini-3.1-pro-preview",
        "name": "Gemini 3.1 Pro Preview",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "google/gemma-4-26b-a4b-it",
        "name": "Gemma 4 26B A4B IT",
        "cost_per_1m_in": 0.13,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "google/gemma-4-31b-it",
        "name": "Gemma 4 31B IT",
        "cost_per_1m_in": 0.14,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "xai/grok-3",
        "name": "Grok 3 Beta",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.75,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "xai/grok-3-fast",
        "name": "Grok 3 Fast Beta",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 1.25,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "xai/grok-3-mini",
        "name": "Grok 3 Mini Beta",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0.075,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "xai/grok-3-mini-fast",
        "name": "Grok 3 Mini Fast Beta",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "xai/grok-4",
        "name": "Grok 4",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0.75,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4-fast-non-reasoning",
        "name": "Grok 4 Fast Non-Reasoning",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4-fast-reasoning",
        "name": "Grok 4 Fast Reasoning",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.1-fast-non-reasoning",
        "name": "Grok 4.1 Fast Non-Reasoning",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.1-fast-reasoning",
        "name": "Grok 4.1 Fast Reasoning",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.20-non-reasoning-beta",
        "name": "Grok 4.20 Beta Non-Reasoning",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.20-reasoning-beta",
        "name": "Grok 4.20 Beta Reasoning",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.20-multi-agent-beta",
        "name": "Grok 4.20 Multi Agent Beta",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.20-multi-agent",
        "name": "Grok 4.20 Multi-Agent",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.20-non-reasoning",
        "name": "Grok 4.20 Non-Reasoning",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "xai/grok-4.20-reasoning",
        "name": "Grok 4.20 Reasoning",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "xai/grok-code-fast-1",
        "name": "Grok Code Fast 1",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0.02,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "prime-intellect/intellect-3",
        "name": "INTELLECT 3",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "kwaipilot/kat-coder-pro-v2",
        "name": "Kat Coder Pro V2",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-0905",
        "name": "Kimi K2 0905",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2",
        "name": "Kimi K2 Instruct",
        "cost_per_1m_in": 0.57,
        "cost_per_1m_out": 2.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-thinking",
        "name": "Kimi K2 Thinking",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0.15,
        "cost_per_1m_out_cached": 0,
        "context_window": 262114,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-thinking-turbo",
        "name": "Kimi K2 Thinking Turbo",
        "cost_per_1m_in": 1.15,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0.15,
        "cost_per_1m_out_cached": 0,
        "context_window": 262114,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2-turbo",
        "name": "Kimi K2 Turbo",
        "cost_per_1m_in": 1.15,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0.15,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "moonshotai/kimi-k2.5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 0,
        "context_window": 262114,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "moonshotai/kimi-k2.6",
        "name": "Kimi K2.6",
        "cost_per_1m_in": 0.95,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.16,
        "cost_per_1m_out_cached": 0,
        "context_window": 262000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "meta/llama-3.1-70b",
        "name": "Llama 3.1 70B Instruct",
        "cost_per_1m_in": 0.72,
        "cost_per_1m_out": 0.72,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta/llama-3.1-8b",
        "name": "Llama 3.1 8B Instruct",
        "cost_per_1m_in": 0.22,
        "cost_per_1m_out": 0.22,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta/llama-3.2-11b",
        "name": "Llama 3.2 11B Vision Instruct",
        "cost_per_1m_in": 0.16,
        "cost_per_1m_out": 0.16,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "meta/llama-3.2-90b",
        "name": "Llama 3.2 90B Vision Instruct",
        "cost_per_1m_in": 0.72,
        "cost_per_1m_out": 0.72,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "meta/llama-3.3-70b",
        "name": "Llama 3.3 70B Instruct",
        "cost_per_1m_in": 0.72,
        "cost_per_1m_out": 0.72,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "meta/llama-4-maverick",
        "name": "Llama 4 Maverick 17B Instruct",
        "cost_per_1m_in": 0.24,
        "cost_per_1m_out": 0.97,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "meta/llama-4-scout",
        "name": "Llama 4 Scout 17B Instruct",
        "cost_per_1m_in": 0.17,
        "cost_per_1m_out": 0.66,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "meituan/longcat-flash-chat",
        "name": "LongCat Flash Chat",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "inception/mercury-2",
        "name": "Mercury 2",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0.025,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "inception/mercury-coder-small",
        "name": "Mercury Coder Small Beta",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "xiaomi/mimo-v2-flash",
        "name": "MiMo V2 Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0.01,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "xiaomi/mimo-v2-pro",
        "name": "MiMo V2 Pro",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2",
        "name": "MiniMax M2",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 205000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.1",
        "name": "MiniMax M2.1",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.1-lightning",
        "name": "MiniMax M2.1 Lightning",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.5",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.5-highspeed",
        "name": "MiniMax M2.5 High Speed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax/minimax-m2.7-highspeed",
        "name": "MiniMax M2.7 High Speed",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "minimax/minimax-m2.7",
        "name": "Minimax M2.7",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.06,
        "cost_per_1m_out_cached": 0.375,
        "context_window": 204800,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "mistral/ministral-3b",
        "name": "Ministral 3B",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral/ministral-8b",
        "name": "Ministral 8B",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral/codestral",
        "name": "Mistral Codestral",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral/mistral-medium",
        "name": "Mistral Medium 3.1",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistral/mistral-small",
        "name": "Mistral Small",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 32000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "nvidia/nemotron-nano-12b-v2-vl",
        "name": "Nvidia Nemotron Nano 12B V2 VL",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nvidia/nemotron-nano-9b-v2",
        "name": "Nvidia Nemotron Nano 9B V2",
        "cost_per_1m_in": 0.06,
        "cost_per_1m_out": 0.23,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral/pixtral-12b",
        "name": "Pixtral 12B 2409",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "mistral/pixtral-large",
        "name": "Pixtral Large",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "alibaba/qwen-3-32b",
        "name": "Qwen 3 32B",
        "cost_per_1m_in": 0.16,
        "cost_per_1m_out": 0.64,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-coder-30b-a3b",
        "name": "Qwen 3 Coder 30B A3B Instruct",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-max-thinking",
        "name": "Qwen 3 Max Thinking",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3.5-flash",
        "name": "Qwen 3.5 Flash",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.001,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "alibaba/qwen3.5-plus",
        "name": "Qwen 3.5 Plus",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 2.4,
        "cost_per_1m_in_cached": 0.04,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "alibaba/qwen-3.6-max-preview",
        "name": "Qwen 3.6 Max Preview",
        "cost_per_1m_in": 1.3,
        "cost_per_1m_out": 7.8,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 1.625,
        "context_window": 240000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "alibaba/qwen3.6-plus",
        "name": "Qwen 3.6 Plus",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0.1,
        "cost_per_1m_out_cached": 0.625,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "alibaba/qwen3-235b-a22b-thinking",
        "name": "Qwen3 235B A22B Thinking 2507",
        "cost_per_1m_in": 0.23,
        "cost_per_1m_out": 2.3,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 262114,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "alibaba/qwen-3-235b",
        "name": "Qwen3 235B A22b Instruct 2507",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0.6,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-coder",
        "name": "Qwen3 Coder 480B A35B Instruct",
        "cost_per_1m_in": 1.5,
        "cost_per_1m_out": 7.5,
        "cost_per_1m_in_cached": 0.3,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-coder-next",
        "name": "Qwen3 Coder Next",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 1.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-coder-plus",
        "name": "Qwen3 Coder Plus",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-max",
        "name": "Qwen3 Max",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-max-preview",
        "name": "Qwen3 Max Preview",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 6,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 262144,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen3-vl-thinking",
        "name": "Qwen3 VL 235B A22B Thinking",
        "cost_per_1m_in": 0.4,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "alibaba/qwen-3-14b",
        "name": "Qwen3-14B",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.24,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "alibaba/qwen-3-30b",
        "name": "Qwen3-30B-A3B",
        "cost_per_1m_in": 0.08,
        "cost_per_1m_out": 0.29,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 40960,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "bytedance/seed-1.6",
        "name": "Seed 1.6",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0.05,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "perplexity/sonar",
        "name": "Sonar",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 127000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "perplexity/sonar-pro",
        "name": "Sonar Pro",
        "cost_per_1m_in": 0,
        "cost_per_1m_out": 0,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "arcee-ai/trinity-large-preview",
        "name": "Trinity Large Preview",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131000,
        "default_max_tokens": 8000,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "arcee-ai/trinity-large-thinking",
        "name": "Trinity Large Thinking",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 262100,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/o1",
        "name": "o1",
        "cost_per_1m_in": 15,
        "cost_per_1m_out": 60,
        "cost_per_1m_in_cached": 7.5,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o3",
        "name": "o3",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 8,
        "cost_per_1m_in_cached": 0.5,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o3-pro",
        "name": "o3 Pro",
        "cost_per_1m_in": 20,
        "cost_per_1m_out": 80,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o3-deep-research",
        "name": "o3-deep-research",
        "cost_per_1m_in": 10,
        "cost_per_1m_out": 40,
        "cost_per_1m_in_cached": 2.5,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai/o3-mini",
        "name": "o3-mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.55,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai/o4-mini",
        "name": "o4-mini",
        "cost_per_1m_in": 1.1,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.275,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 8000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      }
    ],
    "default_headers": {
      "HTTP-Referer": "https://charm.land",
      "X-Title": "Crush"
    }
  },
  {
    "name": "Venice AI",
    "id": "venice",
    "api_key": "$VENICE_API_KEY",
    "api_endpoint": "https://api.venice.ai/api/v1",
    "type": "openai-compat",
    "default_large_model_id": "claude-opus-4-6-fast",
    "default_small_model_id": "mistral-small-2603",
    "models": [
      {
        "id": "claude-opus-4-5",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 6,
        "cost_per_1m_out": 30,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-6",
        "name": "Claude Opus 4.6",
        "cost_per_1m_in": 6,
        "cost_per_1m_out": 30,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-6-fast",
        "name": "Claude Opus 4.6 Fast",
        "cost_per_1m_in": 36,
        "cost_per_1m_out": 180,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-7",
        "name": "Claude Opus 4.7",
        "cost_per_1m_in": 6,
        "cost_per_1m_out": 30,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-5",
        "name": "Claude Sonnet 4.5",
        "cost_per_1m_in": 3.75,
        "cost_per_1m_out": 18.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-6",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 3.6,
        "cost_per_1m_out": 18,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "deepseek-v3.2",
        "name": "DeepSeek V3.2",
        "cost_per_1m_in": 0.33,
        "cost_per_1m_out": 0.48,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 160000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org-glm-4.6",
        "name": "GLM 4.6",
        "cost_per_1m_in": 0.85,
        "cost_per_1m_out": 2.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 16384,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org-glm-4.7",
        "name": "GLM 4.7",
        "cost_per_1m_in": 0.55,
        "cost_per_1m_out": 2.65,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 16384,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org-glm-4.7-flash",
        "name": "GLM 4.7 Flash",
        "cost_per_1m_in": 0.125,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "olafangensan-glm-4.7-flash-heretic",
        "name": "GLM 4.7 Flash Heretic",
        "cost_per_1m_in": 0.14,
        "cost_per_1m_out": 0.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 24000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "zai-org-glm-5",
        "name": "GLM 5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "z-ai-glm-5-turbo",
        "name": "GLM 5 Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "zai-org-glm-5-1",
        "name": "GLM 5.1",
        "cost_per_1m_in": 1.75,
        "cost_per_1m_out": 5.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 24000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "z-ai-glm-5v-turbo",
        "name": "GLM 5V Turbo",
        "cost_per_1m_in": 1.5,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai-gpt-4o-2024-11-20",
        "name": "GPT-4o",
        "cost_per_1m_in": 3.125,
        "cost_per_1m_out": 12.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai-gpt-4o-mini-2024-07-18",
        "name": "GPT-4o Mini",
        "cost_per_1m_in": 0.1875,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "openai-gpt-52",
        "name": "GPT-5.2",
        "cost_per_1m_in": 2.19,
        "cost_per_1m_out": 17.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "openai-gpt-52-codex",
        "name": "GPT-5.2 Codex",
        "cost_per_1m_in": 2.19,
        "cost_per_1m_out": 17.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai-gpt-53-codex",
        "name": "GPT-5.3 Codex",
        "cost_per_1m_in": 2.19,
        "cost_per_1m_out": 17.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai-gpt-54",
        "name": "GPT-5.4",
        "cost_per_1m_in": 3.13,
        "cost_per_1m_out": 18.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 131072,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai-gpt-54-mini",
        "name": "GPT-5.4 Mini",
        "cost_per_1m_in": 0.9375,
        "cost_per_1m_out": 5.625,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 400000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "openai-gpt-54-pro",
        "name": "GPT-5.4 Pro",
        "cost_per_1m_in": 37.5,
        "cost_per_1m_out": 225,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash-preview",
        "name": "Gemini 3 Flash Preview",
        "cost_per_1m_in": 0.7,
        "cost_per_1m_out": 3.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-1-pro-preview",
        "name": "Gemini 3.1 Pro Preview",
        "cost_per_1m_in": 2.5,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemma-4-uncensored",
        "name": "Gemma 4 Uncensored",
        "cost_per_1m_in": 0.1625,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "google-gemma-3-27b-it",
        "name": "Google Gemma 3 27B Instruct",
        "cost_per_1m_in": 0.12,
        "cost_per_1m_out": 0.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "google-gemma-4-26b-a4b-it",
        "name": "Google Gemma 4 26B A4B Instruct",
        "cost_per_1m_in": 0.1625,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8192,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "google-gemma-4-31b-it",
        "name": "Google Gemma 4 31B Instruct",
        "cost_per_1m_in": 0.175,
        "cost_per_1m_out": 0.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 8192,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "grok-41-fast",
        "name": "Grok 4.1 Fast",
        "cost_per_1m_in": 0.23,
        "cost_per_1m_out": 0.57,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 30000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "grok-4-20",
        "name": "Grok 4.20",
        "cost_per_1m_in": 2.27,
        "cost_per_1m_out": 6.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 2000000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "kimi-k2-thinking",
        "name": "Kimi K2 Thinking",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "kimi-k2-5",
        "name": "Kimi K2.5",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 3.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "kimi-k2-6",
        "name": "Kimi K2.6",
        "cost_per_1m_in": 0.56,
        "cost_per_1m_out": 3.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "llama-3.2-3b",
        "name": "Llama 3.2 3B",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.6,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "llama-3.3-70b",
        "name": "Llama 3.3 70B",
        "cost_per_1m_in": 0.7,
        "cost_per_1m_out": 2.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mercury-2",
        "name": "Mercury 2",
        "cost_per_1m_in": 0.3125,
        "cost_per_1m_out": 0.9375,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m25",
        "name": "MiniMax M2.5",
        "cost_per_1m_in": 0.34,
        "cost_per_1m_out": 1.19,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "minimax-m27",
        "name": "MiniMax M2.7",
        "cost_per_1m_in": 0.375,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 198000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "mistral-small-3-2-24b-instruct",
        "name": "Mistral Small 3.2 24B Instruct",
        "cost_per_1m_in": 0.09375,
        "cost_per_1m_out": 0.25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "mistral-small-2603",
        "name": "Mistral Small 4",
        "cost_per_1m_in": 0.1875,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "nvidia-nemotron-3-nano-30b-a3b",
        "name": "NVIDIA Nemotron 3 Nano 30B",
        "cost_per_1m_in": 0.075,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "nvidia-nemotron-cascade-2-30b-a3b",
        "name": "Nemotron Cascade 2 30B A3B",
        "cost_per_1m_in": 0.14,
        "cost_per_1m_out": 0.8,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "openai-gpt-oss-120b",
        "name": "OpenAI GPT OSS 120B",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-235b-a22b-instruct-2507",
        "name": "Qwen 3 235B A22B Instruct 2507",
        "cost_per_1m_in": 0.15,
        "cost_per_1m_out": 0.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-235b-a22b-thinking-2507",
        "name": "Qwen 3 235B A22B Thinking 2507",
        "cost_per_1m_in": 0.45,
        "cost_per_1m_out": 3.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 16384,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-480b-a35b-instruct-turbo",
        "name": "Qwen 3 Coder 480B Turbo",
        "cost_per_1m_in": 0.35,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-coder-480b-a35b-instruct",
        "name": "Qwen 3 Coder 480b",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-next-80b",
        "name": "Qwen 3 Next 80b",
        "cost_per_1m_in": 0.35,
        "cost_per_1m_out": 1.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-5-35b-a3b",
        "name": "Qwen 3.5 35B A3B",
        "cost_per_1m_in": 0.3125,
        "cost_per_1m_out": 1.25,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": true,
        "options": {
          "temperature": 1,
          "top_p": 0.95
        }
      },
      {
        "id": "qwen3-5-397b-a17b",
        "name": "Qwen 3.5 397B",
        "cost_per_1m_in": 0.75,
        "cost_per_1m_out": 4.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "qwen3-5-9b",
        "name": "Qwen 3.5 9B",
        "cost_per_1m_in": 0.1,
        "cost_per_1m_out": 0.15,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "qwen-3-6-plus",
        "name": "Qwen 3.6 Plus Uncensored",
        "cost_per_1m_in": 0.625,
        "cost_per_1m_out": 3.75,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 1000000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": true,
        "options": {
          "temperature": 0.7,
          "top_p": 0.8
        }
      },
      {
        "id": "e2ee-qwen3-30b-a3b-p",
        "name": "Qwen3 30B A3B",
        "cost_per_1m_in": 0.19,
        "cost_per_1m_out": 0.69,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 32768,
        "can_reason": false,
        "supports_attachments": false
      },
      {
        "id": "qwen3-vl-235b-a22b",
        "name": "Qwen3 VL 235B",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 1.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 16384,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "e2ee-qwen3-vl-30b-a3b-p",
        "name": "Qwen3 VL 30B A3B",
        "cost_per_1m_in": 0.25,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "e2ee-qwen3-5-122b-a10b",
        "name": "Qwen3.5 122B A10B",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 32768,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "arcee-trinity-large-thinking",
        "name": "Trinity Large Thinking",
        "cost_per_1m_in": 0.3125,
        "cost_per_1m_out": 1.125,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 256000,
        "default_max_tokens": 65536,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": false
      },
      {
        "id": "venice-uncensored-role-play",
        "name": "Venice Role Play Uncensored",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 2,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 4096,
        "can_reason": false,
        "supports_attachments": true
      },
      {
        "id": "venice-uncensored-1-2",
        "name": "Venice Uncensored 1.2",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 128000,
        "default_max_tokens": 8192,
        "can_reason": false,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "Google Vertex AI",
    "id": "vertexai",
    "type": "google-vertex",
    "default_large_model_id": "gemini-3.1-pro-preview",
    "default_small_model_id": "gemini-3-flash-preview",
    "models": [
      {
        "id": "gemini-3.1-pro-preview",
        "name": "Gemini 3.1 Pro (Regular)",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3.1-pro-preview-customtools",
        "name": "Gemini 3.1 Pro (Optimized for Coding Agents)",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-pro-preview",
        "name": "Gemini 3 Pro",
        "cost_per_1m_in": 2,
        "cost_per_1m_out": 12,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.2,
        "context_window": 1048576,
        "default_max_tokens": 64000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "high"
        ],
        "default_reasoning_effort": "high",
        "supports_attachments": true
      },
      {
        "id": "gemini-3-flash-preview",
        "name": "Gemini 3 Flash",
        "cost_per_1m_in": 0.5,
        "cost_per_1m_out": 3,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.05,
        "context_window": 1048576,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "minimal",
          "low",
          "medium",
          "high"
        ],
        "default_reasoning_effort": "minimal",
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-pro",
        "name": "Gemini 2.5 Pro",
        "cost_per_1m_in": 1.25,
        "cost_per_1m_out": 10,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.125,
        "context_window": 1048576,
        "default_max_tokens": 50000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "gemini-2.5-flash",
        "name": "Gemini 2.5 Flash",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 2.5,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0.03,
        "context_window": 1048576,
        "default_max_tokens": 50000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-6",
        "name": "Claude Sonnet 4.6",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 1000000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "max"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-sonnet-4-5-20250929",
        "name": "Claude Sonnet 4.5",
        "cost_per_1m_in": 3,
        "cost_per_1m_out": 15,
        "cost_per_1m_in_cached": 3.75,
        "cost_per_1m_out_cached": 0.3,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-6",
        "name": "Claude Opus 4.6",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 1000000,
        "default_max_tokens": 126000,
        "can_reason": true,
        "reasoning_levels": [
          "low",
          "medium",
          "high",
          "max"
        ],
        "default_reasoning_effort": "medium",
        "supports_attachments": true
      },
      {
        "id": "claude-opus-4-5-20251101",
        "name": "Claude Opus 4.5",
        "cost_per_1m_in": 5,
        "cost_per_1m_out": 25,
        "cost_per_1m_in_cached": 6.25,
        "cost_per_1m_out_cached": 0.5,
        "context_window": 200000,
        "default_max_tokens": 50000,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "claude-haiku-4-5-20251001",
        "name": "Claude 4.5 Haiku",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 5,
        "cost_per_1m_in_cached": 1.25,
        "cost_per_1m_out_cached": 0.09999999999999999,
        "context_window": 200000,
        "default_max_tokens": 32000,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "Zhipu",
    "id": "zhipu",
    "api_key": "$ZHIPU_API_KEY",
    "api_endpoint": "https://open.bigmodel.cn/api/paas/v4",
    "type": "openai-compat",
    "default_large_model_id": "glm-4.7",
    "default_small_model_id": "glm-4.7-flash",
    "models": [
      {
        "id": "glm-5.1",
        "name": "GLM-5.1",
        "cost_per_1m_in": 1.4,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-5-turbo",
        "name": "GLM-5-Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-5",
        "name": "GLM-5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.7",
        "name": "GLM-4.7",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 98000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.7-flash",
        "name": "GLM-4.7 Flash",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.01,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 65550,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.6",
        "name": "GLM-4.6",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 102400,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.6v",
        "name": "GLM-4.6V",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "glm-4.5",
        "name": "GLM-4.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 49152,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.5-air",
        "name": "GLM-4.5-Air",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 49152,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.5v",
        "name": "GLM-4.5V",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 65536,
        "default_max_tokens": 8192,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  },
  {
    "name": "Zhipu Coding",
    "id": "zhipu-coding",
    "api_key": "$ZHIPU_API_KEY",
    "api_endpoint": "https://open.bigmodel.cn/api/coding/paas/v4",
    "type": "openai-compat",
    "default_large_model_id": "glm-4.7",
    "default_small_model_id": "glm-4.7-flash",
    "models": [
      {
        "id": "glm-5.1",
        "name": "GLM-5.1",
        "cost_per_1m_in": 1.4,
        "cost_per_1m_out": 4.4,
        "cost_per_1m_in_cached": 0.26,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-5-turbo",
        "name": "GLM-5-Turbo",
        "cost_per_1m_in": 1.2,
        "cost_per_1m_out": 4,
        "cost_per_1m_in_cached": 0.24,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 128000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-5",
        "name": "GLM-5",
        "cost_per_1m_in": 1,
        "cost_per_1m_out": 3.2,
        "cost_per_1m_in_cached": 0.2,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.7",
        "name": "GLM-4.7",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 98000,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.7-flash",
        "name": "GLM-4.7 Flash",
        "cost_per_1m_in": 0.07,
        "cost_per_1m_out": 0.4,
        "cost_per_1m_in_cached": 0.01,
        "cost_per_1m_out_cached": 0,
        "context_window": 200000,
        "default_max_tokens": 65550,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.6",
        "name": "GLM-4.6",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 204800,
        "default_max_tokens": 102400,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.6v",
        "name": "GLM-4.6V",
        "cost_per_1m_in": 0.3,
        "cost_per_1m_out": 0.9,
        "cost_per_1m_in_cached": 0,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 65536,
        "can_reason": true,
        "supports_attachments": true
      },
      {
        "id": "glm-4.5",
        "name": "GLM-4.5",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 2.2,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 49152,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.5-air",
        "name": "GLM-4.5-Air",
        "cost_per_1m_in": 0.2,
        "cost_per_1m_out": 1.1,
        "cost_per_1m_in_cached": 0.03,
        "cost_per_1m_out_cached": 0,
        "context_window": 131072,
        "default_max_tokens": 49152,
        "can_reason": true,
        "supports_attachments": false
      },
      {
        "id": "glm-4.5v",
        "name": "GLM-4.5V",
        "cost_per_1m_in": 0.6,
        "cost_per_1m_out": 1.8,
        "cost_per_1m_in_cached": 0.11,
        "cost_per_1m_out_cached": 0,
        "context_window": 65536,
        "default_max_tokens": 8192,
        "can_reason": true,
        "supports_attachments": true
      }
    ]
  }
]