https://info.smol.services/

GPT-4 32k: no wait
OpenAI o1: no wait
OpenAI o1 mini: no wait
GPT-4o Mini / 3.5 Turbo: no wait
GPT-4: no wait
GPT-4 Turbo: no wait
GPT-4o: no wait
Mistral 7B: no wait
Mistral Nemo: no wait
Mistral Medium: no wait
Mistral Large: no wait
Gemini Flash: no wait
Gemini Pro: no wait
Deepseek Chat: no wait
OpenAI o3 mini: no wait


Service Info

{
  "uptime": 18882,
  "endpoints": {
    "openai": "https://smol.services/proxy/openai",
    "google-ai": "https://smol.services/proxy/google-ai",
    "mistral-ai": "https://smol.services/proxy/mistral-ai",
    "deepseek": "https://smol.services/proxy/deepseek"
  },
  "proompts": 1913,
  "tookens": "36.64m",
  "proomptersNow": 3,
  "openaiKeys": 83,
  "openaiOrgs": 63,
  "google-aiKeys": 144,
  "mistral-aiKeys": 15,
  "deepseekKeys": 63,
  "turbo": {
    "usage": "0 tokens",
    "activeKeys": 22,
    "revokedKeys": 22,
    "overQuotaKeys": 39,
    "trialKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o1-mini": {
    "usage": "0 tokens",
    "activeKeys": 19,
    "overQuotaKeys": 39,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4o": {
    "usage": "28.21m tokens",
    "activeKeys": 22,
    "overQuotaKeys": 22,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-turbo": {
    "usage": "1.15m tokens",
    "activeKeys": 22,
    "overQuotaKeys": 22,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o1": {
    "usage": "157.4k tokens",
    "activeKeys": 18,
    "overQuotaKeys": 21,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "o3-mini": {
    "usage": "74.1k tokens",
    "activeKeys": 19,
    "overQuotaKeys": 21,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4": {
    "usage": "109.5k tokens",
    "activeKeys": 21,
    "overQuotaKeys": 22,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gpt4-32k": {
    "usage": "0 tokens",
    "activeKeys": 2,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gemini-pro": {
    "usage": "4.12m tokens",
    "activeKeys": 36,
    "revokedKeys": 108,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "gemini-flash": {
    "usage": "427.8k tokens",
    "activeKeys": 36,
    "revokedKeys": 99,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-small": {
    "usage": "0 tokens",
    "activeKeys": 15,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-tiny": {
    "usage": "0 tokens",
    "activeKeys": 15,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-medium": {
    "usage": "0 tokens",
    "activeKeys": 15,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "mistral-large": {
    "usage": "202.2k tokens",
    "activeKeys": 15,
    "revokedKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "deepseek": {
    "usage": "2.19m tokens",
    "activeKeys": 59,
    "revokedKeys": 4,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "no wait"
  },
  "config": {
    "gatekeeper": "user_token",
    "maxIpsAutoBan": "false",
    "captchaMode": "none",
    "powTokenHours": "24",
    "powTokenMaxIps": "2",
    "powDifficultyLevel": "low",
    "powChallengeTimeout": "30",
    "textModelRateLimit": "6",
    "imageModelRateLimit": "5",
    "maxContextTokensOpenAI": "0",
    "maxContextTokensAnthropic": "80000",
    "maxOutputTokensOpenAI": "65535",
    "maxOutputTokensAnthropic": "2048",
    "allowAwsLogging": "false",
    "promptLogging": "false",
    "tokenQuota": {
      "deepseek": "0",
      "turbo": "0",
      "gpt4": "0",
      "gpt4-32k": "0",
      "gpt4-turbo": "0",
      "gpt4o": "0",
      "o1": "0",
      "o1-mini": "0",
      "o3-mini": "0",
      "dall-e": "0",
      "claude": "4000000",
      "claude-opus": "2500000",
      "gemini-flash": "0",
      "gemini-pro": "0",
      "gemini-ultra": "0",
      "mistral-tiny": "0",
      "mistral-small": "0",
      "mistral-medium": "0",
      "mistral-large": "0",
      "aws-claude": "8000000",
      "aws-claude-opus": "5000000",
      "aws-mistral-tiny": "0",
      "aws-mistral-small": "0",
      "aws-mistral-medium": "0",
      "aws-mistral-large": "0",
      "gcp-claude": "4000000",
      "gcp-claude-opus": "0",
      "azure-turbo": "0",
      "azure-gpt4": "0",
      "azure-gpt4-32k": "0",
      "azure-gpt4-turbo": "0",
      "azure-gpt4o": "0",
      "azure-dall-e": "0",
      "azure-o1": "0",
      "azure-o1-mini": "0",
      "azure-o3-mini": "0"
    },
    "quotaRefreshPeriod": "daily",
    "allowOpenAIToolUsage": "1",
    "tokensPunishmentFactor": "0"
  },
  "build": "d2b37b8 (modified) (main@penurin/oai-reverse-proxy)"
}
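
The "endpoints" object above lists the four reverse-proxy base URLs, and the config reports "gatekeeper": "user_token", so requests need a token issued by the proxy operator. Below is a minimal sketch of a chat-completion call through the OpenAI endpoint. It assumes each endpoint behaves as an OpenAI-compatible base URL (with the standard /v1/chat/completions route appended) and that the user token is sent as a Bearer Authorization header; neither detail is confirmed by the status page itself.

# Minimal sketch, not confirmed by the page above. Assumptions:
#   - each listed endpoint is an OpenAI-compatible base URL, so the standard
#     /v1/chat/completions route can be appended to it;
#   - the "user_token" gatekeeper accepts the issued token as a Bearer token
#     in the Authorization header.
import requests

PROXY_BASE = "https://smol.services/proxy/openai"  # from the "endpoints" object above
USER_TOKEN = "<your-user-token>"                   # placeholder; issued by the proxy operator

resp = requests.post(
    f"{PROXY_BASE}/v1/chat/completions",
    headers={"Authorization": f"Bearer {USER_TOKEN}"},
    json={
        "model": "gpt-4o",  # one of the models reported as "no wait"
        "messages": [{"role": "user", "content": "Hello"}],
        "max_tokens": 128,
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])

For context, the config also reports "textModelRateLimit": "6" (presumably prompts per minute) and "quotaRefreshPeriod": "daily"; non-zero "tokenQuota" entries such as "claude": "4000000" appear to cap daily token usage for that model family, while "0" presumably means no quota is enforced.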