ihbs02 / RTFM

Estimated Wait Times

If the AI is busy, your prompt will be processed when a slot frees up.

Oobabooga WebUI: {{wait_time}}

Model: {{model_name}}

Server Greeting

Please Read The Fucking Manual before using this proxy. Please. I warned you.

Server settings : {{server_settings}}


Service Info

{
  "uptime": {{uptime}},
  "endpoints": {
    "Oobabooga WebUI": "{{server_link}}"
  },
  "proompts": "{{proompts}}",
  "input tookens": "{{input_tookens}}",
  "output tookens": "{{output_tookens}}",
  "proomptersNow": "molu",
  "openaiKeys": 0,
  "anthropicKeys": 0,
  "openaiOrgs": 0,
  "Oobabooga WebUI": {
    "usage": "{{tookens}} tokens",
    "activeKeys": 1,
    "trialKeys": 0,
    "revokedKeys": 0,
    "overQuotaKeys": 0,
    "proomptersInQueue": 0,
    "estimatedQueueTime": "molu"
  },
  "config": {
    "gatekeeper": "none",
    "modelRateLimit": "none",
    "maxContextTokensOobabooga": "8192",
    "maxContextTokensOpenAI": "0",
    "maxContextTokensAnthropic": "0",
    "maxOutputTokensOobabooga": "0",
    "maxOutputTokensOpenAI": "1000",
    "maxOutputTokensAnthropic": "400",
    "allowedModelFamilies": "Oobabooga",
    "rejectDisallowed": "false",
    "rejectMessage": "This content violates Arcalive AiChat Channel's acceptable use policy.",
    "promptLogging": "true",
    "tokenQuota": {
      "Oobabooga WebUI": "1",
      "turbo": "0",
      "gpt4": "0",
      "gpt4-32k": "0",
      "claude": "0"
    }
  },
  "build": "675d4b (main@ihbs02/oai-reverse-proxy)"
}