# Mirror of https://github.com/memohai/Memoh.git (synced 2026-04-25 07:00:48 +09:00)
# Commit 897cc32194:
#   Replace FontAwesome/CDN brand icons with local SVG-based Vue components in a
#   new shared @memoh/icon package. Provider icon URLs in conf/providers YAML
#   files are replaced with preset names, intercepted by ProviderIcon component
#   on the frontend. SearchProviderLogo and ChannelIcon components are migrated
#   to @memoh/icon. All icon containers now use a unified circular gray
#   (rounded-full bg-muted) style. Adds wechat and matrix channel icons.
name: Ollama
client_type: openai-completions
icon: ollama
base_url: http://127.0.0.1:11434/v1

models:
  - model_id: "deepseek-v3.1:671b"
    name: DeepSeek V3.1
    type: chat
    config:
      compatibilities: [tool-call, reasoning]
      context_window: 163840

  - model_id: "gpt-oss:20b"
    name: GPT-OSS 20B
    type: chat
    config:
      compatibilities: [tool-call, reasoning]
      context_window: 131072

  - model_id: "gpt-oss:120b"
    name: GPT-OSS 120B
    type: chat
    config:
      compatibilities: [tool-call, reasoning]
      context_window: 131072

  - model_id: "qwen3-coder:480b"
    name: Qwen3 Coder 480B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 262144

  - model_id: deepseek-r1
    name: DeepSeek R1
    type: chat
    config:
      compatibilities: [reasoning]
      context_window: 65536

  - model_id: deepseek-v3
    name: DeepSeek V3 671B
    type: chat
    config:
      context_window: 65536

  - model_id: llama3.1
    name: Llama 3.1 8B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 128000

  - model_id: "llama3.1:70b"
    name: Llama 3.1 70B
    type: chat
    config:
      context_window: 128000

  - model_id: "llama3.1:405b"
    name: Llama 3.1 405B
    type: chat
    config:
      context_window: 128000

  - model_id: codellama
    name: Code Llama 7B
    type: chat
    config:
      context_window: 16384

  - model_id: "codellama:13b"
    name: Code Llama 13B
    type: chat
    config:
      context_window: 16384

  - model_id: "codellama:34b"
    name: Code Llama 34B
    type: chat
    config:
      context_window: 16384

  - model_id: "codellama:70b"
    name: Code Llama 70B
    type: chat
    config:
      context_window: 16384

  - model_id: qwq
    name: QwQ 32B
    type: chat
    config:
      compatibilities: [tool-call, reasoning]
      context_window: 128000

  - model_id: qwen3
    name: Qwen3 7B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 65536

  - model_id: "qwen2.5:0.5b"
    name: Qwen2.5 0.5B
    type: chat
    config:
      context_window: 128000

  - model_id: "qwen2.5:1.5b"
    name: Qwen2.5 1.5B
    type: chat
    config:
      context_window: 128000

  - model_id: qwen2.5
    name: Qwen2.5 7B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 128000

  - model_id: "qwen2.5:72b"
    name: Qwen2.5 72B
    type: chat
    config:
      context_window: 128000

  - model_id: codeqwen
    name: CodeQwen1.5 7B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 65536

  - model_id: "qwen2:0.5b"
    name: Qwen2 0.5B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 128000

  - model_id: "qwen2:1.5b"
    name: Qwen2 1.5B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 128000

  - model_id: qwen2
    name: Qwen2 7B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 128000

  - model_id: "qwen2:72b"
    name: Qwen2 72B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 128000

  - model_id: "gemma2:2b"
    name: Gemma 2 2B
    type: chat
    config:
      context_window: 8192

  - model_id: gemma2
    name: Gemma 2 9B
    type: chat
    config:
      context_window: 8192

  - model_id: "gemma2:27b"
    name: Gemma 2 27B
    type: chat
    config:
      context_window: 8192

  - model_id: "codegemma:2b"
    name: CodeGemma 2B
    type: chat
    config:
      context_window: 8192

  - model_id: codegemma
    name: CodeGemma 7B
    type: chat
    config:
      context_window: 8192

  - model_id: phi3
    name: Phi-3 3.8B
    type: chat
    config:
      context_window: 128000

  - model_id: "phi3:14b"
    name: Phi-3 14B
    type: chat
    config:
      context_window: 128000

  - model_id: wizardlm2
    name: WizardLM 2 7B
    type: chat
    config:
      context_window: 32768

  - model_id: "wizardlm2:8x22b"
    name: WizardLM 2 8x22B
    type: chat
    config:
      context_window: 65536

  - model_id: mathstral
    name: MathΣtral 7B
    type: chat
    config:
      context_window: 32768

  - model_id: mistral
    name: Mistral 7B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 32768

  - model_id: mixtral
    name: Mixtral 8x7B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 32768

  - model_id: "mixtral:8x22b"
    name: Mixtral 8x22B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 65536

  - model_id: mistral-large
    name: Mistral Large 123B
    type: chat
    config:
      context_window: 128000

  - model_id: mistral-nemo
    name: Mistral Nemo 12B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 128000

  - model_id: codestral
    name: Codestral 22B
    type: chat
    config:
      context_window: 32768

  - model_id: aya
    name: Aya 23 8B
    type: chat
    config:
      context_window: 8192

  - model_id: "aya:35b"
    name: Aya 23 35B
    type: chat
    config:
      context_window: 8192

  - model_id: command-r
    name: Command R 35B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 131072

  - model_id: command-r-plus
    name: Command R+ 104B
    type: chat
    config:
      compatibilities: [tool-call]
      context_window: 131072

  - model_id: deepseek-v2
    name: DeepSeek V2 16B
    type: chat
    config:
      context_window: 32768

  - model_id: "deepseek-v2:236b"
    name: DeepSeek V2 236B
    type: chat
    config:
      context_window: 128000

  - model_id: deepseek-coder-v2
    name: DeepSeek Coder V2 16B
    type: chat
    config:
      context_window: 128000

  - model_id: "deepseek-coder-v2:236b"
    name: DeepSeek Coder V2 236B
    type: chat
    config:
      context_window: 128000

  - model_id: llava
    name: LLaVA 7B
    type: chat
    config:
      compatibilities: [vision]
      context_window: 4096

  - model_id: "llava:13b"
    name: LLaVA 13B
    type: chat
    config:
      compatibilities: [vision]
      context_window: 4096

  - model_id: "llava:34b"
    name: LLaVA 34B
    type: chat
    config:
      compatibilities: [vision]
      context_window: 4096

  - model_id: minicpm-v
    name: MiniCPM-V 8B
    type: chat
    config:
      compatibilities: [vision]
      context_window: 128000