# config.yaml — LSCore service configuration (30 lines, 867 bytes in the original listing)
---
# LSCore service configuration.
name: Test LSCore

# HTTP server settings.
service:
  host: 0.0.0.0  # bind on all interfaces — NOTE(review): fine for containers; confirm intended for bare-metal
  port: 8080
  auth_enabled: false
  workers: 1
  color_log: true
  access_log: true

# Prompt customization.
customization:
  # system-prompt from here: https://github.com/road-core/service/blob/9d65d15a4d1dec47e5aac15ee86fef39db975006/ols/customize/rhdh/prompts.py#L14
  system_prompt_path: lightspeed_system_prompt.txt

# Llama Stack backend: connect to a remote server, not an in-process library.
llama_stack:
  use_as_library_client: false
  # url: http://llama-stack:8321
  url: "http://host.docker.internal:8321"

# Feedback / transcript capture (paths are inside the container).
user_data_collection:
  feedback_enabled: true
  feedback_storage: "/tmp/data/feedback"
  transcripts_enabled: true
  transcripts_storage: "/tmp/data/transcripts"

# No authentication — test configuration only.
authentication:
  module: "noop"

# MCP tool servers exposed to the model.
mcp_servers:
  - name: "server1"
    provider_id: "model-context-protocol"
    url: "http://host.docker.internal:8000/mcp/"

# Default inference model/provider. Quoted: the value contains a ':'.
inference:
  default_model: "vllm-inference/qwen3:4b"
  default_provider: vllm-inference