# LLMServer / app/config.yaml
# AurelioAguirre — commit bd89e28 ("Fixed Dockerfile v10"), 516 Bytes
# (raw / history / blame header from the hosting UI, converted to YAML
# comments so the file parses cleanly)
---
# LLM API server configuration.
# NOTE(review): indentation was lost in the pasted source; the nesting below
# is reconstructed from key semantics — confirm against the consuming code.

# Network binding for the HTTP server.
server:
  host: "0.0.0.0"  # listen on all interfaces (container-friendly)
  port: 8000

# Model location and text-generation parameters.
model:
  base_path: "."  # directory model folders are resolved against
  generation:
    max_new_tokens: 256
    do_sample: true          # sampling on, so temperature takes effect
    temperature: 0.7
    repetition_penalty: 1.1
  # Model used when a request does not name one.
  # NOTE(review): "defaults" may belong at top level rather than under
  # "model" — verify against the config loader.
  defaults:
    model_name: "huihui-ai/Qwen2.5-Coder-32B-Instruct-abliterated"

# Filesystem layout (presumably relative to the app working directory —
# confirm against the code that joins these paths).
folders:
  models: "models"
  cache: "app/.cache"
  logs: "logs"

# Python logging-style configuration.
logging:
  level: "INFO"
  # %-style LogRecord format; quoted because a plain scalar may not begin
  # with the "%" indicator character.
  format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
  file: "llm_api.log"

# HTTP API surface.
api:
  version: "v1"
  prefix: "/api"
  cors:
    # NOTE(review): wildcard origins combined with credentials: true is
    # rejected by browsers (Fetch/CORS spec forbids "*" with credentialed
    # requests) — either list explicit origins or set credentials: false.
    origins: ["*"]
    credentials: true