chore: update bifrost phoenix and ollama configs

This commit is contained in:
Sun-ZhenXing
2026-03-28 23:41:32 +08:00
parent 441b8a74f5
commit 54e549724d
10 changed files with 13 additions and 17 deletions
+1 -1
View File
@@ -1,5 +1,5 @@
# Bifrost Gateway Version
BIFROST_VERSION=v1.3.63
BIFROST_VERSION=v1.4.17
# Port to bind to on the host machine
BIFROST_PORT=28080
+1 -1
View File
@@ -12,7 +12,7 @@ Bifrost is a lightweight, high-performance LLM gateway that supports multiple mo
## Configuration
- `BIFROST_VERSION`: The version of the Bifrost image, default is `v1.3.63`.
- `BIFROST_VERSION`: The version of the Bifrost image, default is `v1.4.17`.
- `BIFROST_PORT`: The port for the Bifrost service, default is `28080`.
### Telemetry
+1 -1
View File
@@ -12,7 +12,7 @@ Bifrost 是一个轻量级、高性能的 LLM 网关,支持多种模型和提
## 配置
- `BIFROST_VERSION`: Bifrost 镜像的版本,默认为 `v1.3.63`
- `BIFROST_VERSION`: Bifrost 镜像的版本,默认为 `v1.4.17`
- `BIFROST_PORT`: Bifrost 服务的端口,默认为 `28080`
### 遥测 (Telemetry)
+1 -1
View File
@@ -9,7 +9,7 @@ x-defaults: &defaults
services:
bifrost:
<<: *defaults
image: ${GLOBAL_REGISTRY:-}maximhq/bifrost:${BIFROST_VERSION:-v1.3.63}
image: ${GLOBAL_REGISTRY:-}maximhq/bifrost:${BIFROST_VERSION:-v1.4.17}
volumes:
- bifrost_data:/app/data
ports:
+3 -3
View File
@@ -15,7 +15,7 @@ healthCheckTimeout: 300
# Macro definitions: reusable command snippets for model configuration.
# Reference with $${macro-name} inside cmd fields.
macros:
"llama-server": >
llama-server: >
/app/llama-server
--port ${PORT}
@@ -25,14 +25,14 @@ models:
# The volume `llama_swap_models` is mounted to /root/.cache/llama.cpp inside
# the container. Place your .gguf files there and reference them with
# /root/.cache/llama.cpp/<filename>.gguf
"my-local-model":
my-local-model:
# ${PORT} is automatically assigned by llama-swap
cmd: >
$${llama-server}
--model /root/.cache/llama.cpp/model.gguf
--ctx-size 4096
--n-gpu-layers 0
proxy: "http://localhost:${PORT}"
proxy: 'http://localhost:${PORT}'
# Automatically unload the model after 15 minutes of inactivity
ttl: 900
+2 -6
View File
@@ -20,12 +20,8 @@ services:
healthcheck:
test:
- CMD
- wget
- --no-verbose
- --tries=1
- --spider
- 'http://localhost:11434/'
- ollama
- list
interval: 30s
timeout: 10s
retries: 3
+1 -1
View File
@@ -1,5 +1,5 @@
# Phoenix version
PHOENIX_VERSION=13.3.0
PHOENIX_VERSION=13.19.2
# Timezone
TZ=UTC
+1 -1
View File
@@ -32,7 +32,7 @@ This project supports two modes of operation via Docker Compose profiles:
| Variable Name | Description | Default Value |
| -------------------------------- | ---------------------------------------- | ----------------- |
| COMPOSE_PROFILES | Active profiles (`sqlite` or `postgres`) | `sqlite` |
| PHOENIX_VERSION | Phoenix image version | `13.3.0` |
| PHOENIX_VERSION | Phoenix image version | `13.19.2` |
| PHOENIX_PORT_OVERRIDE | Host port for Phoenix UI and HTTP API | `6006` |
| PHOENIX_GRPC_PORT_OVERRIDE | Host port for OTLP gRPC collector | `4317` |
| PHOENIX_PROMETHEUS_PORT_OVERRIDE | Host port for Prometheus metrics | `9090` |
+1 -1
View File
@@ -32,7 +32,7 @@ Arize Phoenix 是一个开源的 AI 可观测性平台,专为 LLM 应用设计
| 变量名 | 描述 | 默认值 |
| -------------------------------- | ---------------------------------------- | ----------------- |
| COMPOSE_PROFILES | 激活的配置文件(`sqlite` 或 `postgres`) | `sqlite` |
| PHOENIX_VERSION | Phoenix 镜像版本 | `13.3.0` |
| PHOENIX_VERSION | Phoenix 镜像版本 | `13.19.2` |
| PHOENIX_PORT_OVERRIDE | Phoenix UI 和 HTTP API 的主机端口 | `6006` |
| PHOENIX_GRPC_PORT_OVERRIDE | OTLP gRPC 采集器的主机端口 | `4317` |
| PHOENIX_PROMETHEUS_PORT_OVERRIDE | Prometheus 指标的主机端口 | `9090` |
+1 -1
View File
@@ -11,7 +11,7 @@ x-defaults: &defaults
x-phoenix-common: &phoenix-common
<<: *defaults
image: ${GLOBAL_REGISTRY:-}arizephoenix/phoenix:${PHOENIX_VERSION:-13.3.0}
image: ${GLOBAL_REGISTRY:-}arizephoenix/phoenix:${PHOENIX_VERSION:-13.19.2}
ports:
- '${PHOENIX_PORT_OVERRIDE:-6006}:6006' # UI and OTLP HTTP collector
- '${PHOENIX_GRPC_PORT_OVERRIDE:-4317}:4317' # OTLP gRPC collector