Mirror of https://github.com/geoffsee/predict-otron-9001.git
Synced 2025-09-08 22:46:44 +00:00

Commit: Add Docker Compose and Kubernetes metadata to Cargo.toml files
This commit is contained in:
@@ -25,11 +25,15 @@ rand = "0.8.5"
async-openai = "0.28.3"
once_cell = "1.19.0"


# generates docker compose configuration
[package.metadata.compose]
image = "ghcr.io/geoffsee/embeddings-service:latest"
port = 8080


# generates kubernetes manifests
[package.metadata.kube]
image = "ghcr.io/geoffsee/embeddings-service:latest"
replicas = 1
port = 8080
resources.cpu = "500m"
resources.memory = "256Mi"
#ingress.host = "my-service.example.com"
#env = { RUST_LOG = "info", DATABASE_URL = "postgres://..." }
port = 8080
@@ -83,11 +83,15 @@ tokio = "1.43.0"
anyhow = { version = "1", features = ["backtrace"] }
bindgen_cuda = { version = "0.1.1", optional = true }


# generates docker compose configuration
[package.metadata.compose]
image = "ghcr.io/geoffsee/inference-engine:latest"
port = 8080


# generates kubernetes manifests
[package.metadata.kube]
image = "ghcr.io/geoffsee/inference-service:latest"
replicas = 1
port = 8080
resources.cpu = "500m"
resources.memory = "256Mi"
#ingress.host = "my-service.example.com"
#env = { RUST_LOG = "info", DATABASE_URL = "postgres://..." }

@@ -51,11 +51,14 @@ features = [
    "js", # Enable JavaScript RNG for WASM targets
]

# generates docker compose configuration
[package.metadata.compose]
image = "ghcr.io/geoffsee/leptos-chat:latest"
port = 8788


# generates kubernetes manifests
[package.metadata.kube]
image = "ghcr.io/geoffsee/leptos-chat:latest"
replicas = 1
port = 8788
resources.cpu = "500m"
resources.memory = "256Mi"
#ingress.host = "my-service.example.com"
#env = { RUST_LOG = "info", DATABASE_URL = "postgres://..." }
port = 8788
@@ -25,12 +25,15 @@ embeddings-engine = { path = "../embeddings-engine" }
# Dependencies for inference functionality
inference-engine = { path = "../inference-engine" }

# generates docker compose configuration
[package.metadata.compose]
name = "predict-otron-9000"
image = "ghcr.io/geoffsee/predict-otron-9000:latest"
port = 8080


# generates kubernetes manifests
[package.metadata.kube]
image = "ghcr.io/geoffsee/predict-otron-9000:latest"
replicas = 1
port = 8080
resources.cpu = "500m"
resources.memory = "256Mi"
#ingress.host = "my-service.example.com"
#env = { RUST_LOG = "info", DATABASE_URL = "postgres://..." }
port = 8080
Reference in New Issue
Block a user