fix local inference config

geoffsee
2025-06-18 12:38:38 -04:00
parent 3d16bd94b4
commit 38b364caeb


@@ -21,8 +21,8 @@ configure_dev_vars() {
echo "OLLAMA_API_KEY=active" >> "${DEV_VARS_PATH}" echo "OLLAMA_API_KEY=active" >> "${DEV_VARS_PATH}"
fi fi
if [[ "$endpoint_url" == *"10240"* ]]; then if [[ "$endpoint_url" == *"10240"* ]]; then
echo "MLX_API_KEY=active" >> "${ENV_LOCAL_PATH}" echo "MLX_API_KEY=active" >> "${ENV_LOCAL_PATH}"
echo "MLX_API_KEY=active" >> "${DEV_VARS_PATH}" echo "MLX_API_KEY=active" >> "${DEV_VARS_PATH}"
fi fi
echo "Local inference is configured for $endpoint_url" echo "Local inference is configured for $endpoint_url"
@@ -39,8 +39,7 @@ echo "Checking for local inference services..."
 if nc -z -w1 localhost 11434 >/dev/null 2>&1; then
   echo "Ollama service detected on port 11434."
   configure_dev_vars "http://localhost:11434"
-  # check for mlx-omni-server
-  if nc -z -w1 localhost 10240 >/dev/null 2>&1; then
+elif nc -z -w1 localhost 10240 >/dev/null 2>&1; then
   echo "mlx-omni-server service detected on port 10240."
   configure_dev_vars "http://localhost:10240"
 else
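Taken together, the fix turns the mlx-omni-server probe from a nested check inside the Ollama branch into an elif, so each backend is probed independently and the trailing else fires only when neither port answers. A sketch of the detection block as it plausibly reads after this change (the wording of the final else branch is assumed, as it is not shown in the hunk):

echo "Checking for local inference services..."
if nc -z -w1 localhost 11434 >/dev/null 2>&1; then
  echo "Ollama service detected on port 11434."
  configure_dev_vars "http://localhost:11434"
elif nc -z -w1 localhost 10240 >/dev/null 2>&1; then
  echo "mlx-omni-server service detected on port 10240."
  configure_dev_vars "http://localhost:10240"
else
  # Assumed message; the actual else body lies outside this hunk.
  echo "No local inference service detected; skipping local inference configuration."
fi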