Mirror of https://github.com/geoffsee/open-gsio.git (synced 2025-09-08 22:56:46 +00:00)
fix local inference config
@@ -21,8 +21,8 @@ configure_dev_vars() {
     echo "OLLAMA_API_KEY=active" >> "${DEV_VARS_PATH}"
   fi
   if [[ "$endpoint_url" == *"10240"* ]]; then
-    echo "MLX_API_KEY=active" >> "${ENV_LOCAL_PATH}"
-    echo "MLX_API_KEY=active" >> "${DEV_VARS_PATH}"
+    echo "MLX_API_KEY=active" >> "${ENV_LOCAL_PATH}"
+    echo "MLX_API_KEY=active" >> "${DEV_VARS_PATH}"
   fi
 
   echo "Local inference is configured for $endpoint_url"
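This hunk touches the part of configure_dev_vars() that flags the MLX provider when the endpoint URL points at port 10240. Below is a minimal sketch of how the helper plausibly fits together; the variable names, port checks, and echo lines are taken from the hunks in this commit, while the DEV_VARS_PATH/ENV_LOCAL_PATH defaults are assumed, and the shape of the 11434 branch is inferred from the visible 10240 branch.

    # Sketch only: the real script defines these paths elsewhere.
    DEV_VARS_PATH=".dev.vars"      # assumed default
    ENV_LOCAL_PATH=".env.local"    # assumed default

    configure_dev_vars() {
      local endpoint_url="$1"

      # Ollama's default port is 11434; write a placeholder key so downstream
      # code sees the provider as configured.
      if [[ "$endpoint_url" == *"11434"* ]]; then
        echo "OLLAMA_API_KEY=active" >> "${DEV_VARS_PATH}"
      fi

      # mlx-omni-server's default port is 10240; same placeholder pattern,
      # written to both env files.
      if [[ "$endpoint_url" == *"10240"* ]]; then
        echo "MLX_API_KEY=active" >> "${ENV_LOCAL_PATH}"
        echo "MLX_API_KEY=active" >> "${DEV_VARS_PATH}"
      fi

      echo "Local inference is configured for $endpoint_url"
    }

The value "active" looks like a placeholder flag rather than a real credential: the script only needs later code to detect which local provider was set up.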
@@ -39,8 +39,7 @@ echo "Checking for local inference services..."
 if nc -z -w1 localhost 11434 >/dev/null 2>&1; then
   echo "Ollama service detected on port 11434."
   configure_dev_vars "http://localhost:11434"
 # check for mlx-omni-server
-if nc -z -w1 localhost 10240 >/dev/null 2>&1; then
+elif nc -z -w1 localhost 10240 >/dev/null 2>&1; then
   echo "mlx-omni-server service detected on port 10240."
   configure_dev_vars "http://localhost:10240"
 else
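The change here is from a second standalone if to elif: the mlx-omni-server probe now only runs when nothing answered on Ollama's port 11434, so at most one local endpoint is configured per run. A sketch of the detection chain after the change, assembled from the lines visible in this and the surrounding hunks:

    echo "Checking for local inference services..."
    if nc -z -w1 localhost 11434 >/dev/null 2>&1; then
      echo "Ollama service detected on port 11434."
      configure_dev_vars "http://localhost:11434"
    # check for mlx-omni-server
    elif nc -z -w1 localhost 10240 >/dev/null 2>&1; then
      echo "mlx-omni-server service detected on port 10240."
      configure_dev_vars "http://localhost:10240"
    else
      echo ".dev.vars was not modified by this script for OpenAI local inference settings."
    fi

nc -z -w1 does a zero-I/O port probe with a one-second timeout, so the check stays fast when neither service is running.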
@@ -49,4 +48,4 @@ else
   echo ".dev.vars was not modified by this script for OpenAI local inference settings."
 fi
 
-echo "Script finished."
+echo "Script finished."
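After running the script with one of the services up, a quick check like the one below shows which placeholder keys were written; the .dev.vars name comes from the script's own message above, while .env.local is an assumed value of ENV_LOCAL_PATH.

    # List any provider flags the script appended (either file may be absent).
    grep -E 'OLLAMA_API_KEY|MLX_API_KEY' .dev.vars .env.local 2>/dev/null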