mirror of
https://github.com/geoffsee/open-gsio.git
synced 2025-09-08 22:56:46 +00:00
- Add killport.js script for terminating processes on specific ports
- Introduce `supportedModels` in `ClientChatStore` and update model validation logic
- Enhance OpenAI inferencing with local setup adaptations and improved streaming options
- Modify ChatService to handle local and remote model fetching
- Update input menu to dynamically fetch and display supported models
- Add start_inference_server.sh for initiating local inference server
- Upgrade OpenAI SDK to v5.0.1 and adjust dependencies accordingly
This commit is contained in:

committed by
Geoff Seemueller

parent
c9ee7c7690
commit
cc0da17b5f
@@ -53,7 +53,17 @@ const InputMenu: React.FC<{ isDisabled?: boolean }> = observer(
|
||||
setControlledOpen(isOpen);
|
||||
}, [isOpen]);
|
||||
|
||||
const textModels = SUPPORTED_MODELS;
|
||||
|
||||
const getSupportedModels = async () => {
|
||||
return await (await fetch("/api/models")).json();
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
getSupportedModels().then((supportedModels) => {
|
||||
ClientChatStore.setSupportedModels(supportedModels);
|
||||
});
|
||||
}, []);
|
||||
|
||||
|
||||
const handleClose = useCallback(() => {
|
||||
onClose();
|
||||
@@ -75,9 +85,7 @@ const InputMenu: React.FC<{ isDisabled?: boolean }> = observer(
|
||||
}, [onClose]);
|
||||
|
||||
async function selectModelFn({ name, value }) {
|
||||
if (getModelFamily(value)) {
|
||||
ClientChatStore.setModel(value);
|
||||
}
|
||||
}
|
||||
|
||||
function isSelectedModelFn({ name, value }) {
|
||||
@@ -144,7 +152,7 @@ const InputMenu: React.FC<{ isDisabled?: boolean }> = observer(
|
||||
>
|
||||
<FlyoutSubMenu
|
||||
title="Text Models"
|
||||
flyoutMenuOptions={textModels.map((m) => ({ name: m, value: m }))}
|
||||
flyoutMenuOptions={ClientChatStore.supportedModels.map((m) => ({ name: m, value: m }))}
|
||||
onClose={onClose}
|
||||
parentIsOpen={isOpen}
|
||||
setMenuState={setMenuState}
|
||||
|
@@ -9,6 +9,7 @@ const ClientChatStore = types
|
||||
isLoading: types.optional(types.boolean, false),
|
||||
model: types.optional(types.string, "meta-llama/llama-4-scout-17b-16e-instruct"),
|
||||
imageModel: types.optional(types.string, "black-forest-labs/flux-1.1-pro"),
|
||||
supportedModels: types.optional(types.array(types.string), [])
|
||||
})
|
||||
.actions((self) => ({
|
||||
cleanup() {
|
||||
@@ -17,6 +18,12 @@ const ClientChatStore = types
|
||||
self.eventSource = null;
|
||||
}
|
||||
},
|
||||
// Replace the known model list and ensure the currently selected model is
// one of them; if not, fall back to the last model in the new list.
setSupportedModels(modelsList: string[]) {
  self.supportedModels = modelsList;
  if (!modelsList.includes(self.model) && modelsList.length > 0) {
    // BUG FIX: the old code used modelsList.pop(), which mutated the
    // caller's array and returned undefined for an empty list — and
    // assigning undefined to a `types.string` property makes MST throw.
    // Non-mutating indexed access keeps the same fallback (last entry)
    // without those side effects.
    self.model = modelsList[modelsList.length - 1];
  }
},
|
||||
sendMessage: flow(function* () {
|
||||
if (!self.input.trim() || self.isLoading) return;
|
||||
|
||||
|
Reference in New Issue
Block a user