adds eslint

Authored by geoffsee on 2025-06-24 17:29:52 -04:00; committed by Geoff Seemueller
parent 9698fc6f3b
commit 02c3253343
169 changed files with 4896 additions and 4804 deletions
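
The configuration files themselves are not part of this excerpt, but the mechanical changes below (reordered imports, trailing commas, single-argument arrows without parentheses such as message => message) are the kind of style an ESLint-plus-Prettier setup enforces. The following is a minimal sketch of a flat config that would produce this style; the file name, plugin choices, and options are assumptions for illustration, not taken from the commit:

// eslint.config.js (hypothetical sketch; not the config added by this commit)
import tseslint from 'typescript-eslint';
import prettierRecommended from 'eslint-plugin-prettier/recommended';
import simpleImportSort from 'eslint-plugin-simple-import-sort';

export default [
  ...tseslint.configs.recommended,
  prettierRecommended,
  {
    plugins: { 'simple-import-sort': simpleImportSort },
    rules: {
      // Reorders import statements, as in the reshuffled imports in the diff below.
      'simple-import-sort/imports': 'error',
      // Trailing commas everywhere and no parens on single-arg arrows,
      // matching changes like `create: vi.fn(message => message),`.
      'prettier/prettier': ['error', { trailingComma: 'all', arrowParens: 'avoid' }],
    },
  },
];

With a config along these lines, a single repo-wide pass such as npx eslint . --fix would account for the purely formatting edits spread across the 169 changed files.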


@@ -1,26 +1,27 @@
 import { describe, it, expect, vi, beforeEach } from 'vitest';
-import { ChatSdk } from '../chat-sdk.ts';
-import { AssistantSdk } from '../assistant-sdk.ts';
 import Message from '../../models/Message.ts';
 import { ProviderRepository } from '../../providers/_ProviderRepository';
+import { AssistantSdk } from '../assistant-sdk.ts';
+import { ChatSdk } from '../chat-sdk.ts';
 // Mock dependencies
 vi.mock('../assistant-sdk', () => ({
   AssistantSdk: {
-    getAssistantPrompt: vi.fn()
-  }
+    getAssistantPrompt: vi.fn(),
+  },
 }));
 vi.mock('../../models/Message', () => ({
   default: {
-    create: vi.fn((message) => message)
-  }
+    create: vi.fn(message => message),
+  },
 }));
 vi.mock('../../providers/_ProviderRepository', () => ({
   ProviderRepository: {
-    getModelFamily: vi.fn()
-  }
+    getModelFamily: vi.fn(),
+  },
 }));
 describe('ChatSdk', () => {
@@ -37,11 +38,11 @@ describe('ChatSdk', () => {
       expect(Message.create).toHaveBeenCalledWith({
         role: 'assistant',
-        content: ''
+        content: '',
       });
       expect(result).toEqual({
         role: 'assistant',
-        content: ''
+        content: '',
       });
     });
   });
@@ -49,7 +50,7 @@ describe('ChatSdk', () => {
   describe('handleChatRequest', () => {
     it('should return a 400 response if no messages are provided', async () => {
       const request = {
-        json: vi.fn().mockResolvedValue({ messages: [] })
+        json: vi.fn().mockResolvedValue({ messages: [] }),
       };
       const ctx = {
         openai: {},
@@ -58,9 +59,9 @@ describe('ChatSdk', () => {
         env: {
           SERVER_COORDINATOR: {
             idFromName: vi.fn(),
-            get: vi.fn()
-          }
-        }
+            get: vi.fn(),
+          },
+        },
       };
       const response = await ChatSdk.handleChatRequest(request as any, ctx as any);
@@ -72,7 +73,7 @@ describe('ChatSdk', () => {
     it('should save stream data and return a response with streamUrl', async () => {
       const streamId = 'test-uuid';
       vi.stubGlobal('crypto', {
-        randomUUID: vi.fn().mockReturnValue(streamId)
+        randomUUID: vi.fn().mockReturnValue(streamId),
       });
       const messages = [{ role: 'user', content: 'Hello' }];
@@ -80,12 +81,12 @@ describe('ChatSdk', () => {
       const conversationId = 'conv-123';
       const request = {
-        json: vi.fn().mockResolvedValue({ messages, model, conversationId })
+        json: vi.fn().mockResolvedValue({ messages, model, conversationId }),
       };
       const saveStreamData = vi.fn();
       const durableObject = {
-        saveStreamData
+        saveStreamData,
       };
       const ctx = {
@@ -95,9 +96,9 @@ describe('ChatSdk', () => {
         env: {
           SERVER_COORDINATOR: {
             idFromName: vi.fn().mockReturnValue('object-id'),
-            get: vi.fn().mockReturnValue(durableObject)
-          }
-        }
+            get: vi.fn().mockReturnValue(durableObject),
+          },
+        },
       };
       const response = await ChatSdk.handleChatRequest(request as any, ctx as any);
@@ -105,12 +106,9 @@ describe('ChatSdk', () => {
       expect(ctx.env.SERVER_COORDINATOR.idFromName).toHaveBeenCalledWith('stream-index');
       expect(ctx.env.SERVER_COORDINATOR.get).toHaveBeenCalledWith('object-id');
-      expect(saveStreamData).toHaveBeenCalledWith(
-        streamId,
-        expect.stringContaining(model)
-      );
+      expect(saveStreamData).toHaveBeenCalledWith(streamId, expect.stringContaining(model));
       expect(responseBody).toEqual({
-        streamUrl: `/api/streams/${streamId}`
+        streamUrl: `/api/streams/${streamId}`,
       });
     });
   });
@@ -120,7 +118,7 @@ describe('ChatSdk', () => {
       const messages = [{ role: 'user', content: 'Hello' }];
       const dynamicMaxTokens = vi.fn().mockResolvedValue(500);
       const durableObject = {
-        dynamicMaxTokens
+        dynamicMaxTokens,
      };
       const ctx = {
@@ -128,9 +126,9 @@ describe('ChatSdk', () => {
         env: {
           SERVER_COORDINATOR: {
             idFromName: vi.fn().mockReturnValue('object-id'),
-            get: vi.fn().mockReturnValue(durableObject)
-          }
-        }
+            get: vi.fn().mockReturnValue(durableObject),
+          },
+        },
       };
       await ChatSdk.calculateMaxTokens(messages, ctx as any);
@@ -150,7 +148,7 @@ describe('ChatSdk', () => {
       expect(AssistantSdk.getAssistantPrompt).toHaveBeenCalledWith({
         maxTokens: 1000,
         userTimezone: 'UTC',
-        userLocation: 'USA/unknown'
+        userLocation: 'USA/unknown',
       });
       expect(result).toBe('Assistant prompt');
     });
@@ -160,15 +158,13 @@ describe('ChatSdk', () => {
     it('should build a message chain with system role for most models', async () => {
       vi.mocked(ProviderRepository.getModelFamily).mockResolvedValue('openai');
-      const messages = [
-        {role: 'user', content: 'Hello'}
-      ];
+      const messages = [{ role: 'user', content: 'Hello' }];
       const opts = {
         systemPrompt: 'System prompt',
         assistantPrompt: 'Assistant prompt',
-        toolResults: {role: 'tool', content: 'Tool result'},
-        model: 'gpt-4'
+        toolResults: { role: 'tool', content: 'Tool result' },
+        model: 'gpt-4',
       };
       const result = await ChatSdk.buildMessageChain(messages, opts as any);
@@ -177,30 +173,28 @@ describe('ChatSdk', () => {
       expect(Message.create).toHaveBeenCalledTimes(3);
       expect(Message.create).toHaveBeenNthCalledWith(1, {
         role: 'system',
-        content: 'System prompt'
+        content: 'System prompt',
       });
       expect(Message.create).toHaveBeenNthCalledWith(2, {
         role: 'assistant',
-        content: 'Assistant prompt'
+        content: 'Assistant prompt',
       });
       expect(Message.create).toHaveBeenNthCalledWith(3, {
         role: 'user',
-        content: 'Hello'
+        content: 'Hello',
       });
     });
     it('should build a message chain with assistant role for o1, gemma, claude, or google models', async () => {
       vi.mocked(ProviderRepository.getModelFamily).mockResolvedValue('claude');
-      const messages = [
-        { role: 'user', content: 'Hello' }
-      ];
+      const messages = [{ role: 'user', content: 'Hello' }];
       const opts = {
         systemPrompt: 'System prompt',
         assistantPrompt: 'Assistant prompt',
         toolResults: { role: 'tool', content: 'Tool result' },
-        model: 'claude-3'
+        model: 'claude-3',
       };
       const result = await ChatSdk.buildMessageChain(messages, opts as any);
@@ -209,7 +203,7 @@ describe('ChatSdk', () => {
       expect(Message.create).toHaveBeenCalledTimes(3);
       expect(Message.create).toHaveBeenNthCalledWith(1, {
         role: 'assistant',
-        content: 'System prompt'
+        content: 'System prompt',
       });
     });
@@ -220,14 +214,14 @@ describe('ChatSdk', () => {
         { role: 'user', content: 'Hello' },
         { role: 'user', content: '' },
         { role: 'user', content: ' ' },
-        { role: 'user', content: 'World' }
+        { role: 'user', content: 'World' },
       ];
       const opts = {
         systemPrompt: 'System prompt',
         assistantPrompt: 'Assistant prompt',
         toolResults: { role: 'tool', content: 'Tool result' },
-        model: 'gpt-4'
+        model: 'gpt-4',
       };
       const result = await ChatSdk.buildMessageChain(messages, opts as any);