-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathllm.ts
123 lines (110 loc) · 2.48 KB
/
llm.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
export interface LLMProvider {
id: string;
name: string;
description: string | null;
providerType: string;
endpointUrl: string;
apiKey: string;
isActive: boolean;
isDefault: boolean;
models: LLMModel[];
ownerType: string;
userOwner?: {
id: string;
name: string;
};
teamOwner?: {
id: string;
name: string;
};
teamOwnerId?: string | null;
}
/** Supported provider backends: native OpenAI, or any endpoint speaking the OpenAI wire format. */
export type LLMProviderType = 'openai-compatible' | 'openai';
/**
 * A single model registered under an {@link LLMProvider}.
 */
export interface LLMModel {
  id: string;
  /** Model identifier — presumably the upstream model name used in API calls; TODO confirm. */
  name: string;
  /** Optional human-friendly label. */
  displayName?: string;
  description?: string;
  /** Capability class: 'text' | 'chat' | 'embedding' | 'image'. */
  modelType: LLMModelType;
  /** Maximum context size — presumably measured in tokens; TODO confirm. */
  contextWindow?: number;
  isActive: boolean;
  isDefault: boolean;
  createdAt: Date;
  updatedAt: Date;
  // NOTE(review): `Record<string, unknown>` would be safer than `any`, but
  // narrowing here would affect every caller that reads config values.
  config?: Record<string, any>;
  /** ID of the owning provider. */
  providerId: string;
}
/** Capability classes a model can belong to. */
export type LLMModelType = 'chat' | 'text' | 'image' | 'embedding';
/**
 * Payload for creating a new LLM provider.
 */
export interface CreateLLMProviderRequest {
  name: string;
  description?: string;
  /** Backend kind; fixed at creation (UpdateLLMProviderRequest does not carry it). */
  providerType: LLMProviderType;
  endpointUrl: string;
  isActive?: boolean;
  isDefault?: boolean;
  /** Optional here — presumably some providers need no key; TODO confirm. */
  apiKey?: string;
  /** Free-form provider-specific options. */
  config?: Record<string, any>;
}
/**
 * Partial payload for updating an existing LLM provider.
 * All fields optional; omitted fields are presumably left unchanged — TODO confirm
 * against the API handler. Note `providerType` is intentionally absent.
 */
export interface UpdateLLMProviderRequest {
  name?: string;
  description?: string;
  endpointUrl?: string;
  isActive?: boolean;
  isDefault?: boolean;
  apiKey?: string;
  config?: Record<string, any>;
}
/**
 * Payload for registering a new model under a provider.
 */
export interface CreateLLMModelRequest {
  name: string;
  displayName?: string;
  description?: string;
  /** Capability class: 'text' | 'chat' | 'embedding' | 'image'. */
  modelType: LLMModelType;
  contextWindow?: number;
  isActive?: boolean;
  isDefault?: boolean;
  config?: Record<string, any>;
  /** ID of the provider this model belongs to. */
  providerId: string;
}
/**
 * Partial payload for updating an existing model.
 * Note `providerId` is absent — a model presumably cannot move between
 * providers; TODO confirm.
 */
export interface UpdateLLMModelRequest {
  name?: string;
  displayName?: string;
  description?: string;
  modelType?: LLMModelType;
  contextWindow?: number;
  isActive?: boolean;
  isDefault?: boolean;
  config?: Record<string, any>;
}
/**
 * Request to send a test message through a provider to verify connectivity.
 */
export interface TestLLMProviderRequest {
  providerId: string;
  /** The prompt/message to send. */
  message: string;
  /** Specific model to test; when omitted a default is presumably chosen — TODO confirm. */
  modelId?: string;
}
/**
 * Result of a provider connectivity test.
 * On success, `response` carries the model output; on failure, `error`
 * carries the message.
 */
export interface TestLLMProviderResponse {
  success: boolean;
  response?: string;
  error?: string;
  /** Round-trip time — presumably milliseconds; TODO confirm. */
  latency?: number;
  /** Token usage reported for the test call, when available. */
  tokens?: {
    input: number;
    output: number;
  };
}
// ---- User-scoped LLM preference types ----
/**
 * A user's preferred models and generation settings, keyed by capability.
 */
export interface UserLLMPreferences {
  /** Preferred model per capability; each value is an LLMModel ID. */
  models: {
    chat?: string; // Model ID
    text?: string; // Model ID
    embedding?: string; // Model ID
    image?: string; // Model ID
  };
  /** Default sampling parameters applied to the user's requests. */
  settings?: {
    temperature?: number;
    topP?: number;
    maxTokens?: number;
  };
}
/**
 * Payload for updating a user's LLM preferences.
 */
export interface UserPreferencesUpdateRequest {
  /** New default provider ID; null presumably clears the default — TODO confirm. */
  defaultLLMProviderId?: string | null;
  llmPreferences?: UserLLMPreferences;
}