diff --git a/src/FunctionsClient.ts b/src/FunctionsClient.ts
index 6e044db..ed1849f 100644
--- a/src/FunctionsClient.ts
+++ b/src/FunctionsClient.ts
@@ -115,6 +115,8 @@ export class FunctionsClient {
         data = await response.json()
       } else if (responseType === 'application/octet-stream') {
         data = await response.blob()
+      } else if (responseType === 'text/event-stream') {
+        data = response
       } else if (responseType === 'multipart/form-data') {
         data = await response.formData()
       } else {
diff --git a/src/edge-runtime.d.ts b/src/edge-runtime.d.ts
new file mode 100644
index 0000000..740937a
--- /dev/null
+++ b/src/edge-runtime.d.ts
@@ -0,0 +1,45 @@
+interface ModelOptions {
+  /**
+   * Pool embeddings by taking their mean. Applies only for `gte-small` model
+   */
+  mean_pool?: boolean
+
+  /**
+   * Normalize the embeddings result. Applies only for `gte-small` model
+   */
+  normalize?: boolean
+
+  /**
+   * Stream response from model. Applies only for LLMs like `mistral` (default: false)
+   */
+  stream?: boolean
+
+  /**
+   * Automatically abort the request to the model after specified time (in seconds). Applies only for LLMs like `mistral` (default: 60)
+   */
+  timeout?: number
+}
+
+interface Session {
+  /**
+   * Execute the given prompt in model session
+   */
+  run(prompt: string, modelOptions?: ModelOptions): unknown
+}
+
+declare var Session: {
+  prototype: Session
+  /**
+   * Create a new model session using given model
+   */
+  new (model: string, sessionOptions?: unknown): Session
+}
+
+declare var Supabase: {
+  /**
+   * Provides AI related APIs
+   */
+  readonly ai: {
+    readonly Session: typeof Session
+  }
+}
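A minimal sketch of how a caller might consume the new `text/event-stream` path: when the function responds with that content type, `invoke` now returns the raw `Response` as `data` instead of buffering the body, so the caller can read it as a stream. The function name `llm-stream`, URL, and key below are placeholders, not part of this change.

```ts
import { FunctionsClient } from '@supabase/functions-js'

// Placeholder project URL and key.
const functions = new FunctionsClient('https://<project-ref>.supabase.co/functions/v1', {
  headers: { Authorization: 'Bearer <anon-key>' },
})

async function readStream() {
  const { data, error } = await functions.invoke('llm-stream', {
    body: { prompt: 'Why is the sky blue?' },
  })
  if (error) throw error

  // For text/event-stream responses, `data` is the raw Response object.
  if (data instanceof Response && data.body) {
    const reader = data.body.pipeThrough(new TextDecoderStream()).getReader()
    for (;;) {
      const { value, done } = await reader.read()
      if (done) break
      console.log(value) // raw SSE chunks, e.g. "data: ...\n\n"
    }
  }
}
```

Handing back the unread `Response` leaves SSE parsing to the caller rather than committing the client to one event format. On the server side, the new `edge-runtime.d.ts` describes the `Supabase.ai.Session` global; a hedged sketch of an edge function using it for embeddings (the request shape and response handling are illustrative, and `run` is typed as `unknown` in the declaration):

```ts
// Runs inside the Supabase Edge Runtime, where `Supabase` and `Deno` are globals.
const model = new Supabase.ai.Session('gte-small')

Deno.serve(async (req) => {
  const { input } = await req.json()
  // mean_pool and normalize apply only to the gte-small model, per the declaration file.
  const embedding = await model.run(input, { mean_pool: true, normalize: true })
  return new Response(JSON.stringify(embedding), {
    headers: { 'Content-Type': 'application/json' },
  })
})
```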