diff --git a/api/openapi.json b/api/openapi.json index 4ea57d0e1..8b613d21f 100644 --- a/api/openapi.json +++ b/api/openapi.json @@ -834,6 +834,67 @@ "type": "string", "title": "Workspace Name" } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "default": 1, + "title": "Page" + } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 100, + "minimum": 1, + "default": 50, + "title": "Page Size" + } + }, + { + "name": "filter_by_ids", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "title": "Filter By Ids" + } + }, + { + "name": "filter_by_alert_trigger_types", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/components/schemas/AlertTriggerType" + } + }, + { + "type": "null" + } + ], + "title": "Filter By Alert Trigger Types" + } } ], "responses": { @@ -842,11 +903,60 @@ "content": { "application/json": { "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Conversation" - }, - "title": "Response V1 Get Workspace Messages" + "$ref": "#/components/schemas/PaginatedMessagesResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/api/v1/workspaces/{workspace_name}/messages/{prompt_id}": { + "get": { + "tags": [ + "CodeGate API", + "Workspaces" + ], + "summary": "Get Messages By Prompt Id", + "description": "Get messages for a workspace.", + "operationId": "v1_get_messages_by_prompt_id", + "parameters": [ + { + "name": "workspace_name", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "Workspace Name" + } + }, + { + "name": "prompt_id", + "in": "path", + 
"required": true, + "schema": { + "type": "string", + "title": "Prompt Id" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Conversation" } } } @@ -1660,17 +1770,31 @@ "secrets": { "type": "integer", "title": "Secrets" + }, + "total_alerts": { + "type": "integer", + "title": "Total Alerts" } }, "type": "object", "required": [ "malicious_packages", "pii", - "secrets" + "secrets", + "total_alerts" ], "title": "AlertSummary", "description": "Represents a set of summary alerts" }, + "AlertTriggerType": { + "type": "string", + "enum": [ + "codegate-pii", + "codegate-context-retriever", + "codegate-secrets" + ], + "title": "AlertTriggerType" + }, "ChatMessage": { "properties": { "message": { @@ -1820,10 +1944,17 @@ ] }, "alerts": { - "items": { - "$ref": "#/components/schemas/Alert" - }, - "type": "array", + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/Alert" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Alerts", "default": [] } @@ -1840,6 +1971,61 @@ "title": "Conversation", "description": "Represents a conversation." 
}, + "ConversationSummary": { + "properties": { + "chat_id": { + "type": "string", + "title": "Chat Id" + }, + "prompt": { + "$ref": "#/components/schemas/ChatMessage" + }, + "alerts_summary": { + "$ref": "#/components/schemas/AlertSummary" + }, + "token_usage_agg": { + "anyOf": [ + { + "$ref": "#/components/schemas/TokenUsageAggregate" + }, + { + "type": "null" + } + ] + }, + "provider": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Provider" + }, + "type": { + "$ref": "#/components/schemas/QuestionType" + }, + "conversation_timestamp": { + "type": "string", + "format": "date-time", + "title": "Conversation Timestamp" + } + }, + "type": "object", + "required": [ + "chat_id", + "prompt", + "alerts_summary", + "token_usage_agg", + "provider", + "type", + "conversation_timestamp" + ], + "title": "ConversationSummary", + "description": "Represents a conversation summary." + }, "CustomInstructions": { "properties": { "prompt": { @@ -2024,6 +2210,37 @@ "title": "MuxRule", "description": "Represents a mux rule for a provider." 
}, + "PaginatedMessagesResponse": { + "properties": { + "data": { + "items": { + "$ref": "#/components/schemas/ConversationSummary" + }, + "type": "array", + "title": "Data" + }, + "limit": { + "type": "integer", + "title": "Limit" + }, + "offset": { + "type": "integer", + "title": "Offset" + }, + "total": { + "type": "integer", + "title": "Total" + } + }, + "type": "object", + "required": [ + "data", + "limit", + "offset", + "total" + ], + "title": "PaginatedMessagesResponse" + }, "Persona": { "properties": { "id": { diff --git a/migrations/versions/2025_03_05_2126-e4c05d7591a8_add_installation_table.py b/migrations/versions/2025_03_05_2126-e4c05d7591a8_add_installation_table.py index 775e3967b..9e2b6c130 100644 --- a/migrations/versions/2025_03_05_2126-e4c05d7591a8_add_installation_table.py +++ b/migrations/versions/2025_03_05_2126-e4c05d7591a8_add_installation_table.py @@ -9,8 +9,6 @@ from typing import Sequence, Union from alembic import op -import sqlalchemy as sa - # revision identifiers, used by Alembic. 
revision: str = "e4c05d7591a8" diff --git a/model_cost_data/model_prices_and_context_window.json b/model_cost_data/model_prices_and_context_window.json index 42ebef110..cb2322752 100644 --- a/model_cost_data/model_prices_and_context_window.json +++ b/model_cost_data/model_prices_and_context_window.json @@ -1068,9 +1068,9 @@ "max_tokens": 65536, "max_input_tokens": 128000, "max_output_tokens": 65536, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000012, - "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.00000121, + "output_cost_per_token": 0.00000484, + "cache_read_input_token_cost": 0.000000605, "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, @@ -1082,9 +1082,9 @@ "max_tokens": 65536, "max_input_tokens": 128000, "max_output_tokens": 65536, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000012, - "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.00000121, + "output_cost_per_token": 0.00000484, + "cache_read_input_token_cost": 0.000000605, "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, @@ -2795,6 +2795,7 @@ "supports_vision": true, "tool_use_system_prompt_tokens": 264, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, "supports_response_schema": true, "deprecation_date": "2025-10-01", @@ -2814,6 +2815,7 @@ "supports_vision": true, "tool_use_system_prompt_tokens": 264, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, "supports_response_schema": true, "deprecation_date": "2025-10-01", @@ -2888,6 +2890,7 @@ "supports_vision": true, "tool_use_system_prompt_tokens": 159, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, "supports_response_schema": true, "deprecation_date": "2025-06-01", @@ -2907,15 +2910,16 @@ "supports_vision": true, "tool_use_system_prompt_tokens": 159, 
"supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, "supports_response_schema": true, "deprecation_date": "2025-06-01", "supports_tool_choice": true }, "claude-3-7-sonnet-latest": { - "max_tokens": 8192, + "max_tokens": 128000, "max_input_tokens": 200000, - "max_output_tokens": 8192, + "max_output_tokens": 128000, "input_cost_per_token": 0.000003, "output_cost_per_token": 0.000015, "cache_creation_input_token_cost": 0.00000375, @@ -2926,15 +2930,16 @@ "supports_vision": true, "tool_use_system_prompt_tokens": 159, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, "supports_response_schema": true, "deprecation_date": "2025-06-01", "supports_tool_choice": true }, "claude-3-7-sonnet-20250219": { - "max_tokens": 8192, + "max_tokens": 128000, "max_input_tokens": 200000, - "max_output_tokens": 8192, + "max_output_tokens": 128000, "input_cost_per_token": 0.000003, "output_cost_per_token": 0.000015, "cache_creation_input_token_cost": 0.00000375, @@ -2945,6 +2950,7 @@ "supports_vision": true, "tool_use_system_prompt_tokens": 159, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, "supports_response_schema": true, "deprecation_date": "2026-02-01", @@ -4159,6 +4165,7 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, "supports_assistant_prefill": true, "supports_tool_choice": true @@ -4172,6 +4179,7 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, "supports_assistant_prefill": true, "supports_tool_choice": true @@ -4185,6 +4193,7 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, "supports_assistant_prefill": true, 
"supports_tool_choice": true @@ -4198,6 +4207,7 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, "supports_assistant_prefill": true, "supports_tool_choice": true @@ -4213,6 +4223,7 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, "tool_use_system_prompt_tokens": 159, "supports_assistant_prefill": true, @@ -4256,6 +4267,7 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_tool_choice": true }, @@ -4268,6 +4280,7 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_tool_choice": true }, @@ -6432,6 +6445,18 @@ "supports_prompt_caching": true, "supports_response_schema": true }, + "eu.amazon.nova-micro-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 300000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000000046, + "output_cost_per_token": 0.000000184, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, "amazon.nova-lite-v1:0": { "max_tokens": 4096, "max_input_tokens": 128000, @@ -6460,6 +6485,20 @@ "supports_prompt_caching": true, "supports_response_schema": true }, + "eu.amazon.nova-lite-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000000078, + "output_cost_per_token": 0.000000312, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + 
"supports_response_schema": true + }, "amazon.nova-pro-v1:0": { "max_tokens": 4096, "max_input_tokens": 300000, @@ -6488,6 +6527,21 @@ "supports_prompt_caching": true, "supports_response_schema": true }, + "eu.amazon.nova-pro-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 300000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000105, + "output_cost_per_token": 0.0000042, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "source": "https://aws.amazon.com/bedrock/pricing/" + }, "anthropic.claude-3-sonnet-20240229-v1:0": { "max_tokens": 4096, "max_input_tokens": 200000, @@ -6499,8 +6553,25 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, + "bedrock/invoke/anthropic.claude-3-5-sonnet-20240620-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_tool_choice": true, + "metadata": { + "notes": "Anthropic via Invoke route does not currently support pdf input." 
+ } + }, "anthropic.claude-3-5-sonnet-20240620-v1:0": { "max_tokens": 4096, "max_input_tokens": 200000, @@ -6512,6 +6583,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "anthropic.claude-3-7-sonnet-20250219-v1:0": { @@ -6539,6 +6611,7 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_prompt_caching": true, "supports_response_schema": true, @@ -6555,6 +6628,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "anthropic.claude-3-5-haiku-20241022-v1:0": { @@ -6566,6 +6640,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_function_calling": true, "supports_response_schema": true, "supports_prompt_caching": true, @@ -6595,6 +6670,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "us.anthropic.claude-3-5-sonnet-20240620-v1:0": { @@ -6608,6 +6684,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "us.anthropic.claude-3-5-sonnet-20241022-v2:0": { @@ -6620,6 +6697,7 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_prompt_caching": true, "supports_response_schema": true, @@ -6651,6 +6729,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "us.anthropic.claude-3-5-haiku-20241022-v1:0": { @@ -6662,6 +6741,7 @@ "litellm_provider": "bedrock", "mode": 
"chat", "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_function_calling": true, "supports_prompt_caching": true, "supports_response_schema": true, @@ -6691,6 +6771,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "eu.anthropic.claude-3-5-sonnet-20240620-v1:0": { @@ -6704,6 +6785,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "eu.anthropic.claude-3-5-sonnet-20241022-v2:0": { @@ -6716,6 +6798,7 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_prompt_caching": true, "supports_response_schema": true, @@ -6732,6 +6815,7 @@ "supports_function_calling": true, "supports_response_schema": true, "supports_vision": true, + "supports_pdf_input": true, "supports_tool_choice": true }, "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { @@ -6744,6 +6828,7 @@ "mode": "chat", "supports_function_calling": true, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, "supports_response_schema": true, "supports_tool_choice": true diff --git a/poetry.lock b/poetry.lock index 15579a88b..09a132016 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2101,33 +2101,30 @@ reference = ["Pillow", "google-re2"] [[package]] name = "onnxruntime" -version = "1.20.1" +version = "1.21.0" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false -python-versions = "*" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "onnxruntime-1.20.1-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:e50ba5ff7fed4f7d9253a6baf801ca2883cc08491f9d32d78a80da57256a5439"}, - {file = "onnxruntime-1.20.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash 
= "sha256:7b2908b50101a19e99c4d4e97ebb9905561daf61829403061c1adc1b588bc0de"}, - {file = "onnxruntime-1.20.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d82daaec24045a2e87598b8ac2b417b1cce623244e80e663882e9fe1aae86410"}, - {file = "onnxruntime-1.20.1-cp310-cp310-win32.whl", hash = "sha256:4c4b251a725a3b8cf2aab284f7d940c26094ecd9d442f07dd81ab5470e99b83f"}, - {file = "onnxruntime-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:d3b616bb53a77a9463707bb313637223380fc327f5064c9a782e8ec69c22e6a2"}, - {file = "onnxruntime-1.20.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:06bfbf02ca9ab5f28946e0f912a562a5f005301d0c419283dc57b3ed7969bb7b"}, - {file = "onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6243e34d74423bdd1edf0ae9596dd61023b260f546ee17d701723915f06a9f7"}, - {file = "onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5eec64c0269dcdb8d9a9a53dc4d64f87b9e0c19801d9321246a53b7eb5a7d1bc"}, - {file = "onnxruntime-1.20.1-cp311-cp311-win32.whl", hash = "sha256:a19bc6e8c70e2485a1725b3d517a2319603acc14c1f1a017dda0afe6d4665b41"}, - {file = "onnxruntime-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:8508887eb1c5f9537a4071768723ec7c30c28eb2518a00d0adcd32c89dea3221"}, - {file = "onnxruntime-1.20.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:22b0655e2bf4f2161d52706e31f517a0e54939dc393e92577df51808a7edc8c9"}, - {file = "onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f56e898815963d6dc4ee1c35fc6c36506466eff6d16f3cb9848cea4e8c8172"}, - {file = "onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb71a814f66517a65628c9e4a2bb530a6edd2cd5d87ffa0af0f6f773a027d99e"}, - {file = "onnxruntime-1.20.1-cp312-cp312-win32.whl", hash = "sha256:bd386cc9ee5f686ee8a75ba74037750aca55183085bf1941da8efcfe12d5b120"}, - {file = 
"onnxruntime-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:19c2d843eb074f385e8bbb753a40df780511061a63f9def1b216bf53860223fb"}, - {file = "onnxruntime-1.20.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:cc01437a32d0042b606f462245c8bbae269e5442797f6213e36ce61d5abdd8cc"}, - {file = "onnxruntime-1.20.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb44b08e017a648924dbe91b82d89b0c105b1adcfe31e90d1dc06b8677ad37be"}, - {file = "onnxruntime-1.20.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bda6aebdf7917c1d811f21d41633df00c58aff2bef2f598f69289c1f1dabc4b3"}, - {file = "onnxruntime-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:d30367df7e70f1d9fc5a6a68106f5961686d39b54d3221f760085524e8d38e16"}, - {file = "onnxruntime-1.20.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9158465745423b2b5d97ed25aa7740c7d38d2993ee2e5c3bfacb0c4145c49d8"}, - {file = "onnxruntime-1.20.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0df6f2df83d61f46e842dbcde610ede27218947c33e994545a22333491e72a3b"}, + {file = "onnxruntime-1.21.0-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:95513c9302bc8dd013d84148dcf3168e782a80cdbf1654eddc948a23147ccd3d"}, + {file = "onnxruntime-1.21.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:635d4ab13ae0f150dd4c6ff8206fd58f1c6600636ecc796f6f0c42e4c918585b"}, + {file = "onnxruntime-1.21.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d06bfa0dd5512bd164f25a2bf594b2e7c9eabda6fc064b684924f3e81bdab1b"}, + {file = "onnxruntime-1.21.0-cp310-cp310-win_amd64.whl", hash = "sha256:b0fc22d219791e0284ee1d9c26724b8ee3fbdea28128ef25d9507ad3b9621f23"}, + {file = "onnxruntime-1.21.0-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:8e16f8a79df03919810852fb46ffcc916dc87a9e9c6540a58f20c914c575678c"}, + {file = 
"onnxruntime-1.21.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9156cf6f8ee133d07a751e6518cf6f84ed37fbf8243156bd4a2c4ee6e073c8"}, + {file = "onnxruntime-1.21.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a5d09815a9e209fa0cb20c2985b34ab4daeba7aea94d0f96b8751eb10403201"}, + {file = "onnxruntime-1.21.0-cp311-cp311-win_amd64.whl", hash = "sha256:1d970dff1e2fa4d9c53f2787b3b7d0005596866e6a31997b41169017d1362dd0"}, + {file = "onnxruntime-1.21.0-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:893d67c68ca9e7a58202fa8d96061ed86a5815b0925b5a97aef27b8ba246a20b"}, + {file = "onnxruntime-1.21.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:37b7445c920a96271a8dfa16855e258dc5599235b41c7bbde0d262d55bcc105f"}, + {file = "onnxruntime-1.21.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9a04aafb802c1e5573ba4552f8babcb5021b041eb4cfa802c9b7644ca3510eca"}, + {file = "onnxruntime-1.21.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f801318476cd7003d636a5b392f7a37c08b6c8d2f829773f3c3887029e03f32"}, + {file = "onnxruntime-1.21.0-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:85718cbde1c2912d3a03e3b3dc181b1480258a229c32378408cace7c450f7f23"}, + {file = "onnxruntime-1.21.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94dff3a61538f3b7b0ea9a06bc99e1410e90509c76e3a746f039e417802a12ae"}, + {file = "onnxruntime-1.21.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1e704b0eda5f2bbbe84182437315eaec89a450b08854b5a7762c85d04a28a0a"}, + {file = "onnxruntime-1.21.0-cp313-cp313-win_amd64.whl", hash = "sha256:19b630c6a8956ef97fb7c94948b17691167aa1aaf07b5f214fa66c3e4136c108"}, + {file = "onnxruntime-1.21.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3995c4a2d81719623c58697b9510f8de9fa42a1da6b4474052797b0d712324fe"}, + {file = 
"onnxruntime-1.21.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:36b18b8f39c0f84e783902112a0dd3c102466897f96d73bb83f6a6bff283a423"}, ] [package.dependencies] @@ -3133,30 +3130,30 @@ files = [ [[package]] name = "ruff" -version = "0.9.9" +version = "0.9.10" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367"}, - {file = "ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7"}, - {file = "ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d"}, - {file = "ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a"}, - {file = "ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe"}, - {file = "ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c"}, - {file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be"}, - {file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590"}, - {file = "ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb"}, - {file = "ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0"}, - 
{file = "ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17"}, - {file = "ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1"}, - {file = "ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57"}, - {file = "ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e"}, - {file = "ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1"}, - {file = "ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1"}, - {file = "ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf"}, - {file = "ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933"}, + {file = "ruff-0.9.10-py3-none-linux_armv6l.whl", hash = "sha256:eb4d25532cfd9fe461acc83498361ec2e2252795b4f40b17e80692814329e42d"}, + {file = "ruff-0.9.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:188a6638dab1aa9bb6228a7302387b2c9954e455fb25d6b4470cb0641d16759d"}, + {file = "ruff-0.9.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5284dcac6b9dbc2fcb71fdfc26a217b2ca4ede6ccd57476f52a587451ebe450d"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47678f39fa2a3da62724851107f438c8229a3470f533894b5568a39b40029c0c"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99713a6e2766b7a17147b309e8c915b32b07a25c9efd12ada79f217c9c778b3e"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524ee184d92f7c7304aa568e2db20f50c32d1d0caa235d8ddf10497566ea1a12"}, + {file = 
"ruff-0.9.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:df92aeac30af821f9acf819fc01b4afc3dfb829d2782884f8739fb52a8119a16"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de42e4edc296f520bb84954eb992a07a0ec5a02fecb834498415908469854a52"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d257f95b65806104b6b1ffca0ea53f4ef98454036df65b1eda3693534813ecd1"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60dec7201c0b10d6d11be00e8f2dbb6f40ef1828ee75ed739923799513db24c"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d838b60007da7a39c046fcdd317293d10b845001f38bcb55ba766c3875b01e43"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ccaf903108b899beb8e09a63ffae5869057ab649c1e9231c05ae354ebc62066c"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f9567d135265d46e59d62dc60c0bfad10e9a6822e231f5b24032dba5a55be6b5"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5f202f0d93738c28a89f8ed9eaba01b7be339e5d8d642c994347eaa81c6d75b8"}, + {file = "ruff-0.9.10-py3-none-win32.whl", hash = "sha256:bfb834e87c916521ce46b1788fbb8484966e5113c02df216680102e9eb960029"}, + {file = "ruff-0.9.10-py3-none-win_amd64.whl", hash = "sha256:f2160eeef3031bf4b17df74e307d4c5fb689a6f3a26a2de3f7ef4044e3c484f1"}, + {file = "ruff-0.9.10-py3-none-win_arm64.whl", hash = "sha256:5fd804c0327a5e5ea26615550e706942f348b197d5475ff34c19733aee4b2e69"}, + {file = "ruff-0.9.10.tar.gz", hash = "sha256:9bacb735d7bada9cfb0f2c227d3658fc443d90a727b47f206fb33f52f3c0eac7"}, ] [[package]] @@ -3453,69 +3450,69 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.38" +version = "2.0.39" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = 
"SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:402c2316d95ed90d3d3c25ad0390afa52f4d2c56b348f212aa9c8d072a40eee5"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6493bc0eacdbb2c0f0d260d8988e943fee06089cd239bd7f3d0c45d1657a70e2"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0561832b04c6071bac3aad45b0d3bb6d2c4f46a8409f0a7a9c9fa6673b41bc03"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49aa2cdd1e88adb1617c672a09bf4ebf2f05c9448c6dbeba096a3aeeb9d4d443"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-win32.whl", hash = "sha256:64aa8934200e222f72fcfd82ee71c0130a9c07d5725af6fe6e919017d095b297"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-win_amd64.whl", hash = "sha256:c57b8e0841f3fce7b703530ed70c7c36269c6d180ea2e02e36b34cb7288c50c7"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf89e0e4a30714b357f5d46b6f20e0099d38b30d45fa68ea48589faf5f12f62d"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8455aa60da49cb112df62b4721bd8ad3654a3a02b9452c783e651637a1f21fa2"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f53c0d6a859b2db58332e0e6a921582a02c1677cc93d4cbb36fdf49709b327b2"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c4817dff8cef5697f5afe5fec6bc1783994d55a68391be24cb7d80d2dbc3a6"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:c9cea5b756173bb86e2235f2f871b406a9b9d722417ae31e5391ccaef5348f2c"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40e9cdbd18c1f84631312b64993f7d755d85a3930252f6276a77432a2b25a2f3"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-win32.whl", hash = "sha256:cb39ed598aaf102251483f3e4675c5dd6b289c8142210ef76ba24aae0a8f8aba"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-win_amd64.whl", hash = "sha256:f9d57f1b3061b3e21476b0ad5f0397b112b94ace21d1f439f2db472e568178ae"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12d5b06a1f3aeccf295a5843c86835033797fea292c60e72b07bcb5d820e6dd3"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e036549ad14f2b414c725349cce0772ea34a7ab008e9cd67f9084e4f371d1f32"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3bee874cb1fadee2ff2b79fc9fc808aa638670f28b2145074538d4a6a5028e"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e185ea07a99ce8b8edfc788c586c538c4b1351007e614ceb708fd01b095ef33e"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b79ee64d01d05a5476d5cceb3c27b5535e6bb84ee0f872ba60d9a8cd4d0e6579"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afd776cf1ebfc7f9aa42a09cf19feadb40a26366802d86c1fba080d8e5e74bdd"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-win32.whl", hash = "sha256:a5645cd45f56895cfe3ca3459aed9ff2d3f9aaa29ff7edf557fa7a23515a3725"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-win_amd64.whl", hash = "sha256:1052723e6cd95312f6a6eff9a279fd41bbae67633415373fdac3c430eca3425d"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ecef029b69843b82048c5b347d8e6049356aa24ed644006c9a9d7098c3bd3bfd"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:9c8bcad7fc12f0cc5896d8e10fdf703c45bd487294a986903fe032c72201596b"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0ef3f98175d77180ffdc623d38e9f1736e8d86b6ba70bff182a7e68bed7727"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ac78898c50e2574e9f938d2e5caa8fe187d7a5b69b65faa1ea4648925b096"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eb4fa13c8c7a2404b6a8e3772c17a55b1ba18bc711e25e4d6c0c9f5f541b02a"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash = "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, - {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, - {file = "sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66a40003bc244e4ad86b72abb9965d304726d05a939e8c09ce844d27af9e6d37"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67de057fbcb04a066171bd9ee6bcb58738d89378ee3cabff0bffbf343ae1c787"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:533e0f66c32093a987a30df3ad6ed21170db9d581d0b38e71396c49718fbb1ca"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7399d45b62d755e9ebba94eb89437f80512c08edde8c63716552a3aade61eb42"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:788b6ff6728072b313802be13e88113c33696a9a1f2f6d634a97c20f7ef5ccce"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-win32.whl", hash = "sha256:01da15490c9df352fbc29859d3c7ba9cd1377791faeeb47c100832004c99472c"}, + {file = "SQLAlchemy-2.0.39-cp37-cp37m-win_amd64.whl", hash = "sha256:f2bcb085faffcacf9319b1b1445a7e1cfdc6fb46c03f2dce7bc2d9a4b3c1cdc5"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b761a6847f96fdc2d002e29e9e9ac2439c13b919adfd64e8ef49e75f6355c548"}, + {file = 
"SQLAlchemy-2.0.39-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d7e3866eb52d914aea50c9be74184a0feb86f9af8aaaa4daefe52b69378db0b"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995c2bacdddcb640c2ca558e6760383dcdd68830160af92b5c6e6928ffd259b4"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344cd1ec2b3c6bdd5dfde7ba7e3b879e0f8dd44181f16b895940be9b842fd2b6"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5dfbc543578058c340360f851ddcecd7a1e26b0d9b5b69259b526da9edfa8875"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3395e7ed89c6d264d38bea3bfb22ffe868f906a7985d03546ec7dc30221ea980"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-win32.whl", hash = "sha256:bf555f3e25ac3a70c67807b2949bfe15f377a40df84b71ab2c58d8593a1e036e"}, + {file = "SQLAlchemy-2.0.39-cp38-cp38-win_amd64.whl", hash = "sha256:463ecfb907b256e94bfe7bcb31a6d8c7bc96eca7cbe39803e448a58bb9fcad02"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6827f8c1b2f13f1420545bd6d5b3f9e0b85fe750388425be53d23c760dcf176b"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9f119e7736967c0ea03aff91ac7d04555ee038caf89bb855d93bbd04ae85b41"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4600c7a659d381146e1160235918826c50c80994e07c5b26946a3e7ec6c99249"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a06e6c8e31c98ddc770734c63903e39f1947c9e3e5e4bef515c5491b7737dde"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4c433f78c2908ae352848f56589c02b982d0e741b7905228fad628999799de4"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bd5c5ee1448b6408734eaa29c0d820d061ae18cb17232ce37848376dcfa3e92"}, 
+ {file = "sqlalchemy-2.0.39-cp310-cp310-win32.whl", hash = "sha256:87a1ce1f5e5dc4b6f4e0aac34e7bb535cb23bd4f5d9c799ed1633b65c2bcad8c"}, + {file = "sqlalchemy-2.0.39-cp310-cp310-win_amd64.whl", hash = "sha256:871f55e478b5a648c08dd24af44345406d0e636ffe021d64c9b57a4a11518304"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a28f9c238f1e143ff42ab3ba27990dfb964e5d413c0eb001b88794c5c4a528a9"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08cf721bbd4391a0e765fe0fe8816e81d9f43cece54fdb5ac465c56efafecb3d"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a8517b6d4005facdbd7eb4e8cf54797dbca100a7df459fdaff4c5123265c1cd"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b2de1523d46e7016afc7e42db239bd41f2163316935de7c84d0e19af7e69538"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:412c6c126369ddae171c13987b38df5122cb92015cba6f9ee1193b867f3f1530"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b35e07f1d57b79b86a7de8ecdcefb78485dab9851b9638c2c793c50203b2ae8"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-win32.whl", hash = "sha256:3eb14ba1a9d07c88669b7faf8f589be67871d6409305e73e036321d89f1d904e"}, + {file = "sqlalchemy-2.0.39-cp311-cp311-win_amd64.whl", hash = "sha256:78f1b79132a69fe8bd6b5d91ef433c8eb40688ba782b26f8c9f3d2d9ca23626f"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c457a38351fb6234781d054260c60e531047e4d07beca1889b558ff73dc2014b"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:018ee97c558b499b58935c5a152aeabf6d36b3d55d91656abeb6d93d663c0c4c"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a8120d6fc185f60e7254fc056a6742f1db68c0f849cfc9ab46163c21df47"}, + {file = 
"sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2cf5b5ddb69142511d5559c427ff00ec8c0919a1e6c09486e9c32636ea2b9dd"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f03143f8f851dd8de6b0c10784363712058f38209e926723c80654c1b40327a"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06205eb98cb3dd52133ca6818bf5542397f1dd1b69f7ea28aa84413897380b06"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-win32.whl", hash = "sha256:7f5243357e6da9a90c56282f64b50d29cba2ee1f745381174caacc50d501b109"}, + {file = "sqlalchemy-2.0.39-cp312-cp312-win_amd64.whl", hash = "sha256:2ed107331d188a286611cea9022de0afc437dd2d3c168e368169f27aa0f61338"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe193d3ae297c423e0e567e240b4324d6b6c280a048e64c77a3ea6886cc2aa87"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:79f4f502125a41b1b3b34449e747a6abfd52a709d539ea7769101696bdca6716"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a10ca7f8a1ea0fd5630f02feb055b0f5cdfcd07bb3715fc1b6f8cb72bf114e4"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6b0a1c7ed54a5361aaebb910c1fa864bae34273662bb4ff788a527eafd6e14d"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52607d0ebea43cf214e2ee84a6a76bc774176f97c5a774ce33277514875a718e"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c08a972cbac2a14810463aec3a47ff218bb00c1a607e6689b531a7c589c50723"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-win32.whl", hash = "sha256:23c5aa33c01bd898f879db158537d7e7568b503b15aad60ea0c8da8109adf3e7"}, + {file = "sqlalchemy-2.0.39-cp313-cp313-win_amd64.whl", hash = "sha256:4dabd775fd66cf17f31f8625fc0e4cfc5765f7982f94dc09b9e5868182cb71c0"}, + {file = 
"sqlalchemy-2.0.39-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2600a50d590c22d99c424c394236899ba72f849a02b10e65b4c70149606408b5"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4eff9c270afd23e2746e921e80182872058a7a592017b2713f33f96cc5f82e32"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7332868ce891eda48896131991f7f2be572d65b41a4050957242f8e935d5d7"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125a7763b263218a80759ad9ae2f3610aaf2c2fbbd78fff088d584edf81f3782"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:04545042969833cb92e13b0a3019549d284fd2423f318b6ba10e7aa687690a3c"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:805cb481474e111ee3687c9047c5f3286e62496f09c0e82e8853338aaaa348f8"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-win32.whl", hash = "sha256:34d5c49f18778a3665d707e6286545a30339ad545950773d43977e504815fa70"}, + {file = "sqlalchemy-2.0.39-cp39-cp39-win_amd64.whl", hash = "sha256:35e72518615aa5384ef4fae828e3af1b43102458b74a8c481f69af8abf7e802a"}, + {file = "sqlalchemy-2.0.39-py3-none-any.whl", hash = "sha256:a1c6b0a5e3e326a466d809b651c63f278b1256146a377a528b6938a279da334f"}, + {file = "sqlalchemy-2.0.39.tar.gz", hash = "sha256:5d2d1fe548def3267b4c70a8568f108d1fed7cbbeccb9cc166e05af2abc25c22"}, ] [package.dependencies] @@ -3646,14 +3643,14 @@ pbr = ">=2.0.0" [[package]] name = "structlog" -version = "25.1.0" +version = "25.2.0" description = "Structured Logging for Python" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "structlog-25.1.0-py3-none-any.whl", hash = "sha256:843fe4f254540329f380812cbe612e1af5ec5b8172205ae634679cd35a6d6321"}, - {file = "structlog-25.1.0.tar.gz", hash = "sha256:2ef2a572e0e27f09664965d31a576afe64e46ac6084ef5cec3c2b8cd6e4e3ad3"}, + {file = "structlog-25.2.0-py3-none-any.whl", 
hash = "sha256:0fecea2e345d5d491b72f3db2e5fcd6393abfc8cd06a4851f21fcd4d1a99f437"}, + {file = "structlog-25.2.0.tar.gz", hash = "sha256:d9f9776944207d1035b8b26072b9b140c63702fd7aa57c2f85d28ab701bd8e92"}, ] [package.extras] @@ -4279,4 +4276,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = ">=3.12,<3.13" -content-hash = "ba9315e5bd243ff23b9f1044c43228b0658f7c345bc081dcfe9f2af8f2511e0c" +content-hash = "42b8d0f35558340b3672fed931dbbf80502285b0c70f61476307f3c8736772b1" diff --git a/pyproject.toml b/pyproject.toml index 8da9768ad..e9d570886 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,11 +13,11 @@ click = "==8.1.8" PyYAML = "==6.0.2" fastapi = "==0.115.11" uvicorn = "==0.34.0" -structlog = "==25.1.0" +structlog = "==25.2.0" litellm = "==1.63.0" llama_cpp_python = "==0.3.5" cryptography = "==44.0.2" -sqlalchemy = "==2.0.38" +sqlalchemy = "==2.0.39" aiosqlite = "==0.21.0" ollama = "==0.4.7" pydantic-settings = "==2.8.1" @@ -36,7 +36,7 @@ cachetools = "==5.5.2" legacy-cgi = "==2.6.2" presidio-analyzer = "==2.2.357" presidio-anonymizer = "==2.2.357" -onnxruntime = "==1.20.1" +onnxruntime = "==1.21.0" onnx = "==1.17.0" spacy = "<3.8.0" en-core-web-sm = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl"} @@ -46,7 +46,7 @@ regex = "==2024.11.6" pytest = "==8.3.5" pytest-cov = "==6.0.0" black = "==25.1.0" -ruff = "==0.9.9" +ruff = "==0.9.10" bandit = "==1.8.3" build = "==1.2.2.post1" wheel = "==0.45.1" diff --git a/src/codegate/api/v1.py b/src/codegate/api/v1.py index 33efea33e..edd6d0a06 100644 --- a/src/codegate/api/v1.py +++ b/src/codegate/api/v1.py @@ -1,9 +1,10 @@ from typing import List, Optional from uuid import UUID +import cachetools.func import requests import structlog -from fastapi import APIRouter, Depends, HTTPException, Response +from fastapi import APIRouter, Depends, HTTPException, Query, Response from fastapi.responses import StreamingResponse 
from fastapi.routing import APIRoute from pydantic import BaseModel, ValidationError @@ -11,14 +12,16 @@ import codegate.muxing.models as mux_models from codegate import __version__ from codegate.api import v1_models, v1_processing +from codegate.config import API_DEFAULT_PAGE_SIZE, API_MAX_PAGE_SIZE from codegate.db.connection import AlreadyExistsError, DbReader -from codegate.db.models import AlertSeverity, Persona, WorkspaceWithModel +from codegate.db.models import AlertSeverity, AlertTriggerType, Persona, WorkspaceWithModel from codegate.muxing.persona import ( PersonaDoesNotExistError, PersonaManager, PersonaSimilarDescriptionError, ) from codegate.providers import crud as provendcrud +from codegate.updates.client import Origin, get_update_client_singleton from codegate.workspaces import crud logger = structlog.get_logger("codegate") @@ -419,7 +422,9 @@ async def get_workspace_alerts(workspace_name: str) -> List[Optional[v1_models.A raise HTTPException(status_code=500, detail="Internal server error") try: - alerts = await dbreader.get_alerts_by_workspace(ws.id, AlertSeverity.CRITICAL.value) + alerts = await dbreader.get_alerts_by_workspace_or_prompt_id( + workspace_id=ws.id, trigger_category=AlertSeverity.CRITICAL.value + ) prompts_outputs = await dbreader.get_prompts_with_output(ws.id) return await v1_processing.parse_get_alert_conversation(alerts, prompts_outputs) except Exception: @@ -443,11 +448,12 @@ async def get_workspace_alerts_summary(workspace_name: str) -> v1_models.AlertSu raise HTTPException(status_code=500, detail="Internal server error") try: - summary = await dbreader.get_alerts_summary_by_workspace(ws.id) + summary = await dbreader.get_alerts_summary(workspace_id=ws.id) return v1_models.AlertSummary( - malicious_packages=summary["codegate_context_retriever_count"], - pii=summary["codegate_pii_count"], - secrets=summary["codegate_secrets_count"], + malicious_packages=summary.total_packages_count, + pii=summary.total_pii_count, + 
secrets=summary.total_secrets_count, + total_alerts=summary.total_alerts, ) except Exception: logger.exception("Error while getting alerts summary") @@ -459,7 +465,13 @@ async def get_workspace_alerts_summary(workspace_name: str) -> v1_models.AlertSu tags=["Workspaces"], generate_unique_id_function=uniq_name, ) -async def get_workspace_messages(workspace_name: str) -> List[v1_models.Conversation]: +async def get_workspace_messages( + workspace_name: str, + page: int = Query(1, ge=1), + page_size: int = Query(API_DEFAULT_PAGE_SIZE, ge=1, le=API_MAX_PAGE_SIZE), + filter_by_ids: Optional[List[str]] = Query(None), + filter_by_alert_trigger_types: Optional[List[AlertTriggerType]] = Query(None), +) -> v1_models.PaginatedMessagesResponse: """Get messages for a workspace.""" try: ws = await wscrud.get_workspace_by_name(workspace_name) @@ -469,19 +481,119 @@ async def get_workspace_messages(workspace_name: str) -> List[v1_models.Conversa logger.exception("Error while getting workspace") raise HTTPException(status_code=500, detail="Internal server error") - try: - prompts_with_output_alerts_usage = ( - await dbreader.get_prompts_with_output_alerts_usage_by_workspace_id( - ws.id, AlertSeverity.CRITICAL.value - ) - ) - conversations, _ = await v1_processing.parse_messages_in_conversations( - prompts_with_output_alerts_usage + offset = (page - 1) * page_size + valid_conversations: List[v1_models.ConversationSummary] = [] + fetched_prompts = 0 + + while len(valid_conversations) < page_size: + batch_size = page_size * 2 # Fetch more prompts to compensate for potential skips + + prompts = await dbreader.get_prompts( + ws.id, + offset + fetched_prompts, + batch_size, + filter_by_ids, + list([AlertSeverity.CRITICAL.value]), + filter_by_alert_trigger_types, ) - return conversations + + if not prompts or len(prompts) == 0: + break + + # iterate for all prompts to compose the conversation summary + for prompt in prompts: + fetched_prompts += 1 + if not prompt.request: + 
logger.warning(f"Skipping prompt {prompt.id}. Empty request field") + continue + + messages, _ = await v1_processing.parse_request(prompt.request) + if not messages or len(messages) == 0: + logger.warning(f"Skipping prompt {prompt.id}. No messages found") + continue + + # message is just the first entry in the request, cleaned properly + message = v1_processing.parse_question_answer(messages[0]) + message_obj = v1_models.ChatMessage( + message=message, timestamp=prompt.timestamp, message_id=prompt.id + ) + + # count total alerts for the prompt + total_alerts_row = await dbreader.get_alerts_summary(prompt_id=prompt.id) + + # get token usage for the prompt + prompts_outputs = await dbreader.get_prompts_with_output(prompt_id=prompt.id) + ws_token_usage = await v1_processing.parse_workspace_token_usage(prompts_outputs) + + conversation_summary = v1_models.ConversationSummary( + chat_id=prompt.id, + prompt=message_obj, + provider=prompt.provider, + type=prompt.type, + conversation_timestamp=prompt.timestamp, + alerts_summary=v1_models.AlertSummary( + malicious_packages=total_alerts_row.total_packages_count, + pii=total_alerts_row.total_pii_count, + secrets=total_alerts_row.total_secrets_count, + total_alerts=total_alerts_row.total_alerts, + ), + total_alerts=total_alerts_row.total_alerts, + token_usage_agg=ws_token_usage, + ) + + valid_conversations.append(conversation_summary) + if len(valid_conversations) >= page_size: + break + + # Fetch total message count + total_count = await dbreader.get_total_messages_count_by_workspace_id( + ws.id, + filter_by_ids, + list([AlertSeverity.CRITICAL.value]), + filter_by_alert_trigger_types, + ) + + return v1_models.PaginatedMessagesResponse( + data=valid_conversations, + limit=page_size, + offset=offset, + total=total_count, + ) + + +@v1.get( + "/workspaces/{workspace_name}/messages/{prompt_id}", + tags=["Workspaces"], + generate_unique_id_function=uniq_name, +) +async def get_messages_by_prompt_id( + workspace_name: str, + 
prompt_id: str, +) -> v1_models.Conversation: + """Get messages for a workspace.""" + try: + ws = await wscrud.get_workspace_by_name(workspace_name) + except crud.WorkspaceDoesNotExistError: + raise HTTPException(status_code=404, detail="Workspace does not exist") except Exception: - logger.exception("Error while getting messages") + logger.exception("Error while getting workspace") raise HTTPException(status_code=500, detail="Internal server error") + prompts_outputs = await dbreader.get_prompts_with_output( + workspace_id=ws.id, prompt_id=prompt_id + ) + + # get all alerts for the prompt + alerts = await dbreader.get_alerts_by_workspace_or_prompt_id( + workspace_id=ws.id, prompt_id=prompt_id, trigger_category=AlertSeverity.CRITICAL.value + ) + deduped_alerts = await v1_processing.remove_duplicate_alerts(alerts) + conversations, _ = await v1_processing.parse_messages_in_conversations(prompts_outputs) + if not conversations: + raise HTTPException(status_code=404, detail="Conversation not found") + + conversation = conversations[0] + conversation.alerts = deduped_alerts + return conversation @v1.get( @@ -614,10 +726,9 @@ async def stream_sse(): @v1.get("/version", tags=["Dashboard"], generate_unique_id_function=uniq_name) -def version_check(): +async def version_check(): try: - latest_version = v1_processing.fetch_latest_version() - + latest_version = _get_latest_version() # normalize the versions as github will return them with a 'v' prefix current_version = __version__.lstrip("v") latest_version_stripped = latest_version.lstrip("v") @@ -665,7 +776,7 @@ async def get_workspace_token_usage(workspace_name: str) -> v1_models.TokenUsage raise HTTPException(status_code=500, detail="Internal server error") try: - prompts_outputs = await dbreader.get_prompts_with_output(ws.id) + prompts_outputs = await dbreader.get_prompts_with_output(workspace_id=ws.id) ws_token_usage = await v1_processing.parse_workspace_token_usage(prompts_outputs) return ws_token_usage except 
Exception: @@ -771,3 +882,9 @@ async def delete_persona(persona_name: str): except Exception: logger.exception("Error while deleting persona") raise HTTPException(status_code=500, detail="Internal server error") + + +@cachetools.func.ttl_cache(maxsize=128, ttl=20 * 60) +def _get_latest_version(): + update_client = get_update_client_singleton() + return update_client.get_latest_version(Origin.FrontEnd) diff --git a/src/codegate/api/v1_models.py b/src/codegate/api/v1_models.py index dff26489e..6489f96d6 100644 --- a/src/codegate/api/v1_models.py +++ b/src/codegate/api/v1_models.py @@ -191,6 +191,7 @@ class AlertSummary(pydantic.BaseModel): malicious_packages: int pii: int secrets: int + total_alerts: int class PartialQuestionAnswer(pydantic.BaseModel): @@ -201,7 +202,6 @@ class PartialQuestionAnswer(pydantic.BaseModel): partial_questions: PartialQuestions answer: Optional[ChatMessage] model_token_usage: TokenUsageByModel - alerts: List[Alert] = [] class Conversation(pydantic.BaseModel): @@ -215,7 +215,21 @@ class Conversation(pydantic.BaseModel): chat_id: str conversation_timestamp: datetime.datetime token_usage_agg: Optional[TokenUsageAggregate] - alerts: List[Alert] = [] + alerts: Optional[List[Alert]] = [] + + +class ConversationSummary(pydantic.BaseModel): + """ + Represents a conversation summary. 
+ """ + + chat_id: str + prompt: ChatMessage + alerts_summary: AlertSummary + token_usage_agg: Optional[TokenUsageAggregate] + provider: Optional[str] + type: QuestionType + conversation_timestamp: datetime.datetime class AlertConversation(pydantic.BaseModel): @@ -333,3 +347,10 @@ class PersonaUpdateRequest(pydantic.BaseModel): new_name: str new_description: str + + +class PaginatedMessagesResponse(pydantic.BaseModel): + data: List[ConversationSummary] + limit: int + offset: int + total: int diff --git a/src/codegate/api/v1_processing.py b/src/codegate/api/v1_processing.py index 10f42075b..8281f7281 100644 --- a/src/codegate/api/v1_processing.py +++ b/src/codegate/api/v1_processing.py @@ -3,9 +3,7 @@ from collections import defaultdict from typing import AsyncGenerator, Dict, List, Optional, Tuple -import cachetools.func import regex as re -import requests import structlog from codegate.api import v1_models @@ -34,16 +32,6 @@ ] -@cachetools.func.ttl_cache(maxsize=128, ttl=20 * 60) -def fetch_latest_version() -> str: - url = "https://api.github.com/repos/stacklok/codegate/releases/latest" - headers = {"Accept": "application/vnd.github+json", "X-GitHub-Api-Version": "2022-11-28"} - response = requests.get(url, headers=headers, timeout=5) - response.raise_for_status() - data = response.json() - return data.get("tag_name", "unknown") - - async def generate_sse_events() -> AsyncGenerator[str, None]: """ SSE generator from queue @@ -202,15 +190,10 @@ async def _get_partial_question_answer( model=model, token_usage=token_usage, provider_type=provider ) - alerts: List[v1_models.Alert] = [ - v1_models.Alert.from_db_model(db_alert) for db_alert in row.alerts - ] - return PartialQuestionAnswer( partial_questions=request_message, answer=output_message, model_token_usage=model_token_usage, - alerts=alerts, ) @@ -374,7 +357,7 @@ async def match_conversations( for group in grouped_partial_questions: questions_answers: List[QuestionAnswer] = [] token_usage_agg = 
TokenUsageAggregate(tokens_by_model={}, token_usage=TokenUsage()) - alerts: List[v1_models.Alert] = [] + first_partial_qa = None for partial_question in sorted(group, key=lambda x: x.timestamp): # Partial questions don't contain the answer, so we need to find the corresponding @@ -398,8 +381,6 @@ async def match_conversations( qa = _get_question_answer_from_partial(selected_partial_qa) qa.question.message = parse_question_answer(qa.question.message) questions_answers.append(qa) - deduped_alerts = await remove_duplicate_alerts(selected_partial_qa.alerts) - alerts.extend(deduped_alerts) token_usage_agg.add_model_token_usage(selected_partial_qa.model_token_usage) # if we have a conversation with at least one question and answer @@ -413,7 +394,6 @@ async def match_conversations( chat_id=first_partial_qa.partial_questions.message_id, conversation_timestamp=first_partial_qa.partial_questions.timestamp, token_usage_agg=token_usage_agg, - alerts=alerts, ) for qa in questions_answers: map_q_id_to_conversation[qa.question.message_id] = conversation diff --git a/src/codegate/cli.py b/src/codegate/cli.py index 1ae3f9c22..674c3c880 100644 --- a/src/codegate/cli.py +++ b/src/codegate/cli.py @@ -11,13 +11,14 @@ from uvicorn.config import Config as UvicornConfig from uvicorn.server import Server +import codegate from codegate.ca.codegate_ca import CertificateAuthority from codegate.codegate_logging import LogFormat, LogLevel, setup_logging from codegate.config import Config, ConfigurationError from codegate.db.connection import ( init_db_sync, - init_session_if_not_exists, init_instance, + init_session_if_not_exists, ) from codegate.pipeline.factory import PipelineFactory from codegate.pipeline.sensitive_data.manager import SensitiveDataManager @@ -25,6 +26,8 @@ from codegate.providers.copilot.provider import CopilotProvider from codegate.server import init_app from codegate.storage.utils import restore_storage_backup +from codegate.updates.client import 
init_update_client_singleton +from codegate.updates.scheduled import ScheduledUpdateChecker from codegate.workspaces import crud as wscrud @@ -322,9 +325,17 @@ def serve( # noqa: C901 logger = structlog.get_logger("codegate").bind(origin="cli") init_db_sync(cfg.db_path) - init_instance(cfg.db_path) + instance_id = init_instance(cfg.db_path) init_session_if_not_exists(cfg.db_path) + # Initialize the update checking logic. + update_client = init_update_client_singleton( + cfg.update_service_url, codegate.__version__, instance_id + ) + update_checker = ScheduledUpdateChecker(update_client) + update_checker.daemon = True + update_checker.start() + # Check certificates and create CA if necessary logger.info("Checking certificates and creating CA if needed") ca = CertificateAuthority.get_instance() diff --git a/src/codegate/clients/__init__.py b/src/codegate/clients/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/codegate/config.py b/src/codegate/config.py index 179ec4d34..0b3b4b6d4 100644 --- a/src/codegate/config.py +++ b/src/codegate/config.py @@ -25,6 +25,9 @@ "llamacpp": "./codegate_volume/models", # Default LlamaCpp model path } +API_DEFAULT_PAGE_SIZE = 50 +API_MAX_PAGE_SIZE = 100 + @dataclass class Config: @@ -56,6 +59,10 @@ class Config: server_key: str = "server.key" force_certs: bool = False + # Update configuration. + use_update_service: bool = False + update_service_url: str = "https://updates.codegate.ai/api/v1/version" + max_fim_hash_lifetime: int = 60 * 5 # Time in seconds. Default is 5 minutes. 
# Min value is 0 (max similarity), max value is 2 (orthogonal) @@ -162,6 +169,8 @@ def from_file(cls, config_path: Union[str, Path]) -> "Config": force_certs=config_data.get("force_certs", cls.force_certs), prompts=prompts_config, provider_urls=provider_urls, + use_update_service=config_data.get("use_update_service", cls.use_update_service), + update_service_url=config_data.get("update_service_url", cls.update_service_url), ) except yaml.YAMLError as e: raise ConfigurationError(f"Failed to parse config file: {e}") @@ -206,11 +215,13 @@ def from_env(cls) -> "Config": if "CODEGATE_SERVER_KEY" in os.environ: config.server_key = os.environ["CODEGATE_SERVER_KEY"] if "CODEGATE_FORCE_CERTS" in os.environ: - config.force_certs = os.environ["CODEGATE_FORCE_CERTS"] + config.force_certs = cls.__bool_from_string(os.environ["CODEGATE_FORCE_CERTS"]) if "CODEGATE_DB_PATH" in os.environ: config.db_path = os.environ["CODEGATE_DB_PATH"] if "CODEGATE_VEC_DB_PATH" in os.environ: config.vec_db_path = os.environ["CODEGATE_VEC_DB_PATH"] + if "CODEGATE_UPDATE_SERVICE_URL" in os.environ: + config.update_service_url = os.environ["CODEGATE_UPDATE_SERVICE_URL"] # Load provider URLs from environment variables for provider in DEFAULT_PROVIDER_URLS.keys(): @@ -243,6 +254,7 @@ def load( force_certs: Optional[bool] = None, db_path: Optional[str] = None, vec_db_path: Optional[str] = None, + update_service_url: Optional[str] = None, ) -> "Config": """Load configuration with priority resolution. 
@@ -271,6 +283,7 @@ def load( force_certs: Optional flag to force certificate generation db_path: Optional path to the main SQLite database file vec_db_path: Optional path to the vector SQLite database file + update_service_url: Optional URL for the update service Returns: Config: Resolved configuration @@ -323,6 +336,8 @@ def load( config.db_path = env_config.db_path if "CODEGATE_VEC_DB_PATH" in os.environ: config.vec_db_path = env_config.vec_db_path + if "CODEGATE_UPDATE_SERVICE_URL" in os.environ: + config.update_service_url = env_config.update_service_url # Override provider URLs from environment for provider, url in env_config.provider_urls.items(): @@ -363,6 +378,8 @@ def load( config.vec_db_path = vec_db_path if force_certs is not None: config.force_certs = force_certs + if update_service_url is not None: + config.update_service_url = update_service_url # Set the __config class attribute Config.__config = config @@ -372,3 +389,7 @@ def load( @classmethod def get_config(cls) -> "Config": return cls.__config + + @staticmethod + def __bool_from_string(raw_value) -> bool: + return raw_value.lower() == "true" diff --git a/src/codegate/db/connection.py b/src/codegate/db/connection.py index 3f439aeaa..a828b6a37 100644 --- a/src/codegate/db/connection.py +++ b/src/codegate/db/connection.py @@ -4,7 +4,7 @@ import sqlite3 import uuid from pathlib import Path -from typing import Dict, List, Optional, Type +from typing import List, Optional, Tuple, Type import numpy as np import sqlite_vec_sl_tmp @@ -12,20 +12,22 @@ from alembic import command as alembic_command from alembic.config import Config as AlembicConfig from pydantic import BaseModel -from sqlalchemy import CursorResult, TextClause, event, text +from sqlalchemy import CursorResult, TextClause, bindparam, event, text from sqlalchemy.engine import Engine from sqlalchemy.exc import IntegrityError, OperationalError from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine from sqlalchemy.orm import 
sessionmaker +from codegate.config import API_DEFAULT_PAGE_SIZE from codegate.db.fim_cache import FimCache from codegate.db.models import ( ActiveWorkspace, Alert, - GetPromptWithOutputsRow, + AlertSummaryRow, + AlertTriggerType, + GetMessagesRow, GetWorkspaceByNameConditions, Instance, - IntermediatePromptWithOutputUsageAlerts, MuxRule, Output, Persona, @@ -598,10 +600,11 @@ async def delete_persona(self, persona_id: str) -> None: conditions = {"id": persona_id} await self._execute_with_no_return(sql, conditions) - async def init_instance(self) -> None: + async def init_instance(self) -> str: """ Initializes instance details in the database. """ + instance_id = str(uuid.uuid4()) sql = text( """ INSERT INTO instance (id, created_at) @@ -611,13 +614,14 @@ async def init_instance(self) -> None: try: instance = Instance( - id=str(uuid.uuid4()), + id=instance_id, created_at=datetime.datetime.now(datetime.timezone.utc), ) await self._execute_with_no_return(sql, instance.model_dump()) except IntegrityError as e: logger.debug(f"Exception type: {type(e)}") - raise AlreadyExistsError(f"Instance already initialized.") + raise AlreadyExistsError("Instance already initialized.") + return instance_id class DbReader(DbCodeGate): @@ -685,7 +689,11 @@ async def _exec_vec_db_query_to_pydantic( conn.close() return results - async def get_prompts_with_output(self, workpace_id: str) -> List[GetPromptWithOutputsRow]: + async def get_prompts_with_output( + self, workspace_id: Optional[str] = None, prompt_id: Optional[str] = None + ) -> List[GetMessagesRow]: + if not workspace_id and not prompt_id: + raise ValueError("Either workspace_id or prompt_id must be provided.") sql = text( """ SELECT @@ -699,80 +707,183 @@ async def get_prompts_with_output(self, workpace_id: str) -> List[GetPromptWithO o.output_cost FROM prompts p LEFT JOIN outputs o ON p.id = o.prompt_id - WHERE p.workspace_id = :workspace_id + WHERE (:workspace_id IS NULL OR p.workspace_id = :workspace_id) + AND (:prompt_id IS 
NULL OR p.id = :prompt_id) ORDER BY o.timestamp DESC """ ) - conditions = {"workspace_id": workpace_id} + conditions = {"workspace_id": workspace_id, "prompt_id": prompt_id} prompts = await self._exec_select_conditions_to_pydantic( - GetPromptWithOutputsRow, sql, conditions, should_raise=True + GetMessagesRow, sql, conditions, should_raise=True ) return prompts - async def get_prompts_with_output_alerts_usage_by_workspace_id( - self, workspace_id: str, trigger_category: Optional[str] = None - ) -> List[GetPromptWithOutputsRow]: + def _build_prompt_query( + self, + base_query: str, + workspace_id: str, + filter_by_ids: Optional[List[str]] = None, + filter_by_alert_trigger_categories: Optional[List[str]] = None, + filter_by_alert_trigger_types: Optional[List[str]] = None, + offset: Optional[int] = None, + page_size: Optional[int] = None, + ) -> Tuple[str, dict]: """ - Get all prompts with their outputs, alerts and token usage by workspace_id. + Helper method to construct SQL query and conditions for prompts based on filters. + + Args: + base_query: The base SQL query string with a placeholder for filter conditions. + workspace_id: The ID of the workspace to fetch prompts from. + filter_by_ids: Optional list of prompt IDs to filter by. + filter_by_alert_trigger_categories: Optional list of alert categories to filter by. + filter_by_alert_trigger_types: Optional list of alert trigger types to filter by. + offset: Number of records to skip (for pagination). + page_size: Number of records per page. + + Returns: + A tuple containing the formatted SQL query string and a dictionary of conditions. 
""" + conditions = {"workspace_id": workspace_id} + filter_conditions = [] - sql = text( - """ - SELECT - p.id as prompt_id, p.timestamp as prompt_timestamp, p.provider, p.request, p.type, - o.id as output_id, o.output, o.timestamp as output_timestamp, o.input_tokens, o.output_tokens, o.input_cost, o.output_cost, - a.id as alert_id, a.code_snippet, a.trigger_string, a.trigger_type, a.trigger_category, a.timestamp as alert_timestamp + if filter_by_alert_trigger_categories: + filter_conditions.append( + """AND (a.trigger_category IN :filter_by_alert_trigger_categories + OR a.trigger_category IS NULL)""" + ) + conditions["filter_by_alert_trigger_categories"] = filter_by_alert_trigger_categories + + if filter_by_alert_trigger_types: + filter_conditions.append( + """AND EXISTS (SELECT 1 FROM alerts a2 WHERE + a2.prompt_id = p.id AND a2.trigger_type IN :filter_by_alert_trigger_types)""" + ) + conditions["filter_by_alert_trigger_types"] = filter_by_alert_trigger_types + + if filter_by_ids: + filter_conditions.append("AND p.id IN :filter_by_ids") + conditions["filter_by_ids"] = filter_by_ids + + if offset is not None: + conditions["offset"] = offset + + if page_size is not None: + conditions["page_size"] = page_size + + filter_clause = " ".join(filter_conditions) + query = base_query.format(filter_conditions=filter_clause) + + return query, conditions + + async def get_prompts( + self, + workspace_id: str, + offset: int = 0, + page_size: int = API_DEFAULT_PAGE_SIZE, + filter_by_ids: Optional[List[str]] = None, + filter_by_alert_trigger_categories: Optional[List[str]] = None, + filter_by_alert_trigger_types: Optional[List[str]] = None, + ) -> List[Prompt]: + """ + Retrieve prompts with filtering and pagination. 
+ + Args: + workspace_id: The ID of the workspace to fetch prompts from + offset: Number of records to skip (for pagination) + page_size: Number of records per page + filter_by_ids: Optional list of prompt IDs to filter by + filter_by_alert_trigger_categories: Optional list of alert categories to filter by + filter_by_alert_trigger_types: Optional list of alert trigger types to filter by + + Returns: + List of Prompt containing prompt details + """ + # Build base query + base_query = """ + SELECT DISTINCT p.id, p.timestamp, p.provider, p.request, p.type, + p.workspace_id FROM prompts p + LEFT JOIN alerts a ON p.id = a.prompt_id + WHERE p.workspace_id = :workspace_id + {filter_conditions} + ORDER BY p.timestamp DESC + LIMIT :page_size OFFSET :offset + """ + + query, conditions = self._build_prompt_query( + base_query, + workspace_id, + filter_by_ids, + filter_by_alert_trigger_categories, + filter_by_alert_trigger_types, + offset, + page_size, + ) + sql = text(query) + + # Bind optional params + if filter_by_alert_trigger_categories: + sql = sql.bindparams(bindparam("filter_by_alert_trigger_categories", expanding=True)) + if filter_by_alert_trigger_types: + sql = sql.bindparams(bindparam("filter_by_alert_trigger_types", expanding=True)) + if filter_by_ids: + sql = sql.bindparams(bindparam("filter_by_ids", expanding=True)) + + # Execute query + rows = await self._exec_select_conditions_to_pydantic( + Prompt, sql, conditions, should_raise=True + ) + return rows + + async def get_total_messages_count_by_workspace_id( + self, + workspace_id: str, + filter_by_ids: Optional[List[str]] = None, + filter_by_alert_trigger_categories: Optional[List[str]] = None, + filter_by_alert_trigger_types: Optional[List[str]] = None, + ) -> int: + """ + Get total count of unique messages for a given workspace_id, + considering trigger_category. 
+ """ + base_query = """ + SELECT COUNT(DISTINCT p.id) FROM prompts p - LEFT JOIN outputs o ON p.id = o.prompt_id LEFT JOIN alerts a ON p.id = a.prompt_id WHERE p.workspace_id = :workspace_id - AND (a.trigger_category = :trigger_category OR a.trigger_category is NULL) - ORDER BY o.timestamp DESC, a.timestamp DESC - """ # noqa: E501 - ) - # If trigger category is None we want to get all alerts - trigger_category = trigger_category if trigger_category else "%" - conditions = {"workspace_id": workspace_id, "trigger_category": trigger_category} - rows: List[IntermediatePromptWithOutputUsageAlerts] = ( - await self._exec_select_conditions_to_pydantic( - IntermediatePromptWithOutputUsageAlerts, sql, conditions, should_raise=True - ) + {filter_conditions} + """ + + query, conditions = self._build_prompt_query( + base_query, + workspace_id, + filter_by_ids, + filter_by_alert_trigger_categories, + filter_by_alert_trigger_types, ) - prompts_dict: Dict[str, GetPromptWithOutputsRow] = {} - for row in rows: - prompt_id = row.prompt_id - if prompt_id not in prompts_dict: - prompts_dict[prompt_id] = GetPromptWithOutputsRow( - id=row.prompt_id, - timestamp=row.prompt_timestamp, - provider=row.provider, - request=row.request, - type=row.type, - output_id=row.output_id, - output=row.output, - output_timestamp=row.output_timestamp, - input_tokens=row.input_tokens, - output_tokens=row.output_tokens, - input_cost=row.input_cost, - output_cost=row.output_cost, - alerts=[], - ) - if row.alert_id: - alert = Alert( - id=row.alert_id, - prompt_id=row.prompt_id, - code_snippet=row.code_snippet, - trigger_string=row.trigger_string, - trigger_type=row.trigger_type, - trigger_category=row.trigger_category, - timestamp=row.alert_timestamp, - ) - prompts_dict[prompt_id].alerts.append(alert) + sql = text(query) - return list(prompts_dict.values()) + # Bind optional params + if filter_by_alert_trigger_categories: + sql = sql.bindparams(bindparam("filter_by_alert_trigger_categories", 
expanding=True)) + if filter_by_alert_trigger_types: + sql = sql.bindparams(bindparam("filter_by_alert_trigger_types", expanding=True)) + if filter_by_ids: + sql = sql.bindparams(bindparam("filter_by_ids", expanding=True)) - async def get_alerts_by_workspace( - self, workspace_id: str, trigger_category: Optional[str] = None + async with self._async_db_engine.begin() as conn: + try: + result = await conn.execute(sql, conditions) + count = result.scalar() # Fetches the integer result directly + return count or 0 # Ensure it returns an integer + except Exception as e: + logger.error(f"Failed to fetch message count. Error: {e}") + return 0 # Return 0 in case of failure + + async def get_alerts_by_workspace_or_prompt_id( + self, + workspace_id: str, + prompt_id: Optional[str] = None, + trigger_category: Optional[str] = None, ) -> List[Alert]: sql = text( """ @@ -791,6 +902,10 @@ async def get_alerts_by_workspace( ) conditions = {"workspace_id": workspace_id} + if prompt_id: + sql = text(sql.text + " AND a.prompt_id = :prompt_id") + conditions["prompt_id"] = prompt_id + if trigger_category: sql = text(sql.text + " AND a.trigger_category = :trigger_category") conditions["trigger_category"] = trigger_category @@ -802,37 +917,53 @@ async def get_alerts_by_workspace( ) return prompts - async def get_alerts_summary_by_workspace(self, workspace_id: str) -> dict: - """Get aggregated alert summary counts for a given workspace_id.""" + async def get_alerts_summary( + self, workspace_id: str = None, prompt_id: str = None + ) -> AlertSummaryRow: + """Get aggregated alert summary counts for a given workspace_id or prompt id.""" + if not workspace_id and not prompt_id: + raise ValueError("Either workspace_id or prompt_id must be provided.") + + filters = [] + conditions = {} + + if workspace_id: + filters.append("p.workspace_id = :workspace_id") + conditions["workspace_id"] = workspace_id + + if prompt_id: + filters.append("a.prompt_id = :prompt_id") + conditions["prompt_id"] = 
prompt_id + + filter_clause = " AND ".join(filters) + sql = text( - """ + f""" SELECT - COUNT(*) AS total_alerts, - SUM(CASE WHEN a.trigger_type = 'codegate-secrets' THEN 1 ELSE 0 END) - AS codegate_secrets_count, - SUM(CASE WHEN a.trigger_type = 'codegate-context-retriever' THEN 1 ELSE 0 END) - AS codegate_context_retriever_count, - SUM(CASE WHEN a.trigger_type = 'codegate-pii' THEN 1 ELSE 0 END) - AS codegate_pii_count + COUNT(*) AS total_alerts, + SUM(CASE WHEN a.trigger_type = '{AlertTriggerType.CODEGATE_SECRETS.value}' THEN 1 ELSE 0 END) + AS codegate_secrets_count, + SUM(CASE WHEN a.trigger_type = '{AlertTriggerType.CODEGATE_CONTEXT_RETRIEVER.value}' THEN 1 ELSE 0 END) + AS codegate_context_retriever_count, + SUM(CASE WHEN a.trigger_type = '{AlertTriggerType.CODEGATE_PII.value}' THEN 1 ELSE 0 END) + AS codegate_pii_count FROM alerts a INNER JOIN prompts p ON p.id = a.prompt_id - WHERE p.workspace_id = :workspace_id - """ + WHERE {filter_clause} + """ # noqa: E501 # nosec ) - conditions = {"workspace_id": workspace_id} - async with self._async_db_engine.begin() as conn: result = await conn.execute(sql, conditions) row = result.fetchone() # Return a dictionary with counts (handling None values safely) - return { - "codegate_secrets_count": row.codegate_secrets_count or 0 if row else 0, - "codegate_context_retriever_count": ( - row.codegate_context_retriever_count or 0 if row else 0 - ), - "codegate_pii_count": row.codegate_pii_count or 0 if row else 0, - } + + return AlertSummaryRow( + total_alerts=row.total_alerts or 0 if row else 0, + total_secrets_count=row.codegate_secrets_count or 0 if row else 0, + total_packages_count=row.codegate_context_retriever_count or 0 if row else 0, + total_pii_count=row.codegate_pii_count or 0 if row else 0, + ) async def get_workspaces(self) -> List[WorkspaceWithSessionInfo]: sql = text( @@ -1197,18 +1328,21 @@ def init_session_if_not_exists(db_path: Optional[str] = None): logger.info("Session in DB initialized successfully.") 
-def init_instance(db_path: Optional[str] = None): +def init_instance(db_path: Optional[str] = None) -> str: db_reader = DbReader(db_path) instance = asyncio.run(db_reader.get_instance()) # Initialize instance if not already initialized. if not instance: db_recorder = DbRecorder(db_path) try: - asyncio.run(db_recorder.init_instance()) + instance_id = asyncio.run(db_recorder.init_instance()) + logger.info("Instance initialized successfully.") + return instance_id except Exception as e: logger.error(f"Failed to initialize instance in DB: {e}") raise - logger.info("Instance initialized successfully.") + else: + return instance[0].id if __name__ == "__main__": diff --git a/src/codegate/db/models.py b/src/codegate/db/models.py index 07c4c8edf..7f8ef4348 100644 --- a/src/codegate/db/models.py +++ b/src/codegate/db/models.py @@ -115,6 +115,21 @@ class WorkspaceRow(BaseModel): custom_instructions: Optional[str] +class AlertSummaryRow(BaseModel): + """An alert summary row entry""" + + total_alerts: int + total_secrets_count: int + total_packages_count: int + total_pii_count: int + + +class AlertTriggerType(str, Enum): + CODEGATE_PII = "codegate-pii" + CODEGATE_CONTEXT_RETRIEVER = "codegate-context-retriever" + CODEGATE_SECRETS = "codegate-secrets" + + class GetWorkspaceByNameConditions(BaseModel): name: WorkspaceNameStr @@ -322,3 +337,18 @@ class PersonaDistance(Persona): """ distance: float + + +class GetMessagesRow(BaseModel): + id: Any + timestamp: Any + provider: Optional[Any] + request: Any + type: Any + output_id: Optional[Any] + output: Optional[Any] + output_timestamp: Optional[Any] + input_tokens: Optional[int] + output_tokens: Optional[int] + input_cost: Optional[float] + output_cost: Optional[float] diff --git a/src/codegate/updates/client.py b/src/codegate/updates/client.py new file mode 100644 index 000000000..f899a43b5 --- /dev/null +++ b/src/codegate/updates/client.py @@ -0,0 +1,57 @@ +from enum import Enum + +import requests +import structlog + +logger = 
structlog.get_logger("codegate") + + +__update_client_singleton = None + + +# Enum representing whether the request is coming from the front-end or the back-end. +class Origin(Enum): + FrontEnd = "FE" + BackEnd = "BE" + + +class UpdateClient: + def __init__(self, update_url: str, current_version: str, instance_id: str): + self.__update_url = update_url + self.__current_version = current_version + self.__instance_id = instance_id + + def get_latest_version(self, origin: Origin) -> str: + """ + Retrieves the latest version of CodeGate from updates.codegate.ai + """ + headers = { + "X-Instance-ID": self.__instance_id, + "User-Agent": f"codegate/{self.__current_version} {origin.value}", + } + + try: + response = requests.get(self.__update_url, headers=headers, timeout=10) + # Throw if the request was not successful. + response.raise_for_status() + return response.json()["version"] + except Exception as e: + logger.error(f"Error fetching latest version from {self.__update_url}: {e}") + return "unknown" + + +# Use a singleton since we do not have a good way of doing dependency injection +# with the API endpoints.
+def init_update_client_singleton( + update_url: str, current_version: str, instance_id: str +) -> UpdateClient: + global __update_client_singleton + __update_client_singleton = UpdateClient(update_url, current_version, instance_id) + return __update_client_singleton + + +def get_update_client_singleton() -> UpdateClient: + global __update_client_singleton + if __update_client_singleton is None: + raise ValueError("UpdateClient singleton not initialized") + return __update_client_singleton diff --git a/src/codegate/updates/scheduled.py b/src/codegate/updates/scheduled.py new file mode 100644 index 000000000..f0e649efe --- /dev/null +++ b/src/codegate/updates/scheduled.py @@ -0,0 +1,34 @@ +import threading +import time + +import structlog + +import codegate +from codegate.updates.client import Origin, UpdateClient + +logger = structlog.get_logger("codegate") + + +class ScheduledUpdateChecker(threading.Thread): + """ + ScheduledUpdateChecker calls the UpdateClient on a recurring interval. + This is implemented as a separate thread to avoid blocking the main thread. + A dedicated scheduling library could have been used, but the requirements + are trivial, and a simple hand-rolled solution is sufficient. + """ + + def __init__(self, client: UpdateClient, interval_seconds: int = 14400): # 4 hours in seconds + super().__init__() + self.__client = client + self.__interval_seconds = interval_seconds + + def run(self): + """ + Overrides the `run` method of threading.Thread. 
+ """ + while True: + logger.info("Checking for CodeGate updates") + latest = self.__client.get_latest_version(Origin.BackEnd) + if latest != codegate.__version__: + logger.warning(f"A new version of CodeGate is available: {latest}") + time.sleep(self.__interval_seconds) diff --git a/tests/integration/vllm/testcases.yaml b/tests/integration/vllm/testcases.yaml index 52df95984..009783e50 100644 --- a/tests/integration/vllm/testcases.yaml +++ b/tests/integration/vllm/testcases.yaml @@ -82,9 +82,9 @@ testcases: "#- coding: utf-8", "```" ], - "prompt":"# Do not add comments\n<|fim_prefix|>\n# codegate/greet.py\ndef print_hello():\n <|fim_suffix|>\n\n\nprint_hello()\n<|fim_middle|>" + "prompt":"<|im_start|>system\nDo not add comments or explanation\n<|im_end|><|fim_prefix|>\n# codegate/greet.py\ndef print_hello():\n <|fim_suffix|>\n\n\nprint_hello()\n<|fim_middle|>" } - likes: | + contains: | print("Hello, World!") vllm_malicious_package_question: diff --git a/tests/test_server.py b/tests/test_server.py index bcf55e7eb..0bdbb965a 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -75,18 +75,20 @@ def test_health_check(test_client: TestClient) -> None: assert response.json() == {"status": "healthy"} -@patch("codegate.api.v1_processing.fetch_latest_version", return_value="foo") -def test_version_endpoint(mock_fetch_latest_version, test_client: TestClient) -> None: +@patch("codegate.api.v1._get_latest_version") +def test_version_endpoint(mock_get_latest_version, test_client: TestClient) -> None: """Test the version endpoint.""" + # Mock the __get_latest_version function to return a specific version + mock_get_latest_version.return_value = "v1.2.3" + response = test_client.get("/api/v1/version") assert response.status_code == 200 response_data = response.json() - - assert response_data["current_version"] == __version__.lstrip("v") - assert response_data["latest_version"] == "foo" - assert isinstance(response_data["is_latest"], bool) + assert 
response_data["current_version"] == "0.1.7" + assert response_data["latest_version"] == "1.2.3" assert response_data["is_latest"] is False + assert response_data["error"] is None @patch("codegate.pipeline.sensitive_data.manager.SensitiveDataManager")