diff --git a/pgml-apps/pgml-chat/.env.template b/pgml-apps/pgml-chat/.env.template index 2d582a412..f08a1b630 100644 --- a/pgml-apps/pgml-chat/.env.template +++ b/pgml-apps/pgml-chat/.env.template @@ -1,14 +1,6 @@ OPENAI_API_KEY= DATABASE_URL= -MODEL=hkunlp/instructor-xl -MODEL_PARAMS={"instruction": "Represent the Wikipedia document for retrieval: "} -QUERY_PARAMS={"instruction": "Represent the Wikipedia question for retrieving supporting documents: "} -SYSTEM_PROMPT="You are an assistant to answer questions about an open source software named PostgresML. Your name is PgBot. You are based out of San Francisco, California." -BASE_PROMPT="Given relevant parts of a document and a question, create a final answer.\ - Include a SQL query in the answer wherever possible. \ - Use the following portion of a long document to see if any of the text is relevant to answer the question.\ - \nReturn any relevant text verbatim.\n{context}\nQuestion: {question}\n \ - If the context is empty then ask for clarification and suggest user to send an email to team@postgresml.org or join PostgresML [Discord](https://discord.gg/DmyJP3qJ7U)." + SLACK_BOT_TOKEN= SLACK_APP_TOKEN= DISCORD_BOT_TOKEN= \ No newline at end of file diff --git a/pgml-apps/pgml-chat/.gitignore b/pgml-apps/pgml-chat/.gitignore index 6769e21d9..6b45645ee 100644 --- a/pgml-apps/pgml-chat/.gitignore +++ b/pgml-apps/pgml-chat/.gitignore @@ -157,4 +157,7 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
-#.idea/ \ No newline at end of file +#.idea/ + +pgml_chat/pgml_playground.py +pgml_chat/llama2.py \ No newline at end of file diff --git a/pgml-apps/pgml-chat/README.md b/pgml-apps/pgml-chat/README.md index 64c925f8e..c1d570035 100644 --- a/pgml-apps/pgml-chat/README.md +++ b/pgml-apps/pgml-chat/README.md @@ -34,23 +34,16 @@ wget https://raw.githubusercontent.com/postgresml/postgresml/master/pgml-apps/pg ```bash OPENAI_API_KEY= DATABASE_URL= -MODEL=hkunlp/instructor-xl -MODEL_PARAMS={"instruction": "Represent the Wikipedia document for retrieval: "} -QUERY_PARAMS={"instruction": "Represent the Wikipedia question for retrieving supporting documents: "} -SYSTEM_PROMPT="You are an assistant to answer questions about an open source software named PostgresML. Your name is PgBot. You are based out of San Francisco, California." -BASE_PROMPT="Given relevant parts of a document and a question, create a final answer.\ - Include a SQL query in the answer wherever possible. \ - Use the following portion of a long document to see if any of the text is relevant to answer the question.\ - \nReturn any relevant text verbatim.\n{context}\nQuestion: {question}\n \ - If the context is empty then ask for clarification and suggest user to send an email to team@postgresml.org or join PostgresML [Discord](https://discord.gg/DmyJP3qJ7U)." 
``` # Usage You can get help on the command line interface by running: ```bash -(pgml-bot-builder-py3.9) pgml-chat % pgml-chat --help -usage: pgml-chat [-h] --collection_name COLLECTION_NAME [--root_dir ROOT_DIR] [--stage {ingest,chat}] [--chat_interface {cli,slack}] +(pgml-bot-builder-py3.9) pgml-chat % pgml-chat --help +usage: pgml-chat [-h] --collection_name COLLECTION_NAME [--root_dir ROOT_DIR] [--stage {ingest,chat}] [--chat_interface {cli,slack,discord}] + [--chat_history CHAT_HISTORY] [--bot_name BOT_NAME] [--bot_language BOT_LANGUAGE] [--bot_topic BOT_TOPIC] + [--bot_topic_primary_language BOT_TOPIC_PRIMARY_LANGUAGE] [--bot_persona BOT_PERSONA] PostgresML Chatbot Builder @@ -61,8 +54,19 @@ optional arguments: --root_dir ROOT_DIR Input folder to scan for markdown files. Required for ingest stage. Not required for chat stage (default: None) --stage {ingest,chat} Stage to run (default: chat) - --chat_interface {cli, slack, discord} + --chat_interface {cli,slack,discord} Chat interface to use (default: cli) + --chat_history CHAT_HISTORY + Number of messages from history used for generating response (default: 0) + --bot_name BOT_NAME Name of the bot (default: PgBot) + --bot_language BOT_LANGUAGE + Language of the bot (default: English) + --bot_topic BOT_TOPIC + Topic of the bot (default: PostgresML) + --bot_topic_primary_language BOT_TOPIC_PRIMARY_LANGUAGE + Primary programming language of the topic (default: SQL) + --bot_persona BOT_PERSONA + Persona of the bot (default: Engineer) ``` ## Ingest In this step, we ingest documents, chunk documents, generate embeddings and index these embeddings for fast query. @@ -161,14 +165,8 @@ pip install . -# Options -You can control the behavior of the chatbot by setting the following environment variables: -- `SYSTEM_PROMPT`: This is the prompt that is used to initialize the chatbot. You can customize this prompt to change the behavior of the chatbot. 
For example, you can change the name of the chatbot or the location of the chatbot. -- `BASE_PROMPT`: This is the prompt that is used to generate responses to user queries. You can customize this prompt to change the behavior of the chatbot. -- `MODEL`: This is the open source embedding model used to generate embeddings for the documents. You can change this to use a different model. - # Roadmap -- ~~`hyerbot --chat_interface {cli, slack, discord}` that supports Slack, and Discord.~~ +- ~~Use a collection for chat history that can be retrieved and used to generate responses.~~ - Support for file formats like rst, html, pdf, docx, etc. - Support for open source models in addition to OpenAI for chat completion. -- Support for multi-turn converstaions using converstaion buffer. Use a collection for chat history that can be retrieved and used to generate responses. +- Support for multi-turn conversations using conversation buffer. diff --git a/pgml-apps/pgml-chat/pgml_chat/.gitignore b/pgml-apps/pgml-chat/pgml_chat/.gitignore new file mode 100644 index 000000000..655ada3a8 --- /dev/null +++ b/pgml-apps/pgml-chat/pgml_chat/.gitignore @@ -0,0 +1,2 @@ +pgml_playground.py +llama2.py \ No newline at end of file diff --git a/pgml-apps/pgml-chat/pgml_chat/main.py b/pgml-apps/pgml-chat/pgml_chat/main.py index 4b731b8bc..3dce96836 100644 --- a/pgml-apps/pgml-chat/pgml_chat/main.py +++ b/pgml-apps/pgml-chat/pgml_chat/main.py @@ -1,5 +1,5 @@ import asyncio -from pgml import Collection, Model, Splitter, Pipeline, migrate, init_logger +from pgml import Collection, Model, Splitter, Pipeline, migrate, init_logger, Builtins import logging from rich.logging import RichHandler from rich.progress import track @@ -11,6 +11,8 @@ from time import time import openai import signal +from uuid import uuid4 +import pendulum import ast from slack_bolt.async_app import AsyncApp @@ -61,6 +63,54 @@ def handler(signum, frame): help="Chat interface to use", ) +parser.add_argument( + "--chat_history", 
+ dest="chat_history", + type=int, + default=0, + help="Number of messages from history used for generating response", +) + +parser.add_argument( + "--bot_name", + dest="bot_name", + type=str, + default="PgBot", + help="Name of the bot", +) + +parser.add_argument( + "--bot_language", + dest="bot_language", + type=str, + default="English", + help="Language of the bot", +) + +parser.add_argument( + "--bot_topic", + dest="bot_topic", + type=str, + default="PostgresML", + help="Topic of the bot", +) +parser.add_argument( + "--bot_topic_primary_language", + dest="bot_topic_primary_language", + type=str, + default="SQL", + help="Primary programming language of the topic", +) + +parser.add_argument( + "--bot_persona", + dest="bot_persona", + type=str, + default="Engineer", + help="Persona of the bot", +) + + args = parser.parse_args() FORMAT = "%(message)s" @@ -77,9 +127,19 @@ def handler(signum, frame): # The code is using the `argparse` module to parse command line arguments. +chat_history_collection_name = args.collection_name + "_chat_history" collection = Collection(args.collection_name) +chat_collection = Collection(chat_history_collection_name) stage = args.stage chat_interface = args.chat_interface +chat_history = args.chat_history + +# Get all bot related environment variables +bot_name = args.bot_name +bot_language = args.bot_language +bot_persona = args.bot_persona +bot_topic = args.bot_topic +bot_topic_primary_language = args.bot_topic_primary_language # The above code is retrieving environment variables and assigning their values to various variables. 
database_url = os.environ.get("DATABASE_URL") @@ -87,16 +147,73 @@ def handler(signum, frame): splitter_params = os.environ.get( "SPLITTER_PARAMS", {"chunk_size": 1500, "chunk_overlap": 40} ) + splitter = Splitter(splitter_name, splitter_params) -model_name = os.environ.get("MODEL", "intfloat/e5-small") -model_params = ast.literal_eval(os.environ.get("MODEL_PARAMS", {})) +model_name = "hkunlp/instructor-xl" +model_embedding_instruction = "Represent the %s document for retrieval: " % (bot_topic) +model_params = {"instruction": model_embedding_instruction} +# model_name = "BAAI/bge-large-en-v1.5" +# model_params = {} model = Model(model_name, "pgml", model_params) pipeline = Pipeline(args.collection_name + "_pipeline", model, splitter) -query_params = ast.literal_eval(os.environ.get("QUERY_PARAMS", {})) -system_prompt = os.environ.get("SYSTEM_PROMPT") -base_prompt = os.environ.get("BASE_PROMPT") +chat_history_pipeline = Pipeline( + chat_history_collection_name + "_pipeline", model, splitter +) + +query_params_instruction = ( + "Represent the %s question for retrieving supporting documents: " % (bot_topic) +) +query_params = {"instruction": query_params_instruction} +# query_params = {} + +default_system_prompt_template = """ +You are an assistant to answer questions about {topic}. +Your name is {name}. You speak like {persona} in {language}. Use the given list of documents to answer user's question. +Use the conversation history if it is applicable to answer the question. +Use the following steps: + +1. Identify if the user input is really a question. +2. If the user input is not related to the {topic} then respond that it is not related to the {topic}. +3. If the user input is related to the {topic} then first identify relevant documents from the list of documents. +4. If the documents that you found relevant have information to completely and accurately answers the question then respond with the answer. +5. 
If the documents that you found relevant have code snippets then respond with the code snippets. +6. Most importantly, don't make up code snippets that are not present in the documents. +7. If the user input is generic like Cool, Thanks, Hello, etc. then respond with a generic answer. +""" + +default_system_prompt = default_system_prompt_template.format( + topic=bot_topic, + name=bot_name, + persona=bot_persona, + language=bot_language, + response_programming_language=bot_topic_primary_language, +) + +system_prompt = default_system_prompt + +base_prompt = """ +{conversation_history} +#### +Documents +#### +{context} +### +User: {question} +### + +Helpful Answer:""" + openai_api_key = os.environ.get("OPENAI_API_KEY") +system_prompt_document = [ + { + "text": system_prompt, + "id": str(uuid4())[:8], + "interface": chat_interface, + "role": "system", + "timestamp": pendulum.now().timestamp(), + } +] async def upsert_documents(folder: str) -> int: log.info("Scanning " + folder + " for markdown files") @@ -117,13 +234,112 @@ async def upsert_documents(folder: str) -> int: return len(md_files) +async def generate_chat_response( + user_input, + system_prompt, + openai_api_key, + temperature=0.7, + max_tokens=256, + top_p=0.9, + user_name="", +): + messages = [] + messages.append({"role": "system", "content": system_prompt}) + + chat_history_messages = await chat_collection.get_documents( { + "limit" : chat_history*2, + "order_by": {"timestamp": "desc"}, + "filter": { + "metadata": { + "$and" : [ + { + "$or": + [ + {"role": {"$eq": "assistant"}}, + {"role": {"$eq": "user"}} + ] + }, + { + "interface" : { + "$eq" : chat_interface + } + }, + { + "user_name" : { + "$eq" : user_name + } + } + ] + } + } + } + ) + + # Reverse the order so that user messages are first + + chat_history_messages.reverse() + + conversation_history = "" + for entry in chat_history_messages: + document = entry["document"] + if document["role"] == "user": + conversation_history += "User: " + 
document["text"] + "\n" + if document["role"] == "assistant": + conversation_history += "Assistant: " + document["text"] + "\n" + + log.info(conversation_history) + + history_documents = [] + user_message_id = str(uuid4())[:8] + _document = { + "text": user_input, + "id": user_message_id, + "interface": chat_interface, + "role": "user", + "timestamp": pendulum.now().timestamp(), + "user_name": user_name, + } + history_documents.append(_document) + + if user_input: + query = await get_prompt(user_input,conversation_history) + + messages.append({"role": "user", "content": query}) + + log.info(messages) + + response = await generate_response( + messages, + openai_api_key, + max_tokens=max_tokens, + temperature=temperature, + top_p=top_p, + ) + + _document = { + "text": response, + "id": str(uuid4())[:8], + "parent_message_id" : user_message_id, + "interface": chat_interface, + "role": "assistant", + "timestamp": pendulum.now().timestamp(), + "user_name": user_name, + } + history_documents.append(_document) + + await chat_collection.upsert_documents(history_documents) + + return response + + async def generate_response( messages, openai_api_key, temperature=0.7, max_tokens=256, top_p=0.9 ): openai.api_key = openai_api_key log.debug("Generating response from OpenAI API: " + str(messages)) response = openai.ChatCompletion.create( - model="gpt-3.5-turbo", + # model="gpt-3.5-turbo-16k", + model="gpt-4", messages=messages, temperature=temperature, max_tokens=max_tokens, @@ -137,43 +353,58 @@ async def generate_response( async def ingest_documents(folder: str): # Add the pipeline to the collection, does nothing if we have already added it await collection.add_pipeline(pipeline) + await chat_collection.add_pipeline(chat_history_pipeline) # This will upsert, chunk, and embed the contents in the folder total_docs = await upsert_documents(folder) log.info("Total documents: " + str(total_docs)) -async def get_prompt(user_input: str = ""): +async def get_prompt(user_input: str = 
"", conversation_history: str = "") -> str: + query_input = "In the context of " + bot_topic + ", " + user_input vector_results = ( await collection.query() - .vector_recall(user_input, pipeline, query_params) - .limit(2) + .vector_recall(query_input, pipeline, query_params) + .limit(5) .fetch_all() ) log.info(vector_results) - context = "" - for result in vector_results: - context += result[1] + "\n" - - query = base_prompt.format(context=context, question=user_input) + context = "" + for id, result in enumerate(vector_results): + if result[0] > 0.6: + context += "#### \n Document %d: " % (id) + result[1] + "\n" + + if conversation_history: + conversation_history = "#### \n Conversation History: \n" + conversation_history + + query = base_prompt.format( + conversation_history=conversation_history, + context=context, + question=user_input, + topic=bot_topic, + persona=bot_persona, + language=bot_language, + response_programming_language=bot_topic_primary_language, + ) return query async def chat_cli(): - user_input = "Who are you?" + user_name = os.environ.get("USER") while True: try: - messages = [{"role": "system", "content": system_prompt}] - if user_input: - query = await get_prompt(user_input) - messages.append({"role": "user", "content": query}) - response = await generate_response( - messages, openai_api_key, max_tokens=512, temperature=0.0 - ) - log.info("PgBot: " + response) - user_input = input("User (Ctrl-C to exit): ") + response = await generate_chat_response( + user_input, + system_prompt, + openai_api_key, + max_tokens=512, + temperature=0.3, + top_p=0.9, + user_name=user_name, + ) + print("PgBot: " + response) except KeyboardInterrupt: print("Exiting...") break @@ -191,15 +422,17 @@ async def chat_slack(): @app.message(f"<@{bot_user_id}>") async def message_hello(message, say): print("Message received... 
") - messages = [{"role": "system", "content": system_prompt}] user_input = message["text"] - - query = await get_prompt(user_input) - messages.append({"role": "user", "content": query}) - response = await generate_response( - messages, openai_api_key, max_tokens=512, temperature=1.0 - ) user = message["user"] + response = await generate_chat_response( + user_input, + system_prompt, + openai_api_key, + max_tokens=512, + temperature=0.7, + user_name = user, + ) + await say(text=f"<@{user}> {response}") @@ -218,21 +451,18 @@ async def message_hello(message, say): @client.event async def on_ready(): + await chat_collection.upsert_documents(system_prompt_document) print(f"We have logged in as {client.user}") @client.event async def on_message(message): bot_mention = f"<@{client.user.id}>" - messages = [{"role": "system", "content": system_prompt}] if message.author != client.user and bot_mention in message.content: print("Discord response in progress ..") user_input = message.content - query = await get_prompt(user_input) - - messages.append({"role": "user", "content": query}) - response = await generate_response( - messages, openai_api_key, max_tokens=512, temperature=1.0 + response = await generate_chat_response( + user_input, system_prompt, openai_api_key, max_tokens=512, temperature=0.7,user_name=message.author.name ) await message.channel.send(response) @@ -243,6 +473,7 @@ async def run(): chunks, and logs the total number of documents and chunks. """ log.info("Starting pgml-chat.... 
") + await chat_collection.upsert_documents(system_prompt_document) # await migrate() if stage == "ingest": root_dir = args.root_dir @@ -265,4 +496,6 @@ def main(): client.run(os.environ["DISCORD_BOT_TOKEN"]) else: asyncio.run(run()) + + main() diff --git a/pgml-apps/pgml-chat/pgml_chat/pgml_playground.py b/pgml-apps/pgml-chat/pgml_chat/pgml_playground.py new file mode 100644 index 000000000..866310ed4 --- /dev/null +++ b/pgml-apps/pgml-chat/pgml_chat/pgml_playground.py @@ -0,0 +1,116 @@ +from pgml import Collection, Builtins, Pipeline +import asyncio +from rich import print + + # chat_history_user_messages = await chat_collection.query().vector_recall(user_input, chat_history_pipeline, query_params)( + # { + # "limit" : chat_history, + # "filter": { + # "metadata": { + # "$and": [ + # {{"role": {"$eq": "user"}}, + # {{"interface": {"$eq": chat_interface}}, + # ] + # } + # }, + # } + # ).fetch_all() + + # chat_history_assistant_messages = await chat_collection.query().vector_recall(user_input, chat_history_pipeline, query_params)( + # { + # "limit" : chat_history, + # "filter": { + # "metadata": { + # "$and": [ + # {"role": {"$eq": "assistant"}}, + # {"interface": {"$eq": chat_interface}}, + # ] + # } + # }, + # } + # ).fetch_all() + + +async def main(): + collection = Collection("pgml_chat_all_docs_4_chat_history") + builtins = Builtins() + query = """SELECT metadata->>'role' as role, text as content from %s.documents + WHERE metadata @> '{\"interface\" : \"cli\"}'::JSONB + AND metadata @> '{\"role\" : \"user\"}'::JSONB + OR metadata @> '{\"role\" : \"assistant\"}'::JSONB + ORDER BY metadata->>'timestamp' DESC LIMIT %d""" % ( + "pgml_chat_readme_1_chat_history", + 4, + ) + results = await builtins.query(query).fetch_all() + results.reverse() + # print(results) + documents = await collection.get_documents( + { "limit": 3, + "order_by": {"timestamp": "desc"}, + "filter": { + "metadata": { + "$and": [ + # {"role": {"$eq": "assistant"}}, + {"interface": {"$eq": 
"cli"}}, + ] + } + } + } + ) + print(documents) + pipeline = Pipeline("pgml_chat_all_docs_4_chat_history_pipeline") + chat_history_user_messages = ( + await collection.query() + .vector_recall( + "how do I use xgboost", + pipeline, + { + "instruction": "Represent the question for retrieving supporting documents: " + }, + ) + .limit(2) + .filter( + { + "metadata": { + "$and": [ + {"role": {"$eq": "user"}}, + {"interface": {"$eq": "discord"}}, + ] + } + } + ) + .fetch_all() + ) + + # print(chat_history_user_messages) + + results = ( + await collection.query() + .vector_recall( + "PostgresML on your Ubuntu machine", + pipeline, + { + "instruction": "Represent the question for retrieving supporting documents: " + }, + ) + .limit(10) + .filter( + { + "metadata": { + "$and": [ + {"role": {"$eq": "assistant"}}, + {"interface": {"$eq": "cli"}}, + ] + } + } + ) + .fetch_all() + ) + # print(results) + + # llama2-7b-chat + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/pgml-apps/pgml-chat/poetry.lock b/pgml-apps/pgml-chat/poetry.lock index 682eacb13..dc95a2fea 100644 --- a/pgml-apps/pgml-chat/poetry.lock +++ b/pgml-apps/pgml-chat/poetry.lock @@ -126,14 +126,14 @@ frozenlist = ">=1.1.0" [[package]] name = "async-timeout" -version = "4.0.2" +version = "4.0.3" description = "Timeout context manager for asyncio programs" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] [[package]] @@ -155,36 +155,41 @@ docs = 
["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[package.source] +type = "legacy" +url = "https://test.pypi.org/simple" +reference = "testpypi" + [[package]] name = "black" -version = "23.7.0" +version = "23.9.1" description = "The uncompromising code formatter." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = 
"sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, + {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, + {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, + {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, + {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, + {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, + {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, + 
{file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, + {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, + {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, + {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, + {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, ] [package.dependencies] @@ -194,7 +199,7 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -301,19 +306,24 @@ files = [ [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = 
"sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[package.source] +type = "legacy" +url = "https://test.pypi.org/simple" +reference = "testpypi" + [[package]] name = "colorama" version = "0.4.6" @@ -326,16 +336,21 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[package.source] +type = "legacy" +url = "https://test.pypi.org/simple" +reference = "testpypi" + [[package]] name = "discord-py" -version = "2.3.1" +version = "2.3.2" description = "A Python wrapper for the Discord API" category = "main" optional = false python-versions = ">=3.8.0" files = [ - {file = "discord.py-2.3.1-py3-none-any.whl", hash = "sha256:149652f24da299706270bf8c03c2fcf80cf1caf3a480744c61d5b001688b380d"}, - {file = "discord.py-2.3.1.tar.gz", hash = "sha256:8eb4fe66b5d503da6de3a8425e23012711dc2fbcd7a782107a92beac15ee3459"}, + {file = "discord.py-2.3.2-py3-none-any.whl", hash = "sha256:9da4679fc3cb10c64b388284700dc998663e0e57328283bbfcfc2525ec5960a6"}, + {file = "discord.py-2.3.2.tar.gz", hash = "sha256:4560f70f2eddba7e83370ecebd237ac09fbb4980dc66507482b0c0e5b8f76b9c"}, ] [package.dependencies] @@ -565,14 +580,14 @@ files = [ [[package]] name = "openai" -version = "0.27.8" +version = "0.27.10" description = "Python client library for the OpenAI API" category = "main" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-0.27.8-py3-none-any.whl", hash = "sha256:e0a7c2f7da26bdbe5354b03c6d4b82a2f34bd4458c7a17ae1a7092c3e397e03c"}, - {file = "openai-0.27.8.tar.gz", hash = "sha256:2483095c7db1eee274cebac79e315a986c4e55207bb4fa7b82d185b3a2ed9536"}, + {file = "openai-0.27.10-py3-none-any.whl", hash = "sha256:beabd1757e3286fa166dde3b70ebb5ad8081af046876b47c14c41e203ed22a14"}, + {file = "openai-0.27.10.tar.gz", hash = 
"sha256:60e09edf7100080283688748c6803b7b3b52d5a55d21890f3815292a0552d83b"}, ] [package.dependencies] @@ -600,84 +615,145 @@ files = [ [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = 
"sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = 
"sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + [[package]] name = "pgml" -version = "0.8.0" +version = "0.9.4" description = "Python SDK is designed to facilitate the development of scalable vector search applications on PostgreSQL databases." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pgml-0.8.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:9308a53e30121df0c428a15cff93f8a6c5d6ba936f31c4f4b8c066fbdca9c8cc"}, - {file = "pgml-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e92a014be000c99de6f97ff9f4f63f40af4712a7a480c914283e63b804024c2"}, - {file = "pgml-0.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4d84992b5c5834334f390e4b517e2fb1af6e47f37f244f01867d4f32918b5e47"}, - {file = "pgml-0.8.0-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:e042422d836b4afd584b63746a53d68a84b370a5b716c1f557fee68ea904a2f5"}, - {file = "pgml-0.8.0-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:05fcb19667f48093cd5bfbbef34d76b87efa36f5c9f8aa8f52b21958134d9507"}, - {file = "pgml-0.8.0-cp310-none-win_amd64.whl", hash = "sha256:42d5c4bbd0bca75c346b9f4a70301e692eb213e7ac0e394f8f44ee90a08f1f8b"}, - {file = "pgml-0.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:12855bacf6df2ac8d0039453755bcc778c3781e857010713ed811a9726617080"}, - {file = "pgml-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46830c9971ee9f2d01ca181d196ca8a2e30d2d9c3d5a106595456534cee7f313"}, - {file = "pgml-0.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:34f9ec58369fe6ed05b2ddce7cd212055bb679dd1badb42fa876148bba3e455f"}, - {file = "pgml-0.8.0-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:da072fe1b0eb3999a01fcd1b1b7e180cbd14eb6a1d65fa32f0f5977bed8ed1a7"}, - {file = "pgml-0.8.0-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fbcceacc564b80852f8a33098169546fa741ff5ee8e1cd3207b2a3cdbe23345e"}, 
- {file = "pgml-0.8.0-cp311-none-win_amd64.whl", hash = "sha256:dd6b7fe356bc440179d2b3cdb58ee517140978f671cbdb27459b9309d074b01d"}, - {file = "pgml-0.8.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:854c913c0549f5fdde34783f2035256b07873ca8d93e637dd56939e9ac4dfc70"}, - {file = "pgml-0.8.0-cp37-cp37m-manylinux_2_34_aarch64.whl", hash = "sha256:16e64df9b259361bd63f0f9aa52100ee85a4bf678c7d03fcc1d0df082469336f"}, - {file = "pgml-0.8.0-cp37-cp37m-manylinux_2_34_x86_64.whl", hash = "sha256:c42f2a92d5c05c390b2b6c34aadf6faa0cfb4243d5244c44bd699f75a28757b1"}, - {file = "pgml-0.8.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:a5bb83ff9bece5021c7d0a078138c87f3e59aaf51208166266b82c439a54bd51"}, - {file = "pgml-0.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e1e22f64fc536c20d026e9bf4a58797535de6d4cde18858ba14f6c28ca6dc9b"}, - {file = "pgml-0.8.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:08050b4b35c90034fb49d96ea74edda130a494f2cfabd956bd6c0d68d02f5d35"}, - {file = "pgml-0.8.0-cp38-cp38-manylinux_2_34_aarch64.whl", hash = "sha256:d71a17e0458747c87534004acdfa586fb978b76e4688611deac4ee677e651f64"}, - {file = "pgml-0.8.0-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:2b059ee7b9173698c0bad8a6f14d35ee90cd6b28c2fb80a7a30396935c0bdab0"}, - {file = "pgml-0.8.0-cp38-none-win_amd64.whl", hash = "sha256:ca3c6e8c570a3ec78ccae14efb8a19aeb73f41f569f162b76750be5d40b40016"}, - {file = "pgml-0.8.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:b9ad06ad7b4284539844effdae31d444402afe53f887974b1a88138af6715422"}, - {file = "pgml-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:592f6364e69194db819fde66072ffdeec349ebca00af9efad6fbc4e23b18fb26"}, - {file = "pgml-0.8.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:cba5a4b5e7fd32d35635ac83f8472f669f5ea49ca0059f8d50671ac9c76dca63"}, - {file = "pgml-0.8.0-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:ae8c63d577c060cfeb46f7adc2e6b60c2b2f7478205e455bde1c233df3ed581c"}, - {file = 
"pgml-0.8.0-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:c9832807832f325338a0783e27ee58ebf65b960d3b629e816ffff3de30308519"}, - {file = "pgml-0.8.0-cp39-none-win_amd64.whl", hash = "sha256:acb82bf88ce2f7945cae3ae95ad4e37e24576e478ba50754c61230dc52c91630"}, + {file = "pgml-0.9.4-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d517ad306bf1522145bfc779bd2d727426c631168ac72b69640a2c097a83dcad"}, + {file = "pgml-0.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f2154e4ced996ba15f65061035f9dc4c2c1f54b9671b7df2219bfdfe6a83e47"}, + {file = "pgml-0.9.4-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:6c4732346c7e7ca3de03fbb3493fa8a00032220158c0909729ac85d56d41ed2b"}, + {file = "pgml-0.9.4-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:b494f4d8dab4dd89b44d618a682ccf02c658ce035a4f16d03021d7e62b0acc8e"}, + {file = "pgml-0.9.4-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:d5c4ff491979c61b8958de3fdc5c57a0e1cd81a5966ec897a4f9500aded40dfa"}, + {file = "pgml-0.9.4-cp310-none-win_amd64.whl", hash = "sha256:e44557115b4235662161654d1163ff78cbf8b03bee85505ee077f14f60232cd9"}, + {file = "pgml-0.9.4-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:340d661a05364a7306e62dd9f7b577a2e3f4606e52a73c5a66cb51c8d0531298"}, + {file = "pgml-0.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b658abbd698b4f9bb10510fdd1ae155c09bbe11dbff7e53747ad0367df23013"}, + {file = "pgml-0.9.4-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:d2bc9a6b2e7d0df7ca1b023f3cb4a0e733c0573704e0311ef1e6c65813fe4a08"}, + {file = "pgml-0.9.4-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:eb88f16ae5e114cefa96f5c9e256f46d920625ee56acc3725de2f3d2237c4c9a"}, + {file = "pgml-0.9.4-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:e3f8cf74b910888454eea73ca6aa40f713b9b492e1e4186a7e3e8c6870a380bc"}, + {file = "pgml-0.9.4-cp311-none-win_amd64.whl", hash = "sha256:12e5e0da59252bab278b9f6f84299699db136d20f7f22526d2919be92e051b48"}, + {file = 
"pgml-0.9.4-cp37-cp37m-manylinux_2_31_x86_64.whl", hash = "sha256:ea09c16eaf1d159e5a19e148fee962e7fcfab663d0df704ea5aa563ebaff82c4"}, + {file = "pgml-0.9.4-cp37-cp37m-manylinux_2_34_aarch64.whl", hash = "sha256:37adb2533a2995e000d0814070bfd8f100938644dd2d90be7a2914eb4dc55925"}, + {file = "pgml-0.9.4-cp37-cp37m-manylinux_2_34_x86_64.whl", hash = "sha256:d1a236fe0f2b9a33109c572ebd0261b8b8f1194de6892525cd2811084fb93672"}, + {file = "pgml-0.9.4-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6e2f78ce5350ba20f780e0753ba068794277ec7ef3ea13f8592babab1f8d9cc1"}, + {file = "pgml-0.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e9db6baffdd9edda409a44f209a4c8ba282f885c257912586f440401f2df290c"}, + {file = "pgml-0.9.4-cp38-cp38-manylinux_2_31_x86_64.whl", hash = "sha256:16b13e104a009490677715e77696bef090cfb12b3f4ff2b5ff6d38e6da0fabb1"}, + {file = "pgml-0.9.4-cp38-cp38-manylinux_2_34_aarch64.whl", hash = "sha256:0a6fc0223790d4ddf2a5db09fe016fe06f88a6769caa23d1251bafcc12ca0b17"}, + {file = "pgml-0.9.4-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:a509020d10dcb8efada9e37a53f4e4fab80e80036dd4ce76dd1f1ea791a6834e"}, + {file = "pgml-0.9.4-cp38-none-win_amd64.whl", hash = "sha256:1c1b8fa68f735f15615cdad92f7fed5ed7159892f892d316425ab24e020c971f"}, + {file = "pgml-0.9.4-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:56061c1ae34721ca2863bf30a6b173e98736cfc698d618932efa154c562582b1"}, + {file = "pgml-0.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79d4ab494a8d0006d16f33ff5c6ebb6039f0ca655b54af22b908cbaf306cabbf"}, + {file = "pgml-0.9.4-cp39-cp39-manylinux_2_31_x86_64.whl", hash = "sha256:eb001d7800e3fb189359d2d81e7fabbb12322a0c9c4277d2ad491a85a3b2594b"}, + {file = "pgml-0.9.4-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:d2ff28950f98e3dc3b377c855fb39cf10db11e67191b1ea07897c4196f7b8cd4"}, + {file = "pgml-0.9.4-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:c292dba6b3a3ee4f93a4ed44633959cfae70ffa33ec8775ec35e44583edf20bc"}, + {file = 
"pgml-0.9.4-cp39-none-win_amd64.whl", hash = "sha256:668286b5d7f0896cde03bacb97396295dfdcaf7dd71e9b7a664cf4d6f86ab072"}, + {file = "pgml-0.9.4.tar.gz", hash = "sha256:17af7d22bfcc2f1b73ce449276cfa29edc22a859dac8e8079b6913b8825439b0"}, ] +[package.source] +type = "legacy" +url = "https://test.pypi.org/simple" +reference = "testpypi" + [[package]] name = "platformdirs" -version = "3.9.1" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, - {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] plugins = ["importlib-metadata"] +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:104a4ff9f1ece23d8a31582156ea3ae928afe7121fac9fed3e967a1e2d6cf6ed"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:1efd93a2e222eb7360b5396108fdfa04e9753637d24143b8026dfb48ffbc755b"}, +] + +[package.dependencies] +six = ">=1.5" + +[package.source] +type = "legacy" +url = "https://test.pypi.org/simple" +reference = "testpypi" + [[package]] name = "python-dotenv" version = "1.0.0" @@ -693,6 +769,18 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + [[package]] name = "requests" version = "2.31.0" @@ -717,14 +805,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.4.2" +version = "13.5.3" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"}, - {file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"}, + {file = "rich-13.5.3-py3-none-any.whl", hash = "sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9"}, + {file = "rich-13.5.3.tar.gz", hash = "sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6"}, ] [package.dependencies] @@ -735,6 +823,18 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "slack-bolt" version = "1.18.0" @@ -759,19 +859,19 @@ testing-without-asyncio = ["Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", 
"Werk [[package]] name = "slack-sdk" -version = "3.21.3" +version = "3.22.0" description = "The Slack API Platform SDK for Python" category = "main" optional = false python-versions = ">=3.6.0" files = [ - {file = "slack_sdk-3.21.3-py2.py3-none-any.whl", hash = "sha256:de3c07b92479940b61cd68c566f49fbc9974c8f38f661d26244078f3903bb9cc"}, - {file = "slack_sdk-3.21.3.tar.gz", hash = "sha256:20829bdc1a423ec93dac903470975ebf3bc76fd3fd91a4dadc0eeffc940ecb0c"}, + {file = "slack_sdk-3.22.0-py2.py3-none-any.whl", hash = "sha256:f102a4902115dff3b97c3e8883ad4e22d54732221886fc5ef29bfc290f063b4a"}, + {file = "slack_sdk-3.22.0.tar.gz", hash = "sha256:6eacce0fa4f8cfb4d84eac0d7d7e1b1926040a2df654ae86b94179bdf2bc4d8c"}, ] [package.extras] optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=10,<11)"] -testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werkzeug (<2)", "black (==22.8.0)", "boto3 (<=2)", "click (==8.0.4)", "databases (>=0.5)", "flake8 (>=5,<6)", "itsdangerous (==1.1.0)", "moto (>=3,<4)", "psutil (>=5,<6)", "pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "pytest-cov (>=2,<3)"] +testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "Werkzeug (<2)", "black (==22.8.0)", "boto3 (<=2)", "click (==8.0.4)", "flake8 (>=5,<6)", "itsdangerous (==1.1.0)", "moto (>=3,<4)", "psutil (>=5,<6)", "pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "pytest-cov (>=2,<3)"] [[package]] name = "tomli" @@ -787,47 +887,47 @@ files = [ [[package]] name = "tqdm" -version = "4.65.0" +version = "4.66.1" description = "Fast, Extensible Progress Meter" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, - {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, + {file = 
"tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] name = "urllib3" -version = "2.0.4" +version = "2.0.5" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, + {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, + {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, ] [package.extras] @@ -927,4 +1027,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4.0" -content-hash = "790dc81785c78af605fe97847b90d3ff2cbf901c28ec6e0a055f4c01858892ea" +content-hash = "1412f9786ed6bf30332e934385e4c63c4847ed6f198a26d8f1003bc00d518a0c" diff --git a/pgml-apps/pgml-chat/pyproject.toml b/pgml-apps/pgml-chat/pyproject.toml index 10f9c95e9..4dc061d57 100644 --- a/pgml-apps/pgml-chat/pyproject.toml +++ b/pgml-apps/pgml-chat/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pgml-chat" -version = "0.1.1" +version = "0.2.0" description = "PostgresML bot builder for all your documentation" authors = ["PostgresML "] license = "MIT" @@ -11,14 +11,21 @@ packages = [{include = "pgml_chat"}] python = ">=3.8,<4.0" openai = "^0.27.8" rich = "^13.4.2" -pgml = "^0.9.0" python-dotenv = "^1.0.0" click = "^8.1.6" black = "^23.7.0" slack-bolt = "^1.18.0" discord-py = "^2.3.1" +pendulum = "^2.1.2" +pgml = "^0.9.4" +[[tool.poetry.source]] +name = "testpypi" +url = "https://test.pypi.org/simple/" +default = false +secondary = false + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api"