From 8dabd1e666b2aef34a2227629dd5b1287c55c9b6 Mon Sep 17 00:00:00 2001 From: Bharat Ramanathan Date: Thu, 5 Oct 2023 15:41:00 +0530 Subject: [PATCH 1/7] docs: add docstrings and type annotations --- .gitignore | 2 +- README.md | 54 ++-- poetry.lock | 22 +- pyproject.toml | 6 +- src/wandbot/api/app.py | 115 ++++++++- src/wandbot/api/client.py | 314 +++++++++++++++++++++-- src/wandbot/api/schemas.py | 18 ++ src/wandbot/apps/discord/__main__.py | 100 ++++---- src/wandbot/apps/discord/config.py | 26 +- src/wandbot/apps/slack/__main__.py | 107 ++++---- src/wandbot/apps/slack/config.py | 23 +- src/wandbot/apps/utils.py | 94 +++++++ src/wandbot/chat/chat.py | 192 ++++++++++++-- src/wandbot/chat/config.py | 18 +- src/wandbot/chat/prompts.py | 34 ++- src/wandbot/chat/schemas.py | 18 ++ src/wandbot/chat/utils.py | 74 ------ src/wandbot/database/client.py | 193 ++++++++++++-- src/wandbot/database/config.py | 15 +- src/wandbot/database/database.py | 14 +- src/wandbot/database/models.py | 20 +- src/wandbot/database/schemas.py | 18 ++ src/wandbot/evaluation/eval.py | 32 ++- src/wandbot/ingestion/config.py | 32 ++- src/wandbot/ingestion/prepare_data.py | 185 +++++++++++-- src/wandbot/ingestion/preprocess_data.py | 267 ++++++++++++++++--- src/wandbot/ingestion/report.py | 73 +++++- src/wandbot/ingestion/utils.py | 178 +++++++++---- src/wandbot/ingestion/vectorstores.py | 69 ++++- src/wandbot/utils.py | 129 +++++++++- 30 files changed, 1992 insertions(+), 450 deletions(-) create mode 100644 src/wandbot/apps/utils.py delete mode 100644 src/wandbot/chat/utils.py diff --git a/.gitignore b/.gitignore index 3abc3fc..e99a853 100644 --- a/.gitignore +++ b/.gitignore @@ -132,4 +132,4 @@ wandb/ artifacts/ data/ .idea/ - +.aider \ No newline at end of file diff --git a/README.md b/README.md index b7ed406..dae574f 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,33 @@ # wandbot -A question answering bot for Weights & Biases [documentation](https://docs.wandb.ai/). -This bot is built using [llama-index](https://gpt-index.readthedocs.io/en/stable/) and openai [gpt-4](https://openai.com/research/gpt-4). +Wandbot is a question-answering bot designed specifically for Weights & Biases [documentation](https://docs.wandb.ai/). +Leveraging the power of [llama-index](https://gpt-index.readthedocs.io/en/stable/) and OpenAI's [gpt-4](https://openai.com/research/gpt-4), it provides precise and context-aware responses +using a combination of [FAISS](https://github.com/facebookresearch/faiss) for RAG and OpenAI's [gpt-4](https://openai.com/research/gpt-4) for generating responses. + ## Features -- The bot utilizes retrieval augmented generation with [FAISS](https://github.com/facebookresearch/faiss) backend to retrieve relevant documents and efficiently handle user queries and provides accurate, context-aware responses -- Periodic data ingestion with report generation for continuous improvement of the bot.: Checkout the latest data ingestion report [here](https://wandb.ai/wandbot/wandbot-dev/reportlist) -- Integrated with Discord and Slack, allowing seamless integration into popular collaboration platforms. -- Logging and analysis with Weights & Biases Tables for performance monitoring and continuous improvement.: Checkout the workspace for more details [here](https://wandb.ai/wandbot/wandbot_public) -- Uses a fallback mechanism for model selection when GPT-4 is unable to generate a response. 
-- Evaluation using a combination of metrics such as retrieval accuracy, string similarity, and model-generated response correctness -- Want to know more about the custom system prompt used by the bot?: Checkout the full prompt [here](data/prompts/chat_prompt.json) +- Wandbot employs Retrieval Augmented Generation with a [FAISS](https://github.com/facebookresearch/faiss) backend, ensuring efficient and accurate responses to user queries by retrieving relevant documents. +- It features periodic data ingestion and report generation, contributing to the bot's continuous improvement. You can view the latest data ingestion report [here](https://wandb.ai/wandbot/wandbot-dev/reportlist). +- The bot is integrated with Discord and Slack, facilitating seamless integration with these popular collaboration platforms. +- Performance monitoring and continuous improvement are made possible through logging and analysis with Weights & Biases Tables. Visit the workspace for more details [here](https://wandb.ai/wandbot/wandbot_public). +- Wandbot has a fallback mechanism for model selection, which is used when GPT-4 fails to generate a response. +- The bot's performance is evaluated using a mix of metrics, including retrieval accuracy, string similarity, and the correctness of model-generated responses. +- Curious about the custom system prompt used by the bot? You can view the full prompt [here](data/prompts/chat_prompt.json). ## Installation -The project uses `python = ">=3.10.0,<3.11"` and uses [poetry](https://python-poetry.org/) for dependency management. To install the dependencies: +The project is built with Python version `>=3.10.0,<3.11` and utilizes [poetry](https://python-poetry.org/) for managing dependencies. Follow the steps below to install the necessary dependencies: ```bash git clone git@github.com:wandb/wandbot.git pip install poetry --all-extras cd wandbot poetry install -# Depending on which platform you want to run on run the following command: -# poetry install --extras discord # for discord -# poetry install --extras slack # for slack -# poetry install --extras api # for api +# Choose the platform application you wish to run on and execute the corresponding command: +# poetry install --extras discord # for running on Discord +# poetry install --extras slack # for running on Slack +# poetry install --extras api # for running the API ``` ## Usage @@ -43,7 +45,7 @@ These datasets are also stored as wandb artifacts in the project defined in the ### Running the Q&A Bot -You will need to set the following environment variables: +Before running the Q&A bot, ensure the following environment variables are set: ```bash OPENAI_API_KEY @@ -59,7 +61,7 @@ WANDB_PROJECT="wandbot-dev" WANDB_ENTITY="wandbot" ``` -Then you can run the Q&A bot application, use the following commands: +Once these environment variables are set, you can start the Q&A bot application using the following commands: ```bash (poetry run uvicorn wandbot.api.app:app --host="0.0.0.0" --port=8000 > api.log 2>&1) & \ @@ -67,9 +69,9 @@ Then you can run the Q&A bot application, use the following commands: (poetry run python -m wandbot.apps.discord > discord_app.log 2>&1) ``` -Please refer to the [run.sh](./run.sh) file in the root of the repository for more details on commands related to installing and running the bot. +For more detailed instructions on installing and running the bot, please refer to the [run.sh](./run.sh) file located in the root of the repository. 
-This will start the chatbot applications - the api, the slackbot and the discord bot, allowing you to interact with it and ask questions related to the Weights & Biases documentation. +Executing these commands will launch the API, Slackbot, and Discord bot applications, enabling you to interact with the bot and ask questions related to the Weights & Biases documentation. ### Evaluation @@ -84,14 +86,14 @@ cd wandbot poetry run python -m eval ``` -## Implementation Overview +## Overview of the Implementation -1. Document Embeddings with FAISS -2. Building the Q&A Pipeline with llama-index -3. Model Selection and Fallback Mechanism -4. Deploying the Q&A Bot on FastAPI, Discord and Slack -5. Logging and Analysis with Weights & Biases Tables -6. Evaluation of the Q&A Bot +1. Creating Document Embeddings with FAISS +2. Constructing the Q&A Pipeline using llama-index +3. Selection of Models and Implementation of Fallback Mechanism +4. Deployment of the Q&A Bot on FastAPI, Discord, and Slack +5. Utilizing Weights & Biases Tables for Logging and Analysis +6. Evaluating the Performance of the Q&A Bot -You can track the bot usage in the following project: +You can monitor the usage of the bot in the following project: https://wandb.ai/wandbot/wandbot_public diff --git a/poetry.lock b/poetry.lock index c242559..64eb0a0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -939,13 +939,13 @@ six = "*" [[package]] name = "langsmith" -version = "0.0.41" +version = "0.0.42" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.41-py3-none-any.whl", hash = "sha256:a555bef3d51e37bce284090b155e2148ec4098efa96ee918b3092c43c4bfaa77"}, - {file = "langsmith-0.0.41.tar.gz", hash = "sha256:ea05649bb140d6e58614e171df6539410b77ce393c23545453278677e916e351"}, + {file = "langsmith-0.0.42-py3-none-any.whl", hash = "sha256:e10a5084bdd71735a00e91850d4a293b6206825834027676d76fec8d0d044d0a"}, + {file = "langsmith-0.0.42.tar.gz", hash = "sha256:66fec6bce07cd18c8d9a7b9d7be216de5f7a93790c2f4cf37efb6956f9fffbf6"}, ] [package.dependencies] @@ -2545,13 +2545,13 @@ files = [ [[package]] name = "unstructured" -version = "0.10.18" +version = "0.10.19" description = "A library that prepares raw documents for downstream ML tasks." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "unstructured-0.10.18-py3-none-any.whl", hash = "sha256:eaec0f0ecc470bb646a750cb32c125275d34d258ced46cfc3364098939d9ca77"}, - {file = "unstructured-0.10.18.tar.gz", hash = "sha256:7f330573d4297182f4b1500e05c9fc4779a08811bce23c527a96898b2ff374f6"}, + {file = "unstructured-0.10.19-py3-none-any.whl", hash = "sha256:d967efb7e56c3da7c5d0304a0b2671a6eb2a4f79fe0fa5ed5a4e35aa9889ee2f"}, + {file = "unstructured-0.10.19.tar.gz", hash = "sha256:f99edf5727221d3a2d1e6880cf59a307f40cadcb3e8df58f424ba2b561862742"}, ] [package.dependencies] @@ -2571,7 +2571,7 @@ tabulate = "*" [package.extras] airtable = ["pyairtable"] -all-docs = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +all-docs = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference (==0.6.6)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] azure = ["adlfs", "fsspec (==2023.9.1)"] azure-cognitive-search = ["azure-search-documents"] biomed = ["bs4"] @@ -2590,9 +2590,9 @@ github = ["pygithub (>1.58.0)"] gitlab = ["python-gitlab"] google-drive = ["google-api-python-client"] huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] -image = ["pdf2image", "pdfminer.six", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)"] +image = ["pdf2image", "pdfminer.six", "unstructured-inference (==0.6.6)", "unstructured.pytesseract (>=0.3.12)"] jira = ["atlassian-python-api"] -local-inference = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +local-inference = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference (==0.6.6)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] md = ["markdown"] msg = ["msg-parser"] notion = ["htmlBuilder", "notion-client"] @@ -2602,7 +2602,7 @@ openai = ["langchain", "openai", "tiktoken"] org = ["pypandoc"] outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] -pdf = ["pdf2image", "pdfminer.six", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)"] +pdf = ["pdf2image", "pdfminer.six", "unstructured-inference (==0.6.6)", "unstructured.pytesseract (>=0.3.12)"] ppt = ["python-pptx (<=0.6.21)"] pptx = ["python-pptx (<=0.6.21)"] reddit = ["praw"] @@ -2794,4 +2794,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.10.0,<3.11" -content-hash = "bfe95f62da4f94b68ae1aa1d1c67fd61d2393d53855425cecb18989fb7acdfcb" +content-hash = "821477b2d4e888f8bc3ab13c6e99a3b26daee8138f7c438af65910a96623f837" diff --git a/pyproject.toml b/pyproject.toml index ab1adbb..f4f4f08 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ wandb = "^0.15.12" openai = "^0.28.1" tiktoken = "^0.5.1" pandas = "^2.0.3" -unstructured = "^0.10.5" +unstructured = "^0.10.19" pydantic-settings = "^2.0.3" gitpython = "^3.1.32" giturlparse = "^0.12.0" @@ -42,9 +42,9 @@ build-backend = "poetry.core.masonry.api" 
[tool.isort] profile = "black" -line_length = 119 +line_length = 80 skip = [".gitignore", "data", "examples", "notebooks", "artifacts", ".vscode", ".github", ".idea", ".replit", "*.md", "wandb", ".env", ".git", ] [tool.black] -line-length = 119 +line-length = 80 skip = [".gitignore", "data", "examples", "notebooks", "artifacts", ".vscode", ".github", ".idea", ".replit", "*.md", "wandb", ".env", ".git", ] diff --git a/src/wandbot/api/app.py b/src/wandbot/api/app.py index a65c184..bbb797f 100644 --- a/src/wandbot/api/app.py +++ b/src/wandbot/api/app.py @@ -1,3 +1,33 @@ +"""This module serves as the main server API for the wandbot application. + +It imports and uses the FastAPI framework to define the API and initialize application event handlers like "startup". +Also, the module includes Python's built-in asyncio library for managing asynchronous tasks related to database backup. + +The API includes: +- APICreateChatThreadRequest +- APIFeedbackRequest +- APIFeedbackResponse +- APIGetChatThreadResponse +- APIQueryRequest +- APIQueryResponse +- APIQuestionAnswerRequest +- APIQuestionAnswerResponse + +Following classes and their functionalities: +- Chat: Main chat handling class, initialized during startup. +- ChatConfig: Configuration utility for chat. +- ChatRequest: Schema to handle requests made to the chat. + +It also sets up and interacts with the database through: +- DatabaseClient: A utility to interact with the database. +- Base.metadata.create_all(bind=engine): Creates database tables based on the metadata. + +The server runs periodic backup of the data to wandb using the backup_db method which runs as a coroutine. +The backup data is transformed into a Pandas DataFrame and saved as a wandb.Table. + +It uses logger from the utils module for logging purposes. +""" + import asyncio from datetime import datetime @@ -33,19 +63,39 @@ async def backup_db(): + """Periodically backs up the database to a table. + + This function runs periodically and retrieves all question-answer threads from the database since the last backup. + It then creates a pandas DataFrame from the retrieved threads and logs it to a table using Weights & Biases. + The last backup timestamp is updated after each backup. + + Returns: + None + """ global last_backup - while True: # code to run periodically starts here + while True: chat_threads = db_client.get_all_question_answers(last_backup) if chat_threads is not None: - chat_table = pd.DataFrame([chat_thread for chat_thread in chat_threads]) + chat_table = pd.DataFrame( + [chat_thread for chat_thread in chat_threads] + ) last_backup = datetime.now() logger.info(f"Backing up database to Table at {last_backup}") - wandb.log({"question_answers_db": wandb.Table(dataframe=chat_table)}) + wandb.log( + {"question_answers_db": wandb.Table(dataframe=chat_table)} + ) await asyncio.sleep(600) @app.on_event("startup") def startup_event(): + """Handles the startup event. + + This function initializes the chat and database client objects and creates a task to backup the database. + + Returns: + None + """ global chat, db_client chat = Chat(ChatConfig()) db_client = DatabaseClient() @@ -60,6 +110,15 @@ def startup_event(): async def create_question_answer( request: APIQuestionAnswerRequest, response: Response ) -> APIQuestionAnswerResponse | None: + """Creates a question answer. + + Args: + request: The request object containing the question answer data. + response: The response object to update with the result. + + Returns: + The created question answer or None if creation failed. 
+ """ question_answer = db_client.create_question_answer(request) if question_answer is None: response.status_code = status.HTTP_400_BAD_REQUEST @@ -71,7 +130,21 @@ async def create_question_answer( response_model=APIGetChatThreadResponse | None, status_code=status.HTTP_200_OK, ) -async def get_chat_thread(application: str, thread_id: str, response: Response) -> APIGetChatThreadResponse: +async def get_chat_thread( + application: str, thread_id: str, response: Response +) -> APIGetChatThreadResponse: + """Retrieves a chat thread from the database. + + If the chat thread does not exist, it creates a new chat thread. + + Args: + application: The application name. + thread_id: The ID of the chat thread. + response: The HTTP response object. + + Returns: + The retrieved or created chat thread. + """ chat_thread = db_client.get_chat_thread( application=application, thread_id=thread_id, @@ -89,12 +162,24 @@ async def get_chat_thread(application: str, thread_id: str, response: Response) return chat_thread -@app.post("/query", response_model=APIQueryResponse, status_code=status.HTTP_200_OK) +@app.post( + "/query", response_model=APIQueryResponse, status_code=status.HTTP_200_OK +) async def query( request: APIQueryRequest, ) -> APIQueryResponse: + """Executes a query using the chat function and returns the result as an APIQueryResponse. + + Args: + request: The APIQueryRequest object containing the question and chat history. + + Returns: + The APIQueryResponse object containing the result of the query. + """ result = chat( - ChatRequest(question=request.question, chat_history=request.chat_history), + ChatRequest( + question=request.question, chat_history=request.chat_history + ), ) result = APIQueryResponse(**result.model_dump()) @@ -106,7 +191,18 @@ async def query( response_model=APIFeedbackResponse | None, status_code=status.HTTP_201_CREATED, ) -async def feedback(request: APIFeedbackRequest, response: Response) -> APIFeedbackResponse: +async def feedback( + request: APIFeedbackRequest, response: Response +) -> APIFeedbackResponse: + """Handles the feedback request and logs the feedback data. + + Args: + request: The feedback request object. + response: The response object. + + Returns: + The feedback response object. + """ feedback_response = db_client.create_feedback(request) if feedback_response is not None: wandb.log( @@ -124,6 +220,11 @@ async def feedback(request: APIFeedbackRequest, response: Response) -> APIFeedba @app.on_event("shutdown") def shutdown_event(): + """Finish the current run if wandb.run is not None. + + Returns: + None + """ if wandb.run is not None: wandb.run.finish() diff --git a/src/wandbot/api/client.py b/src/wandbot/api/client.py index 3f6dca1..18e7d15 100644 --- a/src/wandbot/api/client.py +++ b/src/wandbot/api/client.py @@ -1,3 +1,13 @@ +"""A client for interacting with the API. + +This module provides a client class for interacting with the API, including +getting chat history, creating question answers, creating feedback, and querying. + +Classes: + APIClient: Client for interacting with the API. + AsyncAPIClient: Asynchronous client for interacting with the API. +""" + import json import uuid from datetime import datetime @@ -21,16 +31,49 @@ class APIClient: + """Client for interacting with the API. + + This class provides methods for interacting with the API, including + getting chat history, creating question answers, creating feedback, + and querying. + + Attributes: + url: The base URL for the API. + query_endpoint: The endpoint for querying. 
+ feedback_endpoint: The endpoint for feedback. + chat_thread_endpoint: The endpoint for chat threads. + chat_question_answer_endpoint: The endpoint for question answers. + """ + def __init__(self, url: str): + """Initializes the API client with the given URL. + + Args: + url: The base URL for the API. + """ self.url = url self.query_endpoint = urljoin(str(self.url), "query") self.feedback_endpoint = urljoin(str(self.url), "feedback") self.chat_thread_endpoint = urljoin(str(self.url), "chat_thread") - self.chat_question_answer_endpoint = urljoin(str(self.url), "question_answer") + self.chat_question_answer_endpoint = urljoin( + str(self.url), "question_answer" + ) - def _get_chat_thread(self, request: APIGetChatThreadRequest) -> APIGetChatThreadResponse | None: + def _get_chat_thread( + self, request: APIGetChatThreadRequest + ) -> APIGetChatThreadResponse | None: + """Gets a chat thread from the API. + + Args: + request: The request object containing the application and thread ID. + + Returns: + The response from the API, or None if the status code is not 200 or 201. + """ with requests.Session() as session: - with session.get(f"{self.chat_thread_endpoint}/{request.application}/{request.thread_id}") as response: + with session.get( + f"{self.chat_thread_endpoint}/{request.application}/{request.thread_id}" + ) as response: if response.status_code in (200, 201): return APIGetChatThreadResponse(**response.json()) @@ -39,6 +82,15 @@ def get_chat_history( application: str, thread_id: str, ) -> List[QuestionAnswer] | None: + """Gets the chat history for a given application and thread ID. + + Args: + application: The application to get the chat history for. + thread_id: The thread ID to get the chat history for. + + Returns: + The chat history, or None if the response from the API is None. + """ request = APIGetChatThreadRequest( application=application, thread_id=thread_id, @@ -49,9 +101,22 @@ def get_chat_history( else: return response.question_answers - def _create_question_answer(self, request: APIQuestionAnswerRequest) -> APIQuestionAnswerResponse | None: + def _create_question_answer( + self, request: APIQuestionAnswerRequest + ) -> APIQuestionAnswerResponse | None: + """Creates a question answer in the API. + + Args: + request: The request object containing the question answer data. + + Returns: + The response from the API, or None if the status code is not 201. + """ with requests.Session() as session: - with session.post(self.chat_question_answer_endpoint, json=json.loads(request.json())) as response: + with session.post( + self.chat_question_answer_endpoint, + json=json.loads(request.json()), + ) as response: if response.status_code == 201: return APIQuestionAnswerResponse(**response.json()) @@ -74,6 +139,29 @@ def create_question_answer( start_time: datetime | None = None, end_time: datetime | None = None, ) -> APIQuestionAnswerResponse | None: + """Creates a question answer in the API. + + Args: + question_answer_id: The ID of the question answer. + thread_id: The ID of the thread. + question: The question. + system_prompt: The system prompt. + answer: The answer. + model: The model. + sources: The sources. + source_documents: The source documents. + total_tokens: The total number of tokens. + prompt_tokens: The number of prompt tokens. + completion_tokens: The number of completion tokens. + successful_requests: The number of successful requests. + total_cost: The total cost. + time_taken: The time taken. + start_time: The start time. + end_time: The end time. 
+ + Returns: + The response from the API. + """ request = APIQuestionAnswerRequest( question_answer_id=question_answer_id, thread_id=thread_id, @@ -95,13 +183,37 @@ def create_question_answer( response = self._create_question_answer(request) return response - def _create_feedback(self, request: APIFeedbackRequest) -> APIFeedbackResponse | None: + def _create_feedback( + self, request: APIFeedbackRequest + ) -> APIFeedbackResponse | None: + """Creates feedback in the API. + + Args: + request: The request object containing the feedback data. + + Returns: + The response from the API, or None if the status code is not 201. + """ with requests.Session() as session: - with session.post(self.feedback_endpoint, json=request.dict()) as response: + with session.post( + self.feedback_endpoint, json=request.dict() + ) as response: if response.status_code == 201: return APIFeedbackResponse(**response.json()) - def create_feedback(self, feedback_id: str, question_answer_id: str, rating: int): + def create_feedback( + self, feedback_id: str, question_answer_id: str, rating: int + ): + """Creates feedback in the API. + + Args: + feedback_id: The ID of the feedback. + question_answer_id: The ID of the question answer. + rating: The rating. + + Returns: + The response from the API. + """ feedback_request = APIFeedbackRequest( feedback_id=feedback_id, question_answer_id=question_answer_id, @@ -111,6 +223,14 @@ def create_feedback(self, feedback_id: str, question_answer_id: str, rating: int return response def _query(self, request: APIQueryRequest) -> APIQueryResponse | None: + """Queries the API. + + Args: + request: The request object containing the query data. + + Returns: + The response from the API, or None if the status code is not 200. + """ with requests.Session() as session: payload = json.loads(request.json()) with session.post(self.query_endpoint, json=payload) as response: @@ -124,6 +244,15 @@ def query( question: str, chat_history: Optional[List[QuestionAnswer]] = None, ) -> APIQueryResponse: + """Queries the API. + + Args: + question: The question to query. + chat_history: The chat history. + + Returns: + The response from the API. + """ request = APIQueryRequest( question=question, chat_history=chat_history, @@ -134,10 +263,32 @@ def query( class AsyncAPIClient(APIClient): + """Client for interacting with the API asynchronously. + + This class provides methods for interacting with the API, including + getting chat history, creating question answers, creating feedback, + and querying. All methods are asynchronous. + """ + def __init__(self, url: str): + """Initializes the AsyncAPIClient instance with a given URL. + + Args: + url: The URL of the API to interact with. + """ super().__init__(url) - async def _get_chat_thread(self, request: APIGetChatThreadRequest) -> APIGetChatThreadResponse | None: + async def _get_chat_thread( + self, request: APIGetChatThreadRequest + ) -> APIGetChatThreadResponse | None: + """Private method to get a chat thread from the API. + + Args: + request: The request object containing the application and thread ID. + + Returns: + The response from the API, or None if the status code is not 200 or 201. 
+ """ async with aiohttp.ClientSession() as session: async with session.get( f"{self.chat_thread_endpoint}/{request.application}/{request.thread_id}" @@ -146,7 +297,18 @@ async def _get_chat_thread(self, request: APIGetChatThreadRequest) -> APIGetChat response = await response.json() return APIGetChatThreadResponse(**response) - async def get_chat_history(self, application: str, thread_id: str) -> List[QuestionAnswer] | None: + async def get_chat_history( + self, application: str, thread_id: str + ) -> List[QuestionAnswer] | None: + """Gets the chat history for a given application and thread ID. + + Args: + application: The application to get the chat history for. + thread_id: The thread ID to get the chat history for. + + Returns: + The chat history as a list of QuestionAnswer objects, or None if no chat history is found. + """ request = APIGetChatThreadRequest( application=application, thread_id=thread_id, @@ -157,9 +319,22 @@ async def get_chat_history(self, application: str, thread_id: str) -> List[Quest else: return response.question_answers - async def _create_question_answer(self, request: APIQuestionAnswerRequest) -> APIQuestionAnswerResponse | None: + async def _create_question_answer( + self, request: APIQuestionAnswerRequest + ) -> APIQuestionAnswerResponse | None: + """Private method to create a question answer in the API. + + Args: + request: The request object containing the question answer data. + + Returns: + The response from the API, or None if the status code is not 201. + """ async with aiohttp.ClientSession() as session: - async with session.post(self.chat_question_answer_endpoint, json=json.loads(request.json())) as response: + async with session.post( + self.chat_question_answer_endpoint, + json=json.loads(request.json()), + ) as response: if response.status == 201: response = await response.json() return APIQuestionAnswerResponse(**response) @@ -183,6 +358,29 @@ async def create_question_answer( start_time: datetime | None = None, end_time: datetime | None = None, ) -> APIQuestionAnswerResponse | None: + """Creates a question answer in the API. + + Args: + question_answer_id: The ID of the question answer. + thread_id: The ID of the thread. + question: The question. + system_prompt: The system prompt. + answer: The answer. + model: The model used. + sources: The sources used. + source_documents: The source documents used. + total_tokens: The total number of tokens used. + prompt_tokens: The number of prompt tokens used. + completion_tokens: The number of completion tokens used. + successful_requests: The number of successful requests. + total_cost: The total cost. + time_taken: The time taken. + start_time: The start time. + end_time: The end time. + + Returns: + The response from the API, or None if the status code is not 201. + """ request = APIQuestionAnswerRequest( question_answer_id=question_answer_id, thread_id=thread_id, @@ -204,14 +402,38 @@ async def create_question_answer( response = await self._create_question_answer(request) return response - async def _create_feedback(self, request: APIFeedbackRequest) -> APIFeedbackResponse: + async def _create_feedback( + self, request: APIFeedbackRequest + ) -> APIFeedbackResponse: + """Private method to create feedback in the API. + + Args: + request: The request object containing the feedback data. + + Returns: + The response from the API. 
+ """ async with aiohttp.ClientSession() as session: - async with session.post(self.feedback_endpoint, json=json.loads(request.json())) as response: + async with session.post( + self.feedback_endpoint, json=json.loads(request.json()) + ) as response: if response.status == 201: response = await response.json() return APIFeedbackResponse(**response) - async def create_feedback(self, feedback_id: str, question_answer_id: str, rating: int): + async def create_feedback( + self, feedback_id: str, question_answer_id: str, rating: int + ): + """Creates feedback in the API. + + Args: + feedback_id: The ID of the feedback. + question_answer_id: The ID of the question answer the feedback is for. + rating: The rating of the feedback. + + Returns: + The response from the API. + """ request = APIFeedbackRequest( feedback_id=feedback_id, question_answer_id=question_answer_id, @@ -221,8 +443,18 @@ async def create_feedback(self, feedback_id: str, question_answer_id: str, ratin return response async def _query(self, request: APIQueryRequest) -> APIQueryResponse | None: + """Private method to query the API. + + Args: + request: The request object containing the query data. + + Returns: + The response from the API, or None if the status code is not 200. + """ async with aiohttp.ClientSession() as session: - async with session.post(self.query_endpoint, json=json.loads(request.json())) as response: + async with session.post( + self.query_endpoint, json=json.loads(request.json()) + ) as response: if response.status == 200: response = await response.json() return APIQueryResponse(**response) @@ -234,6 +466,15 @@ async def query( question: str, chat_history: List[QuestionAnswer] = None, ) -> APIQueryResponse: + """Queries the API. + + Args: + question: The question to query. + chat_history: The chat history. + + Returns: + The response from the API. + """ request = APIQueryRequest( question=question, chat_history=chat_history, @@ -255,17 +496,26 @@ async def run(): application = "test" # thread_id = str(uuid.uuid4()) thread_id = "300d9a8c-ea55-4bb1-94e6-d3e3ed2df8bd" - chat_history = await api_client.get_chat_history(application=application, thread_id=thread_id) + chat_history = await api_client.get_chat_history( + application=application, thread_id=thread_id + ) if not chat_history: print("No chat history found") else: - print(json.dumps([json.loads(item.json()) for item in chat_history], indent=2)) + print( + json.dumps( + [json.loads(item.json()) for item in chat_history], + indent=2, + ) + ) # chat_history = [(item.question, item.answer) for item in chat_history] # query the api and get the chat response question = "Hi @wandbot, How about openai?" 
- chat_response = await api_client.query(question=question, chat_history=chat_history) + chat_response = await api_client.query( + question=question, chat_history=chat_history + ) # save the chat response to the database question_answer_id = str(uuid.uuid4()) @@ -276,16 +526,32 @@ async def run(): ) # get the chat history again - chat_history = await api_client.get_chat_history(application=application, thread_id=thread_id) - print(json.dumps([json.loads(item.json()) for item in chat_history], indent=2)) + chat_history = await api_client.get_chat_history( + application=application, thread_id=thread_id + ) + print( + json.dumps( + [json.loads(item.json()) for item in chat_history], indent=2 + ) + ) # add feedback feedback_id = str(uuid.uuid4()) - await api_client.create_feedback(feedback_id=feedback_id, question_answer_id=question_answer_id, rating=1) + await api_client.create_feedback( + feedback_id=feedback_id, + question_answer_id=question_answer_id, + rating=1, + ) # get the chat history again - chat_history = await api_client.get_chat_history(application=application, thread_id=thread_id) - print(json.dumps([json.loads(item.json()) for item in chat_history], indent=2)) + chat_history = await api_client.get_chat_history( + application=application, thread_id=thread_id + ) + print( + json.dumps( + [json.loads(item.json()) for item in chat_history], indent=2 + ) + ) print(timer.start, timer.start, timer.elapsed) asyncio.run(run()) diff --git a/src/wandbot/api/schemas.py b/src/wandbot/api/schemas.py index 0bb5b20..13a67c1 100644 --- a/src/wandbot/api/schemas.py +++ b/src/wandbot/api/schemas.py @@ -1,3 +1,21 @@ +"""A module for API schemas. + +This module provides the schemas for API requests and responses. +It includes classes for creating question answers, getting chat threads, +creating chat threads, querying, creating feedback, and more. + +Classes: + APIQuestionAnswerRequest: Request schema for creating a question answer. + APIQuestionAnswerResponse: Response schema for a question answer. + APIGetChatThreadRequest: Request schema for getting a chat thread. + APIGetChatThreadResponse: Response schema for a chat thread. + APICreateChatThreadRequest: Request schema for creating a chat thread. + APIQueryRequest: Request schema for querying. + APIQueryResponse: Response schema for a query. + APIFeedbackRequest: Request schema for creating feedback. + APIFeedbackResponse: Response schema for feedback. +""" + from wandbot.chat.schemas import ChatRepsonse, ChatRequest from wandbot.database.schemas import ( ChatThread, diff --git a/src/wandbot/apps/discord/__main__.py b/src/wandbot/apps/discord/__main__.py index 9fe9022..66e974a 100644 --- a/src/wandbot/apps/discord/__main__.py +++ b/src/wandbot/apps/discord/__main__.py @@ -1,15 +1,21 @@ +"""Discord bot for handling user queries and interacting with an API. + +This module contains the main functionality for a Discord bot that listens to user messages, +detects the language of the message, creates threads for user queries, interacts with an API to get responses, +formats the responses, and sends them back to the user. It also handles user feedback on the bot's responses. 
+ +""" import asyncio import logging import uuid -from collections import OrderedDict import discord import langdetect from discord.ext import commands from wandbot.api.client import AsyncAPIClient -from wandbot.api.schemas import APIQueryResponse from wandbot.apps.discord.config import DiscordAppConfig +from wandbot.apps.utils import format_response logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) @@ -25,44 +31,16 @@ api_client = AsyncAPIClient(url=config.WANDBOT_API_URL) -def deduplicate(input_list): - return list(OrderedDict.fromkeys(input_list)) - - -def format_response( - response: APIQueryResponse | None, outro_message: str = "", lang: str = "en", is_last=False -) -> str: - if response is not None: - result = response.answer - if "gpt-4" not in response.model: - if lang == "ja": - warning_message = f"*警告: {response.model}* にフォールバックします。これらの結果は *gpt-4* ほど良くない可能性があります*" - else: - warning_message = ( - f"*Warning: Falling back to {response.model}*, These results may nor be as good as " f"*gpt-4*\n\n" - ) - result = warning_message + response.answer - - if config.include_sources and response.sources and is_last: - sources_list = deduplicate( - [item for item in response.sources.split(",") if item.strip().startswith("http")] - ) - if len(sources_list) > 0: - items = min(len(sources_list), 3) - if lang == "ja": - result = f"{result}\n\n*参考文献*\n\n>" + "\n> ".join(sources_list[:items]) + "\n\n" - else: - result = f"{result}\n\n*References*\n\n>" + "\n> ".join(sources_list[:items]) + "\n\n" - if outro_message: - result = f"{result}\n\n{outro_message}" - - else: - result = config.ERROR_MESSAGE - return result - - @bot.event async def on_message(message: discord.Message): + """Handles the on_message event in Discord. + + Args: + message: The message object received. 
+ + Returns: + None + """ if message.author == bot.user: return if bot.user is not None and bot.user.mentioned_in(message): @@ -95,7 +73,10 @@ async def on_message(message: discord.Message): chat_history = None if not chat_history: if lang_code == "ja": - await thread.send(f"🤖 {mention}: {config.JA_INTRO_MESSAGE}", mention_author=True) + await thread.send( + f"🤖 {mention}: {config.JA_INTRO_MESSAGE}", + mention_author=True, + ) else: await thread.send( f"🤖 Hi {mention}: {config.EN_INTRO_MESSAGE}", @@ -108,9 +89,15 @@ async def on_message(message: discord.Message): ) if response is None: if lang_code == "ja": - await thread.send(f"🤖 {mention}: {config.JA_ERROR_MESSAGE}", mention_author=True) + await thread.send( + f"🤖 {mention}: {config.JA_ERROR_MESSAGE}", + mention_author=True, + ) else: - await thread.send(f"🤖 {mention}: {config.EN_ERROR_MESSAGE}", mention_author=True) + await thread.send( + f"🤖 {mention}: {config.EN_ERROR_MESSAGE}", + mention_author=True, + ) return if lang_code == "ja": outro_message = config.JA_OUTRO_MESSAGE @@ -126,15 +113,31 @@ async def on_message(message: discord.Message): response_copy.answer = answer_chunk if i == len(answer_chunks) - 1: sent_message = await thread.send( - format_response(response_copy, outro_message, lang_code, is_last=True), + format_response( + config, + response_copy, + outro_message, + lang_code, + ), ) else: sent_message = await thread.send( - format_response(response_copy, "", lang_code), + format_response( + config, + response_copy, + "", + lang_code, + is_last=False, + ), ) else: sent_message = await thread.send( - format_response(response, outro_message, lang_code, is_last=True), + format_response( + config, + response, + outro_message, + lang_code, + ), ) if sent_message is not None: await api_client.create_question_answer( @@ -148,10 +151,15 @@ async def on_message(message: discord.Message): # # Wait for reactions def check(reaction, user): - return user == message.author and str(reaction.emoji) in ["👍", "👎"] + return user == message.author and str(reaction.emoji) in [ + "👍", + "👎", + ] try: - reaction, user = await bot.wait_for("reaction_add", timeout=config.WAIT_TIME, check=check) + reaction, user = await bot.wait_for( + "reaction_add", timeout=config.WAIT_TIME, check=check + ) except asyncio.TimeoutError: # await thread.send("🤖") diff --git a/src/wandbot/apps/discord/config.py b/src/wandbot/apps/discord/config.py index 44dd556..a6c837c 100644 --- a/src/wandbot/apps/discord/config.py +++ b/src/wandbot/apps/discord/config.py @@ -1,3 +1,20 @@ +"""Discord bot configuration module. + +This module contains the configuration settings for the Discord bot application. +It includes settings for the application name, wait time, channel IDs, bot token, +API keys, messages in English and Japanese, API URL, and a flag to include sources. + +The settings are defined in the DiscordAppConfig class, which inherits from the +BaseSettings class provided by the pydantic_settings package. The settings values +are either hardcoded or fetched from environment variables. + +Typical usage example: + + config = DiscordAppConfig() + wait_time = config.WAIT_TIME + bot_token = config.DISCORD_BOT_TOKEN +""" + from pydantic import AnyHttpUrl, Field from pydantic_settings import BaseSettings @@ -13,9 +30,14 @@ f" Was this response helpful? Please react below to let us know" ) -EN_ERROR_MESSAGE = "Oops!, Something went wrong. Please retry again in some time" +EN_ERROR_MESSAGE = ( + "Oops!, Something went wrong. 
Please retry again in some time" +) -JA_INTRO_MESSAGE = "Wandbotは現在アルファテスト中ですので、頻繁にアップデートされます。" "ご利用の際にはプライバシーに関わる情報は入力されないようお願いします。返答を生成しています・・・" +JA_INTRO_MESSAGE = ( + "Wandbotは現在アルファテスト中ですので、頻繁にアップデートされます。" + "ご利用の際にはプライバシーに関わる情報は入力されないようお願いします。返答を生成しています・・・" +) JA_OUTRO_MESSAGE = ( ":robot_face: この答えが十分でなかった場合には、質問を少し変えて試してみると結果が良くなることがあるので、お試しください。もしくは、" diff --git a/src/wandbot/apps/slack/__main__.py b/src/wandbot/apps/slack/__main__.py index 7848a02..607e6df 100644 --- a/src/wandbot/apps/slack/__main__.py +++ b/src/wandbot/apps/slack/__main__.py @@ -1,4 +1,15 @@ -from collections import OrderedDict +"""A Slack bot that interacts with users and processes their queries. + +This module contains the main functionality of the Slack bot. It listens for mentions of the bot in messages, +processes the text of the message, and sends a response. It also handles reactions added to messages and +saves them as feedback. The bot supports both English and Japanese languages. + +The bot uses the Slack Bolt framework for handling events and the langdetect library for language detection. +It also communicates with an external API for processing queries and storing chat history and feedback. + +""" + +import logging from functools import partial import langdetect @@ -6,8 +17,8 @@ from slack_bolt.adapter.socket_mode import SocketModeHandler from wandbot.api.client import APIClient -from wandbot.api.schemas import APIQueryResponse from wandbot.apps.slack.config import SlackAppConfig +from wandbot.apps.utils import format_response from wandbot.utils import get_logger logger = get_logger(__name__) @@ -17,44 +28,7 @@ api_client = APIClient(url=config.WANDBOT_API_URL) -def deduplicate(input_list): - return list(OrderedDict.fromkeys(input_list)) - - -def format_response(response: APIQueryResponse | None, outro_message: str = "", lang: str = "en") -> str: - if response is not None: - result = response.answer - if "gpt-4" not in response.model: - if lang == "ja": - warning_message = f"*警告: {response.model}* にフォールバックします。これらの結果は *gpt-4* ほど良くない可能性があります*" - else: - warning_message = ( - f"*Warning: Falling back to {response.model}*, These results may nor be as good as " f"*gpt-4*\n\n" - ) - result = warning_message + response.answer - - if config.include_sources and response.sources: - sources_list = deduplicate( - [item for item in response.sources.split(",") if item.strip().startswith("http")] - ) - if len(sources_list) > 0: - items = min(len(sources_list), 3) - if lang == "ja": - result = f"{result}\n\n*参考文献*\n\n>" + "\n> ".join(sources_list[:items]) + "\n\n" - else: - result = f"{result}\n\n*References*\n\n>" + "\n> ".join(sources_list[:items]) + "\n\n" - if outro_message: - result = f"{result}\n\n{outro_message}" - - else: - if lang == "ja": - result = config.JA_ERROR_MESSAGE - else: - result = config.EN_ERROR_MESSAGE - return result - - -def send_message(say, message, thread=None): +def send_message(say: callable, message: str, thread: str = None) -> None: if thread is not None: return say(text=message, thread_ts=thread) else: @@ -62,15 +36,30 @@ def send_message(say, message, thread=None): @app.event("app_mention") -def command_handler(body, say, logger): +def command_handler(body: dict, say: callable, logger: logging.Logger) -> None: + """ + Handles the command when the app is mentioned in a message. + + Args: + body (dict): The event body containing the message details. + say (function): The function to send a message. + logger (Logger): The logger instance for logging errors. 
+ + Raises: + Exception: If there is an error posting the message. + """ try: query = body["event"].get("text") lang_code = langdetect.detect(query) user = body["event"].get("user") - thread_id = body["event"].get("thread_ts", None) or body["event"].get("ts", None) + thread_id = body["event"].get("thread_ts", None) or body["event"].get( + "ts", None + ) say = partial(say, token=config.SLACK_BOT_TOKEN) - chat_history = api_client.get_chat_history(application=config.APPLICATION, thread_id=thread_id) + chat_history = api_client.get_chat_history( + application=config.APPLICATION, thread_id=thread_id + ) if not chat_history: # send out the intro message @@ -87,11 +76,17 @@ def command_handler(body, say, logger): thread=thread_id, ) # process the query through the api - api_response = api_client.query(question=query, chat_history=chat_history) + api_response = api_client.query( + question=query, chat_history=chat_history + ) if lang_code == "ja": - response = format_response(api_response, config.JA_OUTRO_MESSAGE, lang_code) + response = format_response( + config, api_response, config.JA_OUTRO_MESSAGE, lang_code + ) else: - response = format_response(api_response, config.EN_OUTRO_MESSAGE) + response = format_response( + config, api_response, config.EN_OUTRO_MESSAGE + ) # send the response sent_message = send_message(say=say, message=response, thread=thread_id) @@ -120,7 +115,16 @@ def command_handler(body, say, logger): logger.error(f"Error posting message: {e}") -def parse_reaction(reaction: str): +def parse_reaction(reaction: str) -> int: + """ + Parses the reaction and returns the corresponding rating value. + + Args: + reaction (str): The reaction emoji. + + Returns: + int: The rating value (-1, 0, or 1). + """ if reaction == "+1": return 1 elif reaction == "-1": @@ -130,7 +134,14 @@ def parse_reaction(reaction: str): @app.event("reaction_added") -def handle_reaction_added(event, say): +def handle_reaction_added(event: dict, say: callable) -> None: + """ + Handles the event when a reaction is added to a message. + + Args: + event (dict): The event details. + say (callable): The function to send a message. + """ channel_id = event["item"]["channel"] message_ts = event["item"]["ts"] diff --git a/src/wandbot/apps/slack/config.py b/src/wandbot/apps/slack/config.py index e91b4b8..26350af 100644 --- a/src/wandbot/apps/slack/config.py +++ b/src/wandbot/apps/slack/config.py @@ -1,3 +1,17 @@ +"""This module contains the configuration settings for the Slack application. + +This module uses the Pydantic library to define the configuration settings for the Slack application. +These settings include tokens, secrets, API keys, and messages for the application. +The settings are loaded from an environment file and can be accessed as properties of the `SlackAppConfig` class. + +Typical usage example: + + from .config import SlackAppConfig + + config = SlackAppConfig() + token = config.SLACK_APP_TOKEN +""" + from pydantic import AnyHttpUrl, Field from pydantic_settings import BaseSettings @@ -13,9 +27,14 @@ f" Was this response helpful? Please react below to let us know" ) -EN_ERROR_MESSAGE = "Oops!, Something went wrong. Please retry again in some time" +EN_ERROR_MESSAGE = ( + "Oops!, Something went wrong. 
Please retry again in some time" +) -JA_INTRO_MESSAGE = "Wandbotは現在アルファテスト中ですので、頻繁にアップデートされます。" "ご利用の際にはプライバシーに関わる情報は入力されないようお願いします。返答を生成しています・・・" +JA_INTRO_MESSAGE = ( + "Wandbotは現在アルファテスト中ですので、頻繁にアップデートされます。" + "ご利用の際にはプライバシーに関わる情報は入力されないようお願いします。返答を生成しています・・・" +) JA_OUTRO_MESSAGE = ( ":robot_face: この答えが十分でなかった場合には、質問を少し変えて試してみると結果が良くなることがあるので、お試しください。もしくは、" diff --git a/src/wandbot/apps/utils.py b/src/wandbot/apps/utils.py new file mode 100644 index 0000000..111ce70 --- /dev/null +++ b/src/wandbot/apps/utils.py @@ -0,0 +1,94 @@ +"""This module contains utility functions for the Wandbot application. + +This module provides two main functions: `deduplicate` and `format_response`. +The `deduplicate` function is used to remove duplicates from a list while preserving the order. +The `format_response` function is used to format the response from the API query for the application + +Typical usage example: + + from .utils import deduplicate, format_response + + unique_list = deduplicate(input_list) + formatted_response = format_response(config, response, outro_message, lang, is_last) +""" + +from collections import OrderedDict +from typing import Any, List + +from pydantic_settings import BaseSettings + +from wandbot.api.schemas import APIQueryResponse + + +def deduplicate(input_list: List[Any]) -> List[Any]: + """Remove duplicates from a list while preserving order. + + Args: + input_list: The list to remove duplicates from. + + Returns: + A new list with duplicates removed while preserving the original order. + """ + return list(OrderedDict.fromkeys(input_list)) + + +def format_response( + config: BaseSettings, + response: APIQueryResponse | None, + outro_message: str = "", + lang: str = "en", + is_last: bool = True, +) -> str: + """Formats the response from the API query. + + Args: + :param config: The config object for the app. + response: The response from the API query. + outro_message: The outro message to append to the formatted response. + lang: The language of the response. + is_last: Whether the response is the last in a series. + + Returns: + The formatted response as a string. + + """ + if response is not None: + result = response.answer + if "gpt-4" not in response.model: + if lang == "ja": + warning_message = f"*警告: {response.model}* にフォールバックします。これらの結果は *gpt-4* ほど良くない可能性があります*" + else: + warning_message = ( + f"*Warning: Falling back to {response.model}*, These results may nor be as good as " + f"*gpt-4*\n\n" + ) + result = warning_message + response.answer + + if config.include_sources and response.sources and is_last: + sources_list = deduplicate( + [ + item + for item in response.sources.split(",") + if item.strip().startswith("http") + ] + ) + if len(sources_list) > 0: + items = min(len(sources_list), 3) + if lang == "ja": + result = ( + f"{result}\n\n*参考文献*\n\n>" + + "\n> ".join(sources_list[:items]) + + "\n\n" + ) + else: + result = ( + f"{result}\n\n*References*\n\n>" + + "\n> ".join(sources_list[:items]) + + "\n\n" + ) + if outro_message: + result = f"{result}\n\n{outro_message}" + + else: + result = config.ERROR_MESSAGE + return result diff --git a/src/wandbot/chat/chat.py b/src/wandbot/chat/chat.py index 41c86ab..9464cbc 100644 --- a/src/wandbot/chat/chat.py +++ b/src/wandbot/chat/chat.py @@ -1,11 +1,42 @@ +"""Handles chat interactions for WandBot. + +This module contains the Chat class which is responsible for handling chat interactions. 
+It includes methods for initializing the chat, loading the storage context from an artifact, +loading the chat engine, validating and formatting questions, formatting responses, and getting answers. +It also contains a function for generating a list of chat messages from a given chat history. + +Typical usage example: + + config = ChatConfig() + chat = Chat(config=config) + chat_history = [] + while True: + question = input("You: ") + if question.lower() == "quit": + break + else: + response = chat( + ChatRequest(question=question, chat_history=chat_history) + ) + chat_history.append( + QuestionAnswer(question=question, answer=response.answer) + ) + print(f"WandBot: {response.answer}") + print(f"Time taken: {response.time_taken}") +""" + import json from typing import Any, Dict, List, Optional import tiktoken import wandb from llama_index import StorageContext, load_index_from_storage -from llama_index.callbacks import CallbackManager, TokenCountingHandler, WandbCallbackHandler -from llama_index.chat_engine.types import ChatMode +from llama_index.callbacks import ( + CallbackManager, + TokenCountingHandler, + WandbCallbackHandler, +) +from llama_index.chat_engine.types import BaseChatEngine, ChatMode from llama_index.indices.postprocessor import CohereRerank from llama_index.llms import ChatMessage, MessageRole from llama_index.vector_stores import FaissVectorStore @@ -22,13 +53,29 @@ def get_chat_history( chat_history: List[QuestionAnswer] | None, ) -> Optional[List[ChatMessage]]: + """Generates a list of chat messages from a given chat history. + + This function takes a list of QuestionAnswer objects and transforms them into a list of ChatMessage objects. + Each QuestionAnswer object is split into two ChatMessage objects: one for the user's question and one for the assistant's answer. + If the chat history is empty or None, the function returns None. + + Args: + chat_history: A list of QuestionAnswer objects representing the history of a chat. Each QuestionAnswer object contains a question from the user and an answer from the assistant. + + Returns: + A list of ChatMessage objects representing the chat history. Each ChatMessage object has a role (either 'USER' or 'ASSISTANT') and content (the question or answer text). If the chat history is empty or None, the function returns None. + """ if not chat_history: return None else: messages = [ [ - ChatMessage(role=MessageRole.USER, content=question_answer.question), - ChatMessage(role=MessageRole.ASSISTANT, content=question_answer.answer), + ChatMessage( + role=MessageRole.USER, content=question_answer.question + ), + ChatMessage( + role=MessageRole.ASSISTANT, content=question_answer.answer + ), ] for question_answer in chat_history ] @@ -36,7 +83,28 @@ def get_chat_history( class Chat: + """Class for handling chat interactions. + + Attributes: + config: An instance of ChatConfig containing configuration settings. + run: An instance of wandb.Run for logging experiment information. + tokenizer: An instance of tiktoken.Tokenizer for encoding text. + storage_context: An instance of StorageContext for managing storage. + index: An instance of Index for storing and retrieving vectors. + wandb_callback: An instance of WandbCallbackHandler for handling Wandb callbacks. + token_counter: An instance of TokenCountingHandler for counting tokens. + callback_manager: An instance of CallbackManager for managing callbacks. + qa_prompt: A string representing the chat prompt. + chat_engine: An instance of ChatEngine for generating chat responses. 
+ fallback_chat_engine: An instance of ChatEngine for fallback chat responses. + """ + def __init__(self, config: ChatConfig): + """Initializes the Chat instance. + + Args: + config: An instance of ChatConfig containing configuration settings. + """ self.config = config self.run = wandb.init( project=self.config.wandb_project, @@ -45,12 +113,18 @@ def __init__(self, config: ChatConfig): ) self.tokenizer = tiktoken.get_encoding("cl100k_base") - self.storage_context = self.load_storage_context_from_artifact(artifact_url=self.config.index_artifact) + self.storage_context = self.load_storage_context_from_artifact( + artifact_url=self.config.index_artifact + ) self.index = load_index_from_storage(self.storage_context) self.wandb_callback = WandbCallbackHandler() - self.token_counter = TokenCountingHandler(tokenizer=self.tokenizer.encode) - self.callback_manager = CallbackManager([self.wandb_callback, self.token_counter]) + self.token_counter = TokenCountingHandler( + tokenizer=self.tokenizer.encode + ) + self.callback_manager = CallbackManager( + [self.wandb_callback, self.token_counter] + ) self.qa_prompt = load_chat_prompt(self.config.chat_prompt) self.chat_engine = self._load_chat_engine( @@ -62,7 +136,17 @@ def __init__(self, config: ChatConfig): max_retries=self.config.max_fallback_retries, ) - def load_storage_context_from_artifact(self, artifact_url: str): + def load_storage_context_from_artifact( + self, artifact_url: str + ) -> StorageContext: + """Loads the storage context from the given artifact URL. + + Args: + artifact_url: A string representing the URL of the artifact. + + Returns: + An instance of StorageContext. + """ artifact = self.run.use_artifact(artifact_url) artifact_dir = artifact.download() storage_context = StorageContext.from_defaults( @@ -71,7 +155,18 @@ def load_storage_context_from_artifact(self, artifact_url: str): ) return storage_context - def _load_chat_engine(self, model_name, max_retries): + def _load_chat_engine( + self, model_name: str, max_retries: int + ) -> BaseChatEngine: + """Loads the chat engine with the given model name and maximum retries. + + Args: + model_name: A string representing the name of the model. + max_retries: An integer representing the maximum number of retries. + + Returns: + An instance of ChatEngine. + """ service_context = load_service_context( model_name, temperature=self.config.chat_temperature, @@ -85,12 +180,25 @@ def _load_chat_engine(self, model_name, max_retries): response_mode="compact", service_context=service_context, text_qa_template=self.qa_prompt, - node_postprocessors=[CohereRerank(top_n=15, model="rerank-english-v2.0")], + node_postprocessors=[ + CohereRerank(top_n=15, model="rerank-english-v2.0") + ], storage_context=self.storage_context, ) return chat_engine def validate_and_format_question(self, question: str) -> str: + """Validates and formats the given question. + + Args: + question: A string representing the question to validate and format. + + Returns: + A string representing the validated and formatted question. + + Raises: + ValueError: If the question is too long. + """ question = " ".join(question.strip().split()) if len(self.tokenizer.encode(question)) > 1024: @@ -99,7 +207,15 @@ def validate_and_format_question(self, question: str) -> str: ) return question - def format_response(self, result: Dict[str, Any]): + def format_response(self, result: Dict[str, Any]) -> Dict[str, Any]: + """Formats the response dictionary. + + Args: + result: A dictionary representing the response. 
+ + Returns: + A formatted response dictionary. + """ response = {} if result.get("source_documents", None): source_documents = [ @@ -116,16 +232,31 @@ def format_response(self, result: Dict[str, Any]): if len(source_documents) and self.config.include_sources: response["source_documents"] = json.dumps(source_documents) - response["sources"] = ",".join([doc["source"] for doc in source_documents]) + response["sources"] = ",".join( + [doc["source"] for doc in source_documents] + ) else: response["source_documents"] = "" response["sources"] = "" return response - def get_answer(self, query: str, chat_history: Optional[List[ChatMessage]] = None): + def get_answer( + self, query: str, chat_history: Optional[List[ChatMessage]] = None + ) -> Dict[str, Any]: + """Gets the answer for the given query and chat history. + + Args: + query: A string representing the query. + chat_history: A list of ChatMessage representing the chat history. + + Returns: + A formatted response dictionary. + """ try: - response = self.chat_engine.chat(message=query, chat_history=chat_history) + response = self.chat_engine.chat( + message=query, chat_history=chat_history + ) result = { "answer": response.response, "source_documents": response.source_nodes, @@ -133,9 +264,13 @@ def get_answer(self, query: str, chat_history: Optional[List[ChatMessage]] = Non } except Exception as e: logger.warning(f"{self.config.chat_model_name} failed with {e}") - logger.warning(f"Falling back to {self.config.fallback_model_name} model") + logger.warning( + f"Falling back to {self.config.fallback_model_name} model" + ) try: - response = self.fallback_chat_engine.chat(message=query, chat_history=chat_history) + response = self.fallback_chat_engine.chat( + message=query, chat_history=chat_history + ) result = { "answer": response.response, "source_documents": response.source_nodes, @@ -143,7 +278,9 @@ def get_answer(self, query: str, chat_history: Optional[List[ChatMessage]] = Non } except Exception as e: - logger.error(f"{self.config.fallback_model_name} failed with {e}") + logger.error( + f"{self.config.fallback_model_name} failed with {e}" + ) result = { "answer": "\uE058" + " Sorry, there seems to be an issue with our LLM service. Please try again in some time.", @@ -153,6 +290,14 @@ def get_answer(self, query: str, chat_history: Optional[List[ChatMessage]] = Non return self.format_response(result) def __call__(self, chat_request: ChatRequest) -> ChatRepsonse: + """Handles the chat request and returns the chat response. + + Args: + chat_request: An instance of ChatRequest representing the chat request. + + Returns: + An instance of ChatResponse representing the chat response. 
+ """ with Timer() as timer: try: query = self.validate_and_format_question(chat_request.question) @@ -162,7 +307,10 @@ def __call__(self, chat_request: ChatRequest) -> ChatRepsonse: "sources": "", } else: - result = self.get_answer(query, chat_history=get_chat_history(chat_request.chat_history)) + result = self.get_answer( + query, + chat_history=get_chat_history(chat_request.chat_history), + ) usage_stats = { "total_tokens": self.token_counter.total_llm_token_count, "prompt_tokens": self.token_counter.prompt_llm_token_count, @@ -195,8 +343,12 @@ def main(): if question.lower() == "quit": break else: - response = chat(ChatRequest(question=question, chat_history=chat_history)) - chat_history.append(QuestionAnswer(question=question, answer=response.answer)) + response = chat( + ChatRequest(question=question, chat_history=chat_history) + ) + chat_history.append( + QuestionAnswer(question=question, answer=response.answer) + ) print(f"WandBot: {response.answer}") print(f"Time taken: {response.time_taken}") diff --git a/src/wandbot/chat/config.py b/src/wandbot/chat/config.py index 94cae30..6cb262d 100644 --- a/src/wandbot/chat/config.py +++ b/src/wandbot/chat/config.py @@ -1,3 +1,17 @@ +"""This module contains the configuration settings for wandbot. + +The `ChatConfig` class in this module is used to define various settings for wandbot, such as the model name, +maximum retries, fallback model name, chat temperature, chat prompt, index artifact, embeddings cache, verbosity, +wandb project and entity, inclusion of sources, and query tokens threshold. These settings are used throughout the +chatbot's operation to control its behavior. + +Typical usage example: + + from wandbot.chat.config import ChatConfig + config = ChatConfig() + print(config.chat_model_name) +""" + import pathlib from pydantic import Field @@ -12,7 +26,9 @@ class ChatConfig(BaseSettings): chat_temperature: float = 0.1 chat_prompt: pathlib.Path = pathlib.Path("data/prompts/chat_prompt.json") index_artifact: str = "wandbot/wandbot-dev/wandbot_index:latest" - embeddings_cache: pathlib.Path = Field(pathlib.Path("data/cache/embeddings"), env="EMBEDDINGS_CACHE_PATH") + embeddings_cache: pathlib.Path = Field( + pathlib.Path("data/cache/embeddings"), env="EMBEDDINGS_CACHE_PATH" + ) verbose: bool = False wandb_project: str | None = Field("wandbot_public", env="WANDB_PROJECT") wandb_entity: str | None = Field("wandbot", env="WANDB_ENTITY") diff --git a/src/wandbot/chat/prompts.py b/src/wandbot/chat/prompts.py index 04d51cd..876e4a3 100644 --- a/src/wandbot/chat/prompts.py +++ b/src/wandbot/chat/prompts.py @@ -1,3 +1,15 @@ +"""This module provides functionality for loading chat prompts. + +The main function in this module is `load_chat_prompt`, which loads a chat prompt from a given JSON file. +The JSON file should contain two keys: "system_template" and "human_template", which correspond to the system and user messages respectively. + +Typical usage example: + + from wandbot.chat import prompts + + chat_prompt = prompts.load_chat_prompt('path_to_your_json_file.json') +""" + import json import logging import pathlib @@ -9,11 +21,29 @@ logger = logging.getLogger(__name__) -def load_chat_prompt(f_name: Union[pathlib.Path, str] = None) -> ChatPromptTemplate: +def load_chat_prompt( + f_name: Union[pathlib.Path, str] = None +) -> ChatPromptTemplate: + """ + Loads a chat prompt from a given file. + + This function reads a JSON file specified by f_name and constructs a ChatPromptTemplate + object from the data. 
The JSON file should contain two keys: "system_template" and "human_template", + which correspond to the system and user messages respectively. + + Args: + f_name: A string or a pathlib.Path object representing the path to the JSON file. + If None, a default path is used. + + Returns: + A ChatPromptTemplate object constructed from the data in the JSON file. + """ f_name = pathlib.Path(f_name) template = json.load(f_name.open("r")) messages = [ - ChatMessage(role=MessageRole.SYSTEM, content=template["system_template"]), + ChatMessage( + role=MessageRole.SYSTEM, content=template["system_template"] + ), ChatMessage(role=MessageRole.USER, content=template["human_template"]), ] diff --git a/src/wandbot/chat/schemas.py b/src/wandbot/chat/schemas.py index b39e183..ef91f11 100644 --- a/src/wandbot/chat/schemas.py +++ b/src/wandbot/chat/schemas.py @@ -1,3 +1,21 @@ +"""This module defines the Pydantic models for the chat system. + +This module contains the Pydantic models that are used to validate the data +for the chat system. It includes models for chat threads, chat requests, and +chat responses. The models are used to ensure that the data sent to and received +from the chat system is in the correct format. + +Typical usage example: + + chat_thread = ChatThread(thread_id="123", application="app1") + chat_request = ChatRequest(question="What is the weather?", chat_history=None) + chat_response = ChatRepsonse(system_prompt="Weather is sunny", question="What is the weather?", + answer="It's sunny", model="model1", sources="source1", + source_documents="doc1", total_tokens=10, prompt_tokens=2, + completion_tokens=8, time_taken=1.0, + start_time=datetime.now(), end_time=datetime.now()) +""" + from datetime import datetime from typing import List diff --git a/src/wandbot/chat/utils.py b/src/wandbot/chat/utils.py deleted file mode 100644 index b6de444..0000000 --- a/src/wandbot/chat/utils.py +++ /dev/null @@ -1,74 +0,0 @@ -import re -from typing import Any, Dict, List - -from langchain.callbacks.manager import CallbackManagerForChainRun -from langchain.chains import ConversationalRetrievalChain, StuffDocumentsChain -from langchain.schema import Document - -from wandbot.database.schemas import QuestionAnswer - - -def get_chat_history( - chat_history: List[QuestionAnswer] | None, -) -> List[tuple[str, str]]: - if not chat_history: - return [] - else: - return [(question_answer.question, question_answer.answer) for question_answer in chat_history] - - -class ConversationalRetrievalQASourcesChain(ConversationalRetrievalChain): - reduce_k_below_max_tokens: bool = True - max_tokens_limit: int = 2816 - - def _reduce_tokens_below_limit(self, docs: List[Document]) -> List[Document]: - num_docs = len(docs) - - if self.max_tokens_limit and isinstance(self.combine_docs_chain, StuffDocumentsChain): - tokens = [self.combine_docs_chain.llm_chain.llm.get_num_tokens(doc.page_content) for doc in docs] - token_count = sum(tokens[:num_docs]) - while token_count > self.max_tokens_limit: - num_docs -= 1 - token_count -= tokens[num_docs] - - return docs[:num_docs] - - def _get_docs( - self, - question: str, - inputs: Dict[str, Any], - *, - run_manager: CallbackManagerForChainRun, - ) -> List[Document]: - docs = self.retriever.get_relevant_documents(question, callbacks=run_manager.get_child()) - return self._reduce_tokens_below_limit(docs) - - @property - def output_keys(self) -> List[str]: - """Return the output keys. 
- - :meta private: - """ - _output_keys = [self.output_key] - if self.return_source_documents: - _output_keys = _output_keys + ["source_documents"] + ["sources"] - return _output_keys - - def _call(self, inputs: Dict[str, Any], **kwargs) -> Dict[str, Any]: - results = super()._call(inputs, **kwargs) - answer = results["answer"] - if re.search(r"Source[s]?:\s", answer, flags=re.IGNORECASE): - answers_and_sources = re.split(r"Source[s]?:\s", answer, flags=re.IGNORECASE) - if len(answers_and_sources) > 1: - answer = answers_and_sources[0] - sources = answers_and_sources[1] - elif len(answers_and_sources) == 1: - answer = answers_and_sources[0] - sources = "" - else: - sources = "" - else: - sources = "" - results["answer"] = answer - results["sources"] = sources - return results diff --git a/src/wandbot/database/client.py b/src/wandbot/database/client.py index 1f9489a..ed2f724 100644 --- a/src/wandbot/database/client.py +++ b/src/wandbot/database/client.py @@ -1,3 +1,16 @@ +"""This module provides a Database and DatabaseClient class for managing database operations. + +The Database class provides a connection to the database and manages the session. It also provides methods for getting and setting the current session object and the name of the database. + +The DatabaseClient class uses an instance of the Database class to perform operations such as getting and creating chat threads, question answers, and feedback from the database. + +Typical usage example: + + db_client = DatabaseClient() + chat_thread = db_client.get_chat_thread(application='app1', thread_id='123') + question_answer = db_client.create_question_answer(question_answer=QuestionAnswerCreateSchema()) +""" + from typing import Any, List from sqlalchemy.future import create_engine @@ -9,43 +22,102 @@ from wandbot.database.models import QuestionAnswer as QuestionAnswerModel from wandbot.database.schemas import ChatThreadCreate as ChatThreadCreateSchema from wandbot.database.schemas import Feedback as FeedbackSchema -from wandbot.database.schemas import QuestionAnswerCreate as QuestionAnswerCreateSchema +from wandbot.database.schemas import ( + QuestionAnswerCreate as QuestionAnswerCreateSchema, +) class Database: - db_config = DataBaseConfig() + """A class representing a database connection. + + This class provides a connection to the database and manages the session. + + Attributes: + db_config: An instance of the DataBaseConfig class. + SessionLocal: A sessionmaker object for creating sessions. + db: The current session object. + name: The name of the database. + """ + + db_config: DataBaseConfig = DataBaseConfig() def __init__(self, database: str | None = None): + """Initializes the Database instance. + + Args: + database: The URL of the database. If None, the default URL is used. + """ if database is not None: - engine = create_engine(url=database, connect_args=self.db_config.connect_args) + engine: Any = create_engine( + url=database, connect_args=self.db_config.connect_args + ) else: - engine = create_engine( + engine: Any = create_engine( url=self.db_config.SQLALCHEMY_DATABASE_URL, connect_args=self.db_config.connect_args, ) - self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + self.SessionLocal: Any = sessionmaker( + autocommit=False, autoflush=False, bind=engine + ) + + def __get__(self, instance, owner) -> Any: + """Gets the current session object. - def __get__(self, instance, owner): + Args: + instance: The instance of the owner class. + owner: The owner class. 
+ + Returns: + The current session object. + """ if not hasattr(self, "db"): - self.db = self.SessionLocal() + self.db: Any = self.SessionLocal() return self.db - def __set__(self, instance, value): + def __set__(self, instance, value) -> None: + """Sets the current session object. + + Args: + instance: The instance of the owner class. + value: The new session object. + """ self.db = value - def __set_name__(self, owner, name): - self.name = name + def __set_name__(self, owner, name) -> None: + """Sets the name of the database. + + Args: + owner: The owner class. + name: The name of the database. + """ + self.name: str = name class DatabaseClient: - database = Database() + database: Database = Database() def __init__(self, database: str | None = None): + """Initializes the DatabaseClient instance. + + Args: + database: The URL of the database. If None, the default URL is used. + """ if database is not None: self.database = Database(database=database) - def get_chat_thread(self, application: str, thread_id: str) -> ChatThreadModel | None: - chat_thread = ( + def get_chat_thread( + self, application: str, thread_id: str + ) -> ChatThreadModel | None: + """Gets a chat thread from the database. + + Args: + application: The application name. + thread_id: The ID of the chat thread. + + Returns: + The chat thread model if found, None otherwise. + """ + chat_thread: ChatThreadModel | None = ( self.database.query(ChatThreadModel) .filter( ChatThreadModel.thread_id == thread_id, @@ -55,9 +127,22 @@ def get_chat_thread(self, application: str, thread_id: str) -> ChatThreadModel | ) return chat_thread - def create_chat_thread(self, chat_thread: ChatThreadCreateSchema) -> ChatThreadModel: + def create_chat_thread( + self, chat_thread: ChatThreadCreateSchema + ) -> ChatThreadModel: + """Creates a chat thread in the database. + + Args: + chat_thread: The chat thread to create. + + Returns: + The created chat thread model. + """ try: - chat_thread = ChatThreadModel(thread_id=chat_thread.thread_id, application=chat_thread.application) + chat_thread: ChatThreadModel = ChatThreadModel( + thread_id=chat_thread.thread_id, + application=chat_thread.application, + ) self.database.add(chat_thread) self.database.flush() self.database.commit() @@ -68,8 +153,19 @@ def create_chat_thread(self, chat_thread: ChatThreadCreateSchema) -> ChatThreadM return chat_thread - def get_question_answer(self, question_answer_id: str, thread_id: str) -> QuestionAnswerModel | None: - question_answer = ( + def get_question_answer( + self, question_answer_id: str, thread_id: str + ) -> QuestionAnswerModel | None: + """Gets a question answer from the database. + + Args: + question_answer_id: The ID of the question answer. + thread_id: The ID of the chat thread. + + Returns: + The question answer model if found, None otherwise. + """ + question_answer: QuestionAnswerModel | None = ( self.database.query(QuestionAnswerModel) .filter( QuestionAnswerModel.thread_id == thread_id, @@ -79,9 +175,21 @@ def get_question_answer(self, question_answer_id: str, thread_id: str) -> Questi ) return question_answer - def create_question_answer(self, question_answer: QuestionAnswerCreateSchema) -> QuestionAnswerModel: + def create_question_answer( + self, question_answer: QuestionAnswerCreateSchema + ) -> QuestionAnswerModel: + """Creates a question answer in the database. + + Args: + question_answer: The question answer to create. + + Returns: + The created question answer model. 
+ """ try: - question_answer = QuestionAnswerModel(**question_answer.dict()) + question_answer: QuestionAnswerModel = QuestionAnswerModel( + **question_answer.dict() + ) self.database.add(question_answer) self.database.flush() self.database.commit() @@ -91,15 +199,33 @@ def create_question_answer(self, question_answer: QuestionAnswerCreateSchema) -> return question_answer def get_feedback(self, question_answer_id: str) -> FeedBackModel | None: - feedback = ( - self.database.query(FeedBackModel).filter(FeedBackModel.question_answer_id == question_answer_id).first() + """Gets feedback from the database. + + Args: + question_answer_id: The ID of the question answer. + + Returns: + The feedback model if found, None otherwise. + """ + feedback: FeedBackModel | None = ( + self.database.query(FeedBackModel) + .filter(FeedBackModel.question_answer_id == question_answer_id) + .first() ) return feedback def create_feedback(self, feedback: FeedbackSchema) -> FeedBackModel: + """Creates feedback in the database. + + Args: + feedback: The feedback to create. + + Returns: + The created feedback model. + """ if feedback.rating: try: - feedback = FeedBackModel(**feedback.dict()) + feedback: FeedBackModel = FeedBackModel(**feedback.dict()) self.database.add(feedback) self.database.flush() self.database.commit() @@ -109,13 +235,28 @@ def create_feedback(self, feedback: FeedbackSchema) -> FeedBackModel: return feedback - def get_all_question_answers(self, time=None) -> List[dict[str, Any]] | None: - question_answers = self.database.query(QuestionAnswerModel) + def get_all_question_answers( + self, time: Any = None + ) -> List[dict[str, Any]] | None: + """Gets all question answers from the database. + + Args: + time: The time to filter the question answers by. + + Returns: + A list of question answer dictionaries if found, None otherwise. + """ + question_answers: List[dict[str, Any]] | None = self.database.query( + QuestionAnswerModel + ) if time is not None: - question_answers = question_answers.filter(QuestionAnswerModel.end_time >= time) + question_answers = question_answers.filter( + QuestionAnswerModel.end_time >= time + ) question_answers = question_answers.all() if question_answers is not None: question_answers = [ - QuestionAnswerCreateSchema.from_orm(question_answer).dict() for question_answer in question_answers + QuestionAnswerCreateSchema.from_orm(question_answer).dict() + for question_answer in question_answers ] return question_answers diff --git a/src/wandbot/database/config.py b/src/wandbot/database/config.py index 310d95e..d5c75cb 100644 --- a/src/wandbot/database/config.py +++ b/src/wandbot/database/config.py @@ -1,3 +1,14 @@ +"""This module provides a DataBaseConfig class for managing database configuration. + +The DataBaseConfig class uses the BaseSettings class from pydantic_settings to define and manage the database configuration settings. It includes the SQLAlchemy database URL and connection arguments. 
+ +Typical usage example: + + db_config = DataBaseConfig() + database_url = db_config.SQLALCHEMY_DATABASE_URL + connect_args = db_config.connect_args +""" + from typing import Any from pydantic import Field @@ -5,5 +16,7 @@ class DataBaseConfig(BaseSettings): - SQLALCHEMY_DATABASE_URL: str = Field("sqlite:///./data/cache/app.db", env="SQLALCHEMY_DATABASE_URL") + SQLALCHEMY_DATABASE_URL: str = Field( + "sqlite:///./data/cache/app.db", env="SQLALCHEMY_DATABASE_URL" + ) connect_args: dict[str, Any] = Field({"check_same_thread": False}) diff --git a/src/wandbot/database/database.py b/src/wandbot/database/database.py index d2aae66..d8aeec7 100644 --- a/src/wandbot/database/database.py +++ b/src/wandbot/database/database.py @@ -1,3 +1,13 @@ +"""This module provides the setup for the SQLAlchemy database engine and session. + +It imports the create_engine and sessionmaker modules from SQLAlchemy, and the DataBaseConfig class from the config module. It then creates an instance of DataBaseConfig, sets up the engine with the SQLAlchemy database URL and connection arguments, and creates a sessionmaker bound to this engine. + +Typical usage example: + + from wandbot.database.database import SessionLocal + session = SessionLocal() +""" + from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker @@ -5,5 +15,7 @@ db_config = DataBaseConfig() -engine = create_engine(db_config.SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}) +engine = create_engine( + db_config.SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} +) SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) diff --git a/src/wandbot/database/models.py b/src/wandbot/database/models.py index 5ccecf8..1586397 100644 --- a/src/wandbot/database/models.py +++ b/src/wandbot/database/models.py @@ -1,3 +1,15 @@ +"""This module defines the SQLAlchemy models for the ChatThread, QuestionAnswer, and FeedBack tables. + +Each class represents a table in the database and includes columns and relationships. The Base class is a declarative base that stores a catalog of classes and mapped tables in the Declarative system. 
+ +Typical usage example: + + from wandbot.database.models import ChatThread, QuestionAnswer, FeedBack + chat_thread = ChatThread(thread_id='123', application='app1') + question_answer = QuestionAnswer(question_answer_id='456', thread_id='123') + feedback = FeedBack(feedback_id='789', question_answer_id='456') +""" + from sqlalchemy import Column, DateTime, Float, ForeignKey, Integer, String from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship @@ -10,7 +22,9 @@ class ChatThread(Base): thread_id = Column(String, primary_key=True, index=True) application = Column(String) - question_answers = relationship("QuestionAnswer", back_populates="chat_thread") + question_answers = relationship( + "QuestionAnswer", back_populates="chat_thread" + ) class QuestionAnswer(Base): @@ -40,6 +54,8 @@ class FeedBack(Base): __tablename__ = "feedback" feedback_id = Column(String, primary_key=True, index=True) - question_answer_id = Column(String, ForeignKey("question_answers.question_answer_id")) + question_answer_id = Column( + String, ForeignKey("question_answers.question_answer_id") + ) rating = Column(Integer) question_answer = relationship("QuestionAnswer", back_populates="feedback") diff --git a/src/wandbot/database/schemas.py b/src/wandbot/database/schemas.py index 430a05a..2049155 100644 --- a/src/wandbot/database/schemas.py +++ b/src/wandbot/database/schemas.py @@ -1,3 +1,21 @@ +"""This module defines the Pydantic models for the chat system. + +This module contains the Pydantic models that are used to validate the data +for the chat system. It includes models for chat threads, chat requests, and +chat responses. The models are used to ensure that the data sent to and received +from the chat system is in the correct format. + +Typical usage example: + + chat_thread = ChatThread(thread_id="123", application="app1") + chat_request = ChatRequest(question="What is the weather?", chat_history=None) + chat_response = ChatRepsonse(system_prompt="Weather is sunny", question="What is the weather?", + answer="It's sunny", model="model1", sources="source1", + source_documents="doc1", total_tokens=10, prompt_tokens=2, + completion_tokens=8, time_taken=1.0, + start_time=datetime.now(), end_time=datetime.now()) +""" + from datetime import datetime from enum import IntEnum diff --git a/src/wandbot/evaluation/eval.py b/src/wandbot/evaluation/eval.py index 4df1402..2c57b54 100644 --- a/src/wandbot/evaluation/eval.py +++ b/src/wandbot/evaluation/eval.py @@ -49,7 +49,9 @@ def __init__(self, config: EvalConfig = None): def load_eval_dataframe(self): eval_artifact = self.wandb_run.use_artifact(self.config.eval_artifact) eval_artifact_dir = Path(eval_artifact.download()) - df_questions = pd.read_csv(eval_artifact_dir / "auto-eval-questions.csv") + df_questions = pd.read_csv( + eval_artifact_dir / "auto-eval-questions.csv" + ) if self.config.debug: df_questions = df_questions.sample(n=3).reset_index(drop=True) return df_questions @@ -83,18 +85,28 @@ def __call__(self, config: EvalConfig = None): ) break except Exception as e: - print(f"Error occurred: {e}. Retrying in {self.config.retry_delay} seconds...") + print( + f"Error occurred: {e}. Retrying in {self.config.retry_delay} seconds..." 
+ ) time.sleep(self.config.retry_delay) - self.eval_df["retrieval_match"] = self.eval_df.apply(lambda x: x.orig_document in x.documents, axis=1) + self.eval_df["retrieval_match"] = self.eval_df.apply( + lambda x: x.orig_document in x.documents, axis=1 + ) self.eval_df = self.eval_df.dropna() - self.eval_df["string_distance"] = self.eval_df.apply(calculate_string_distance, axis=1) + self.eval_df["string_distance"] = self.eval_df.apply( + calculate_string_distance, axis=1 + ) eval_df.orig_document = eval_df.orig_document.apply( - lambda x: x.replace("../docodile/docs/", "https://docs.wandb.ai/").replace(".md", "") + lambda x: x.replace( + "../docodile/docs/", "https://docs.wandb.ai/" + ).replace(".md", "") ) - retrieval_accuracy = len(self.eval_df[self.eval_df["retrieval_match"] == True]) / len(self.eval_df) + retrieval_accuracy = len( + self.eval_df[self.eval_df["retrieval_match"] == True] + ) / len(self.eval_df) print(f"Retrieval accuracy: {retrieval_accuracy}") wandb.log({"retrieval_accuracy": retrieval_accuracy}) @@ -129,9 +141,13 @@ def __call__(self, config: EvalConfig = None): } ) graded_outputs = eval_chain.evaluate(examples, predictions) - self.eval_df["model_score"] = [x.get("text", "None") for x in graded_outputs] + self.eval_df["model_score"] = [ + x.get("text", "None") for x in graded_outputs + ] - model_accuracy = len(self.eval_df[self.eval_df["model_score"] == "CORRECT"]) / len(self.eval_df) + model_accuracy = len( + self.eval_df[self.eval_df["model_score"] == "CORRECT"] + ) / len(self.eval_df) print(f"Chat model accuracy: {model_accuracy}") wandb.log({"chat_accuracy": model_accuracy}) diff --git a/src/wandbot/ingestion/config.py b/src/wandbot/ingestion/config.py index a5d0b0d..ad62e36 100644 --- a/src/wandbot/ingestion/config.py +++ b/src/wandbot/ingestion/config.py @@ -1,3 +1,15 @@ +"""This module defines the configuration for data sources and stores in the Wandbot ingestion system. + +This module contains classes that define the configuration for various data sources and stores used in the Wandbot ingestion system. +Each class represents a different type of data source or store, such as English and Japanese documentation, example code, SDK code, +and more. Each class is defined with various attributes like name, data source, docstore directory, etc. 
+ +Typical usage example: + + data_store_config = DataStoreConfig() + docodile_english_store_config = DocodileEnglishStoreConfig() +""" + import pathlib from typing import List, Optional, Union from urllib.parse import urlparse @@ -11,7 +23,9 @@ class DataSource(BaseSettings): - cache_dir: pathlib.Path = Field("data/cache/raw_data", env="WANDBOT_CACHE_DIR") + cache_dir: pathlib.Path = Field( + "data/cache/raw_data", env="WANDBOT_CACHE_DIR" + ) ignore_cache: bool = False remote_path: str = "" repo_path: str = "" @@ -29,21 +43,29 @@ class DataStoreConfig(BaseModel): @model_validator(mode="after") def _set_cache_paths(cls, values: "DataStoreConfig") -> "DataStoreConfig": - values.docstore_dir = values.data_source.cache_dir / values.name / values.docstore_dir + values.docstore_dir = ( + values.data_source.cache_dir / values.name / values.docstore_dir + ) data_source = values.data_source if data_source.repo_path: - data_source.is_git_repo = urlparse(data_source.repo_path).netloc == "github.com" + data_source.is_git_repo = ( + urlparse(data_source.repo_path).netloc == "github.com" + ) local_path = urlparse(data_source.repo_path).path.split("/")[-1] if not data_source.local_path: - data_source.local_path = data_source.cache_dir / values.name / local_path + data_source.local_path = ( + data_source.cache_dir / values.name / local_path + ) if data_source.is_git_repo: if data_source.git_id_file is None: logger.debug( "The source data is a git repo but no git_id_file is set." " Attempting to use the default ssh id file" ) - data_source.git_id_file = pathlib.Path.home() / ".ssh" / "id_rsa" + data_source.git_id_file = ( + pathlib.Path.home() / ".ssh" / "id_rsa" + ) values.data_source = data_source return values diff --git a/src/wandbot/ingestion/prepare_data.py b/src/wandbot/ingestion/prepare_data.py index 1398eda..6b7fb33 100644 --- a/src/wandbot/ingestion/prepare_data.py +++ b/src/wandbot/ingestion/prepare_data.py @@ -1,5 +1,22 @@ +""" +This module contains classes and functions for preparing data in the Wandbot ingestion system. + +The module includes the following classes: +- `DataLoader`: A base class for data loaders that provides a base implementation for lazy loading of documents. +- `DocodileDataLoader`: A data loader specifically designed for Docodile documents. +- `CodeDataLoader`: A data loader for code documents. + +The module also includes the following functions: +- `load`: Loads and prepares data for the Wandbot ingestion system. + +Typical usage example: + + load(project="my_project", entity="my_entity", result_artifact_name="raw_dataset") +""" + import json import os +import pathlib from typing import Iterator from urllib.parse import urljoin @@ -18,31 +35,78 @@ ExampleCodeStoreConfig, ExampleNotebookStoreConfig, ) -from wandbot.ingestion.utils import EXTENSION_MAP, clean_contents, fetch_git_repo +from wandbot.ingestion.utils import ( + EXTENSION_MAP, + clean_contents, + fetch_git_repo, +) from wandbot.utils import get_logger logger = get_logger(__name__) class DataLoader(BaseLoader): + """A base class for data loaders. + + This class provides a base implementation for lazy loading of documents. + Subclasses should implement the `lazy_load` method to define the specific + loading behavior. + """ + def __init__(self, config: DataStoreConfig): + """Initializes the DataLoader instance. + + Args: + config: The configuration for the data store. 
+ """ self.config = config self.metadata = None def lazy_load( self, ) -> Iterator[Document]: - """A lazy loader for Documents.""" - raise NotImplementedError(f"{self.__class__.__name__} does not implement lazy_load()") + """A lazy loader for Documents. + + This method should be implemented by subclasses to define the specific + loading behavior. + + Returns: + An iterator of Document objects. + """ + raise NotImplementedError( + f"{self.__class__.__name__} does not implement lazy_load()" + ) def load(self): + """Loads the documents. + + Returns: + A list of Document objects. + """ documents = list(self.lazy_load()) self.metadata.update({"num_documents": len(documents)}) return documents class DocodileDataLoader(DataLoader): - def extract_slug(self, file_path): + """A data loader for Docodile documents. + + This class provides a data loader specifically designed for Docodile documents. + It implements the lazy_load method to define the loading behavior. + + Attributes: + config: The configuration for the data store. + """ + + def extract_slug(self, file_path: pathlib.Path) -> str: + """Extracts the slug from a file. + + Args: + file_path: The path to the file. + + Returns: + The extracted slug. + """ with open(file_path, "r") as file: content = file.read() md = markdown.Markdown(extensions=["meta"]) @@ -50,7 +114,16 @@ def extract_slug(self, file_path): meta = md.Meta.get("slug", [""]) return meta[0] - def generate_site_url(self, base_path, file_path): + def generate_site_url(self, base_path: Path, file_path: Path) -> str: + """Generates the site URL for a file. + + Args: + base_path: The base path of the file. + file_path: The path to the file. + + Returns: + The generated site URL. + """ relative_path = file_path.relative_to(base_path) if relative_path.parts[0] == "guides": chapter = "guides" @@ -62,7 +135,9 @@ def generate_site_url(self, base_path, file_path): file_loc = file_path.relative_to((base_path / "ref")).parent elif relative_path.parts[0] == "tutorials": chapter = "tutorials" - slug = self.extract_slug((base_path / "tutorials") / "intro_to_tutorials.md") + slug = self.extract_slug( + (base_path / "tutorials") / "intro_to_tutorials.md" + ) file_loc = file_path.relative_to((base_path / "tutorials")).parent else: chapter = "" @@ -73,7 +148,9 @@ def generate_site_url(self, base_path, file_path): if file_path.name in ("intro.md", "README.md", "intro_to_tutorials.md"): file_name = "" site_relative_path = os.path.join(chapter, slug, file_loc, file_name) - site_url = urljoin(str(self.config.data_source.remote_path), str(site_relative_path)) + site_url = urljoin( + str(self.config.data_source.remote_path), str(site_relative_path) + ) if "other/" in site_url: site_url = site_url.replace("other/", "") @@ -82,8 +159,17 @@ def generate_site_url(self, base_path, file_path): def lazy_load( self, ) -> Iterator[Document]: + """A lazy loader for Docodile documents. + + This method implements the lazy loading behavior for Docodile documents. + + Yields: + A Document object. 
+ """ if self.config.data_source.is_git_repo: - self.metadata = fetch_git_repo(self.config.data_source, self.config.data_source.git_id_file) + self.metadata = fetch_git_repo( + self.config.data_source, self.config.data_source.git_id_file + ) local_paths = [] file_patterns = ( @@ -93,11 +179,17 @@ def lazy_load( ) for file_pattern in file_patterns: local_paths.extend( - list((self.config.data_source.local_path / self.config.data_source.base_path).rglob(file_pattern)) + list( + ( + self.config.data_source.local_path + / self.config.data_source.base_path + ).rglob(file_pattern) + ) ) document_files = { local_path: self.generate_site_url( - self.config.data_source.local_path / self.config.data_source.base_path, + self.config.data_source.local_path + / self.config.data_source.base_path, local_path, ) for local_path in local_paths @@ -108,18 +200,33 @@ def lazy_load( document = TextLoader(f_name).load()[0] contents = document.page_content document.page_content = clean_contents(contents) - document.metadata["file_type"] = os.path.splitext(document.metadata["source"])[-1] - document.metadata["source"] = document_files[document.metadata["source"]] + document.metadata["file_type"] = os.path.splitext( + document.metadata["source"] + )[-1] + document.metadata["source"] = document_files[ + document.metadata["source"] + ] document.metadata["language"] = self.config.language yield document except Exception as e: - logger.warning(f"Failed to load documentation {f_name} due to {e}") + logger.warning( + f"Failed to load documentation {f_name} due to {e}" + ) class CodeDataLoader(DataLoader): def lazy_load(self) -> Iterator[Document]: + """A lazy loader for code documents. + + This method implements the lazy loading behavior for code documents. + + Yields: + A Document object. + """ if self.config.data_source.is_git_repo: - self.metadata = fetch_git_repo(self.config.data_source, self.config.data_source.git_id_file) + self.metadata = fetch_git_repo( + self.config.data_source, self.config.data_source.git_id_file + ) local_paths = [] file_patterns = ( @@ -129,7 +236,12 @@ def lazy_load(self) -> Iterator[Document]: ) for file_pattern in file_patterns: local_paths.extend( - list((self.config.data_source.local_path / self.config.data_source.base_path).rglob(file_pattern)) + list( + ( + self.config.data_source.local_path + / self.config.data_source.base_path + ).rglob(file_pattern) + ) ) paths = list(local_paths) @@ -167,21 +279,47 @@ def lazy_load(self) -> Iterator[Document]: document.page_content = cleaned_body else: document = TextLoader(f_name).load()[0] - document.metadata["file_type"] = os.path.splitext(document.metadata["source"])[-1] - document.metadata["source"] = document_files[document.metadata["source"]] - document.metadata["language"] = EXTENSION_MAP[document.metadata["file_type"]] + document.metadata["file_type"] = os.path.splitext( + document.metadata["source"] + )[-1] + document.metadata["source"] = document_files[ + document.metadata["source"] + ] + document.metadata["language"] = EXTENSION_MAP[ + document.metadata["file_type"] + ] yield document except Exception as e: - logger.warning(f"Failed to load code in {f_name} with error {e}") + logger.warning( + f"Failed to load code in {f_name} with error {e}" + ) def load( project: str, entity: str, result_artifact_name: str = "raw_dataset", -): +) -> str: + """Load and prepare data for the Wandbot ingestion system. 
+ + This function initializes a Wandb run, creates an artifact for the prepared dataset, + and loads and prepares data from different loaders. The prepared data is then saved + in the docstore directory and added to the artifact. + + Args: + project: The name of the Wandb project. + entity: The name of the Wandb entity. + result_artifact_name: The name of the result artifact. Default is "raw_dataset". + + Returns: + The latest version of the prepared dataset artifact in the format "{entity}/{project}/{result_artifact_name}:latest". + """ run = wandb.init(project=project, entity=entity, job_type="prepare_dataset") - artifact = wandb.Artifact(result_artifact_name, type="dataset", description="Raw documents for wandbot") + artifact = wandb.Artifact( + result_artifact_name, + type="dataset", + description="Raw documents for wandbot", + ) en_docodile_loader = DocodileDataLoader(DocodileEnglishStoreConfig()) ja_docodile_loader = DocodileDataLoader(DocodileJapaneseStoreConfig()) @@ -209,7 +347,10 @@ def load( with (loader.config.docstore_dir / "metadata.json").open("w") as f: json.dump(loader.metadata, f) - artifact.add_dir(str(loader.config.docstore_dir), name=loader.config.docstore_dir.name) + artifact.add_dir( + str(loader.config.docstore_dir), + name=loader.config.docstore_dir.name, + ) run.log_artifact(artifact) run.finish() return f"{entity}/{project}/{result_artifact_name}:latest" diff --git a/src/wandbot/ingestion/preprocess_data.py b/src/wandbot/ingestion/preprocess_data.py index 9f4c79c..7b4a7a5 100644 --- a/src/wandbot/ingestion/preprocess_data.py +++ b/src/wandbot/ingestion/preprocess_data.py @@ -1,3 +1,24 @@ +"""This module contains classes and functions for preprocessing data in the Wandbot ingestion system. + +The module includes the following classes: +- `MarkdownSplitter`: A class for splitting text into chunks based on Markdown formatting. +- `CustomCodeSplitter`: A class for splitting text into chunks based on custom code formatting. + +The module also includes the following functions: +- `make_texts_tokenization_safe`: Removes special tokens from the given documents. +- `non_whitespace_len`: Returns the length of the given string without whitespace. +- `split_by_headers`: Splits the tree into chunks based on headers. +- `split_large_chunks_by_code_blocks`: Splits large chunks into smaller chunks based on code blocks. +- `get_line_number`: Returns the line number of a given index in the source code. +- `convert_lc_to_llama`: Converts a Langchain document to a Llama document. +- `load`: Loads documents and returns a list of nodes. + +Typical usage example: + + documents = [document1, document2, document3] + nodes = load(documents, chunk_size=1024) +""" + from typing import Iterable, List import regex as re @@ -14,11 +35,27 @@ logger = get_logger(__name__) -def make_texts_tokenization_safe(documents): +def make_texts_tokenization_safe(documents: List[str]) -> List[str]: + """Removes special tokens from the given documents. + + Args: + documents: A list of strings representing the documents. + + Returns: + A list of cleaned documents with special tokens removed. + """ encoding = tiktoken.get_encoding("cl100k_base") special_tokens_set = encoding.special_tokens_set - def remove_special_tokens(text): + def remove_special_tokens(text: str) -> str: + """Removes special tokens from the given text. + + Args: + text: A string representing the text. + + Returns: + The text with special tokens removed. 
+ """ for token in special_tokens_set: text = text.replace(token, "") return text @@ -30,13 +67,29 @@ def remove_special_tokens(text): return cleaned_documents -def non_whitespace_len(s) -> int: +def non_whitespace_len(s: Union[str, bytes]) -> int: + """Returns the length of the given string without whitespace. + + Args: + s: A string. + + Returns: + The length of the string without whitespace. + """ if isinstance(s, str): return len(re.sub("\s", "", s)) return len(re.sub("\s", "", s.decode("utf-8"))) -def split_by_headers(tree): +def split_by_headers(tree: Any) -> List[List[Any]]: + """Splits the tree into chunks based on headers. + + Args: + tree: The tree to split. + + Returns: + A list of chunks where each chunk is a list of nodes. + """ chunks = [] current_chunk = [] for child in tree.root_node.children: @@ -52,7 +105,18 @@ def split_by_headers(tree): return chunks -def split_large_chunks_by_code_blocks(chunks, max_chars): +def split_large_chunks_by_code_blocks( + chunks: List[List[Any]], max_chars: int +) -> List[List[Any]]: + """Splits large chunks into smaller chunks based on code blocks. + + Args: + chunks: A list of chunks where each chunk is a list of nodes. + max_chars: The maximum number of characters allowed in a chunk. + + Returns: + A list of smaller chunks where each chunk is a list of nodes. + """ new_chunks = [] for chunk in chunks: current_chunk = [] @@ -62,7 +126,9 @@ def split_large_chunks_by_code_blocks(chunks, max_chars): current_chars += child_chars current_chunk.append(child) if child.type == "fenced_code_block" and current_chars > max_chars: - if current_chunk: # if current_chunk is not empty, add it to new_chunks + if ( + current_chunk + ): # if current_chunk is not empty, add it to new_chunks new_chunks.append(current_chunk) current_chunk = [] # start a new chunk current_chars = 0 @@ -71,26 +137,55 @@ def split_large_chunks_by_code_blocks(chunks, max_chars): return new_chunks -def get_heading_level(chunk): +def get_heading_level(chunk: List[Any]) -> Optional[int]: + """Returns the heading level of the given chunk. + + Args: + chunk: A list of nodes representing a chunk. + + Returns: + The heading level of the chunk. + """ for child in chunk: if child.type == "atx_heading" or child.type == "setext_heading": for grandchild in child.children: - if grandchild.type.startswith("atx") and grandchild.type.endswith("marker"): + if grandchild.type.startswith( + "atx" + ) and grandchild.type.endswith("marker"): return len(grandchild.text) return None -def merge_small_chunks(chunks, max_chars): +def merge_small_chunks( + chunks: List[List[Any]], max_chars: int +) -> List[List[Any]]: + """Merges small chunks into larger chunks based on maximum characters. + + Args: + chunks: A list of chunks where each chunk is a list of nodes. + max_chars: The maximum number of characters allowed in a chunk. + + Returns: + A list of merged chunks where each chunk is a list of nodes. 
+ """ merged_chunks = [] current_chunk = [] current_chars = 0 for chunk in chunks: chunk_chars = sum(non_whitespace_len(child.text) for child in chunk) - current_heading_level, chunk_heading_level = get_heading_level(current_chunk), get_heading_level(chunk) - cond = (current_heading_level is None and chunk_heading_level is None) or ( - current_heading_level and chunk_heading_level and current_heading_level <= chunk_heading_level + current_heading_level, chunk_heading_level = get_heading_level( + current_chunk + ), get_heading_level(chunk) + cond = ( + current_heading_level is None and chunk_heading_level is None + ) or ( + current_heading_level + and chunk_heading_level + and current_heading_level <= chunk_heading_level ) - if current_chars + chunk_chars <= max_chars and (not current_chunk or cond): + if current_chars + chunk_chars <= max_chars and ( + not current_chunk or cond + ): current_chunk.extend(chunk) current_chars += chunk_chars else: @@ -102,7 +197,18 @@ def merge_small_chunks(chunks, max_chars): return merged_chunks -def coalesce_small_chunks(chunks, min_chars=100): +def coalesce_small_chunks( + chunks: List[List[Any]], min_chars: int = 100 +) -> List[List[Any]]: + """Coalesces small chunks into larger chunks based on minimum characters. + + Args: + chunks: A list of chunks where each chunk is a list of nodes. + min_chars: The minimum number of characters allowed in a chunk. + + Returns: + A list of coalesced chunks where each chunk is a list of nodes. + """ coalesced_chunks = [] i = 0 while i < len(chunks): @@ -112,39 +218,69 @@ def coalesce_small_chunks(chunks, min_chars=100): next_chunk_heading_level = get_heading_level(chunks[i + 1]) current_chunk_heading_level = get_heading_level(chunks[i]) if next_chunk_heading_level is None or ( - current_chunk_heading_level is not None and next_chunk_heading_level > current_chunk_heading_level + current_chunk_heading_level is not None + and next_chunk_heading_level > current_chunk_heading_level ): # if the next chunk is not a heading or is a heading of a higher level - chunks[i + 1] = chunks[i] + chunks[i + 1] # prepend the chunk to the next chunk + chunks[i + 1] = ( + chunks[i] + chunks[i + 1] + ) # prepend the chunk to the next chunk i += 1 # skip to the next chunk continue # if it's the last chunk or the next chunk is a heading of the same level if coalesced_chunks: # if there are already some coalesced chunks - coalesced_chunks[-1].extend(chunks[i]) # add the chunk to the previous chunk + coalesced_chunks[-1].extend( + chunks[i] + ) # add the chunk to the previous chunk else: coalesced_chunks.append(chunks[i]) i += 1 else: - coalesced_chunks.append(chunks[i]) # add the chunk as a separate chunk + coalesced_chunks.append( + chunks[i] + ) # add the chunk as a separate chunk i += 1 return coalesced_chunks def get_line_number(index: int, source_code: bytes) -> int: + """Returns the line number corresponding to the given index in the source code. + + Args: + index: The index in the source code. + source_code: The source code as bytes. + + Returns: + The line number corresponding to the index. 
+ """ total_chars = 0 - for line_number, line in enumerate(source_code.splitlines(keepends=True), start=1): + for line_number, line in enumerate( + source_code.splitlines(keepends=True), start=1 + ): total_chars += len(line) if total_chars > index: return line_number - 1 return line_number -def coalesce_strings(strings, max_length): +def coalesce_strings(strings: List[str], max_length: int) -> List[str]: + """Coalesces strings into larger strings based on maximum length. + + Args: + strings: A list of strings. + max_length: The maximum length allowed for a coalesced string. + + Returns: + A list of coalesced strings. + """ result = [] current_string = "" for string in strings: - if non_whitespace_len(current_string) + non_whitespace_len(string) <= max_length: + if ( + non_whitespace_len(current_string) + non_whitespace_len(string) + <= max_length + ): current_string += "\n" + string else: result.append(current_string) @@ -157,7 +293,15 @@ def coalesce_strings(strings, max_length): return result -def clean_extra_newlines(strings): +def clean_extra_newlines(strings: List[str]) -> List[str]: + """Cleans extra newlines in the given strings. + + Args: + strings: A list of strings. + + Returns: + A list of strings with extra newlines cleaned. + """ result = [] for string in strings: string = re.sub(r"\n```\n", "CODEBREAK", string) @@ -170,12 +314,28 @@ def clean_extra_newlines(strings): class MarkdownSplitter(TextSplitter): - sub_splitter = RecursiveCharacterTextSplitter.from_language("markdown") - chunk_size = 1024 + """Splits text into chunks based on Markdown formatting. + + Attributes: + sub_splitter: The sub-splitter used to split text into smaller chunks. + chunk_size: The maximum size of each chunk. + """ + + sub_splitter: RecursiveCharacterTextSplitter = ( + RecursiveCharacterTextSplitter.from_language("markdown") + ) + chunk_size: int = 1024 def __init__(self, **kwargs): + """Initializes the MarkdownSplitter instance. + + Args: + **kwargs: Additional keyword arguments. + """ super().__init__(**kwargs) - self.sub_splitter = RecursiveCharacterTextSplitter.from_language("markdown") + self.sub_splitter = RecursiveCharacterTextSplitter.from_language( + "markdown" + ) @classmethod def class_name(cls) -> str: @@ -187,16 +347,16 @@ def _chunk_text(self, text: str) -> Iterable[str]: parser = get_parser("markdown") tree = parser.parse(text.encode("utf-8")) - chunks = split_by_headers(tree) + chunks: List[List[Any]] = split_by_headers(tree) chunks = split_large_chunks_by_code_blocks(chunks, self.chunk_size) chunks = merge_small_chunks(chunks, self.chunk_size) for chunk in chunks: if chunk: chunk_bytes = chunk[0].start_byte, chunk[-1].end_byte chunk_lines = text.encode("utf-8").splitlines()[ - get_line_number(chunk_bytes[0], text.encode("utf-8")) : get_line_number( - chunk_bytes[1], text.encode("utf-8") - ) + get_line_number( + chunk_bytes[0], text.encode("utf-8") + ) : get_line_number(chunk_bytes[1], text.encode("utf-8")) + 1 ] chunk_str = "" @@ -206,7 +366,9 @@ def _chunk_text(self, text: str) -> Iterable[str]: else: chunk_str += line.decode() + "\n" - for split in self.sub_splitter.split_documents([LcDocument(page_content=chunk_str)]): + for split in self.sub_splitter.split_documents( + [LcDocument(page_content=chunk_str)] + ): split_content = split.page_content yield split_content @@ -221,23 +383,56 @@ def split_text(self, text: str) -> List[str]: class CustomCodeSplitter(CodeSplitter): + """Splits text into chunks based on custom code formatting. 
+ + Attributes: + language: The programming language of the code. + max_chars: The maximum number of characters allowed in a chunk. + """ + def split_text(self, text: str) -> List[str]: + """Split text into chunks.""" text_splits = super().split_text(text) text_splits = make_texts_tokenization_safe(text_splits) return text_splits -def convert_lc_to_llama(document: LcDocument): +def convert_lc_to_llama(document: LcDocument) -> LlamaDocument: + """Converts a Langchain document to a Llama document. + + Args: + document: A Langchain document. + + Returns: + A Llama document. + """ return LlamaDocument.from_langchain_format(document) -def load(documents, chunk_size=1024): - md_parser = SimpleNodeParser(text_splitter=MarkdownSplitter(chunk_size=chunk_size)) - code_parser = SimpleNodeParser(text_splitter=CustomCodeSplitter(language="python", max_chars=chunk_size)) +def load(documents: List[LcDocument], chunk_size: int = 1024) -> List[Any]: + """Loads documents and returns a list of nodes. + + Args: + documents: A list of documents. + chunk_size: The size of each chunk. + + Returns: + A list of nodes. + """ + md_parser: SimpleNodeParser = SimpleNodeParser( + text_splitter=MarkdownSplitter(chunk_size=chunk_size) + ) + code_parser: SimpleNodeParser = SimpleNodeParser( + text_splitter=CustomCodeSplitter( + language="python", max_chars=chunk_size + ) + ) - llama_docs = list(map(lambda x: convert_lc_to_llama(x), documents)) + llama_docs: List[LlamaDocument] = list( + map(lambda x: convert_lc_to_llama(x), documents) + ) - nodes = [] + nodes: List[Any] = [] for doc in llama_docs: if doc.metadata["file_type"] == ".py": parser = code_parser diff --git a/src/wandbot/ingestion/report.py b/src/wandbot/ingestion/report.py index 93a1946..122d4cf 100644 --- a/src/wandbot/ingestion/report.py +++ b/src/wandbot/ingestion/report.py @@ -1,3 +1,19 @@ +"""This module contains functions for creating and logging data ingestion reports in the Wandbot system. + +The module includes the following functions: +- `log_raw_counts`: Logs the number of documents for each data source. +- `get_metadata_from_artifacts`: Extracts metadata from raw and vectorstore artifacts. +- `create_ingestion_report`: Creates a data ingestion report. +- `main`: The main function that runs the report creation process. + +Typical usage example: + + project = "wandbot-dev" + entity = "wandbot" + raw_artifact = "wandbot/wandbot-dev/raw_dataset:latest" + vectorstore_artifact = "wandbot/wandbot-dev/vectorstores:latest" + create_ingestion_report(project, entity, raw_artifact, vectorstore_artifact) +""" import json import pathlib from datetime import datetime @@ -6,30 +22,53 @@ import wandb.apis.reports as wr -def log_raw_counts( - metadata: dict, -): - data = {} +def log_raw_counts(metadata: dict[str, dict[str, int]]) -> list[str]: + """Logs the number of documents for each data source. + + Args: + metadata: A dictionary containing metadata about each data source. + + Returns: + A list of data source names. + """ + data: dict[str, int] = {} for source, info in metadata.items(): data[source] = info["num_documents"] wandb.run.log(data) return list(data.keys()) -def get_metadata_from_artifacts(raw_artifact, vectorstore_artifact): +def get_metadata_from_artifacts( + raw_artifact: str, vectorstore_artifact: str +) -> tuple[dict[str, dict[str, int]], dict[str, int]]: + """Extracts metadata from raw and vectorstore artifacts. + + Args: + raw_artifact: The raw artifact to extract metadata from. 
+ vectorstore_artifact: The vectorstore artifact to extract metadata from. + + Returns: + A tuple containing dictionaries of raw and vectorstore metadata. + """ raw_artifact = wandb.run.use_artifact(raw_artifact, type="dataset") raw_artifact_dir = raw_artifact.download() - vectorstore_artifact = wandb.run.use_artifact(vectorstore_artifact, type="storage_context") + vectorstore_artifact = wandb.run.use_artifact( + vectorstore_artifact, type="storage_context" + ) vectorstore_artifact_dir = vectorstore_artifact.download() - raw_metadata_files = list(pathlib.Path(raw_artifact_dir).rglob("metadata.json")) - vectorstore_metadata_files = list(pathlib.Path(vectorstore_artifact_dir).rglob("docstore.json")) + raw_metadata_files = list( + pathlib.Path(raw_artifact_dir).rglob("metadata.json") + ) + vectorstore_metadata_files = list( + pathlib.Path(vectorstore_artifact_dir).rglob("docstore.json") + ) - raw_metadata = {} + raw_metadata: dict[str, dict[str, int]] = {} for metadata_file in raw_metadata_files: with metadata_file.open("r") as f: raw_metadata[metadata_file.parent.name] = json.load(f) - vectorstore_metadata = {} + vectorstore_metadata: dict[str, int] = {} num_nodes = 0 for metadata_file in vectorstore_metadata_files: with metadata_file.open("r") as f: @@ -48,7 +87,15 @@ def create_ingestion_report( entity: str, raw_artifact: str, vectorstore_artifact: str, -): +) -> None: + """Creates a data ingestion report. + + Args: + project: The name of the project. + entity: The name of the entity. + raw_artifact: The raw artifact to include in the report. + vectorstore_artifact: The vectorstore artifact to include in the report. + """ report = wr.Report( project=project, entity=entity, @@ -80,7 +127,9 @@ def create_ingestion_report( wr.UnorderedList(list(raw_metadata.keys())), pg_raw, wr.H1("Raw Datasources Metadata"), - wr.CodeBlock([json.dumps(dict(raw_metadata), indent=2)], language="json"), + wr.CodeBlock( + [json.dumps(dict(raw_metadata), indent=2)], language="json" + ), wr.H1("VectorsStore Artifact Summary"), wr.WeaveBlockArtifact( wandb.run.entity, diff --git a/src/wandbot/ingestion/utils.py b/src/wandbot/ingestion/utils.py index 1e0f532..2d7399b 100644 --- a/src/wandbot/ingestion/utils.py +++ b/src/wandbot/ingestion/utils.py @@ -1,32 +1,74 @@ -import json -import pathlib +"""This module contains utility functions for the Wandbot ingestion system. + +The module includes the following functions: +- `convert_contents_to_soup`: Converts contents to a BeautifulSoup object. +- `clean_soup`: Cleans the BeautifulSoup object. +- `clean_contents`: Cleans the contents. +- `get_git_command`: Get the git command with the given id file. +- `fetch_git_remote_hash`: Fetches the remote hash of the git repository. +- `fetch_repo_metadata`: Fetches the metadata of the git repository. +- `fetch_git_repo`: Fetches the git repository. +- `concatenate_cells`: Combines cells information in a readable format. + +The module also includes the following constants: +- `EXTENSION_MAP`: A dictionary mapping file extensions to programming languages. + +Typical usage example: + + contents = "This is some markdown content." 
+ soup = convert_contents_to_soup(contents) + cleaned_soup = clean_soup(soup) + cleaned_contents = clean_contents(contents) + git_command = get_git_command(id_file) + remote_hash = fetch_git_remote_hash(repo_url, id_file) + repo_metadata = fetch_repo_metadata(repo) + git_repo_metadata = fetch_git_repo(paths, id_file) + cell_info = concatenate_cells(cell, include_outputs, max_output_length, traceback) +""" + import re import subprocess -from typing import Dict, List +from pathlib import Path +from typing import Any, Dict, Optional import giturlparse import markdown import markdownify -import pandas as pd from bs4 import BeautifulSoup, Comment from git import Repo -from langchain.document_loaders import NotebookLoader -from langchain.document_loaders.notebook import remove_newlines -from langchain.schema import Document from wandbot.utils import get_logger logger = get_logger(__name__) -def get_git_command(id_file): +def get_git_command(id_file: Path) -> str: + """Get the git command with the given id file. + + Args: + id_file: The path to the id file. + + Returns: + The git command with the id file. + """ assert id_file.is_file() git_command = f"ssh -v -i /{id_file}" return git_command -def fetch_git_remote_hash(repo_url=None, id_file=None): +def fetch_git_remote_hash( + repo_url: Optional[str] = None, id_file: Optional[Path] = None +) -> Optional[str]: + """Fetch the remote hash of the git repository. + + Args: + repo_url: The URL of the git repository. + id_file: The path to the id file. + + Returns: + The remote hash of the git repository. + """ if repo_url is None: logger.warning(f"No repo url was supplied. Not returning a repo hash") return None @@ -47,36 +89,74 @@ def fetch_git_remote_hash(repo_url=None, id_file=None): def fetch_repo_metadata(repo: "Repo") -> Dict[str, str]: + """Fetch the metadata of the git repository. + + Args: + repo: The git repository. + + Returns: + The metadata of the git repository. + """ head_commit = repo.head.commit return dict( commit_summary=head_commit.summary, commit_message=head_commit.message, commit_author=str(head_commit.author), - commit_time=head_commit.committed_datetime.strftime("%Y-%m-%d %H:%M:%S"), + commit_time=head_commit.committed_datetime.strftime( + "%Y-%m-%d %H:%M:%S" + ), commit_hash=head_commit.hexsha, commit_stats=head_commit.stats.total, ) -def fetch_git_repo(paths, id_file) -> Dict[str, str]: +def fetch_git_repo(paths: Any, id_file: Path) -> Dict[str, str]: + """Fetch the git repository. + + Args: + paths: The paths of the git repository. + id_file: The path to the id file. + + Returns: + The metadata of the git repository. + """ git_command = get_git_command(id_file) if paths.local_path.is_dir(): repo = Repo(paths.local_path) - logger.debug(f"Repo {paths.local_path} already exists... Pulling changes from {repo.remotes.origin.url}") + logger.debug( + f"Repo {paths.local_path} already exists... 
Pulling changes from {repo.remotes.origin.url}" + ) with repo.git.custom_environment(GIT_SSH_COMMAND=git_command): repo.remotes.origin.pull() else: remote_url = giturlparse.parse(f"{paths.repo_path}").urls.get("ssh") logger.debug(f"Cloning {remote_url} to {paths.local_path}") - repo = Repo.clone_from(remote_url, paths.local_path, env=dict(GIT_SSH_COMMAND=git_command)) + repo = Repo.clone_from( + remote_url, paths.local_path, env=dict(GIT_SSH_COMMAND=git_command) + ) return fetch_repo_metadata(repo) -def concatenate_cells(cell: dict, include_outputs: bool, max_output_length: int, traceback: bool) -> str: - """Combine cells information in a readable format ready to be used.""" +def concatenate_cells( + cell: Dict[str, Any], + include_outputs: bool, + max_output_length: int, + traceback: bool, +) -> str: + """Combine cells information in a readable format ready to be used. + + Args: + cell: The cell dictionary. + include_outputs: Whether to include outputs. + max_output_length: The maximum length of the output. + traceback: Whether to include traceback. + + Returns: + The combined cell information. + """ cell_type = cell["cell_type"] source = cell["source"] output = cell["outputs"] @@ -100,7 +180,10 @@ def concatenate_cells(cell: dict, include_outputs: bool, max_output_length: int, elif output[0]["output_type"] == "stream": output = output[0]["text"] min_output = min(max_output_length, len(output)) - return f"'{cell_type}' cell: '{source}'\n with " f"output: '{output[:min_output]}'\n\n" + return ( + f"'{cell_type}' cell: '{source}'\n with " + f"output: '{output[:min_output]}'\n\n" + ) else: if cell_type == "markdown": source = re.sub(r"!\[.*?\]\((.*?)\)", "", f"{source}").strip() @@ -112,34 +195,7 @@ def concatenate_cells(cell: dict, include_outputs: bool, max_output_length: int, return "" -class WandbNotebookLoader(NotebookLoader): - """Loader that loads .ipynb notebook files in wandb examples.""" - - def load( - self, - ) -> List[Document]: - """Load documents.""" - p = pathlib.Path(self.file_path) - - with open(p, encoding="utf8") as f: - d = json.load(f) - - data = pd.json_normalize(d["cells"]) - filtered_data = data[["cell_type", "source", "outputs"]] - if self.remove_newline: - filtered_data = filtered_data.applymap(remove_newlines) - - text = filtered_data.apply( - lambda x: concatenate_cells(x, self.include_outputs, self.max_output_length, self.traceback), - axis=1, - ).str.cat(sep=" ") - - metadata = {"source": str(p)} - - return [Document(page_content=text, metadata=metadata)] - - -EXTENSION_MAP = { +EXTENSION_MAP: Dict[str, str] = { ".py": "python", ".ipynb": "python", ".md": "markdown", @@ -148,7 +204,15 @@ def load( } -def convert_contents_to_soup(contents): +def convert_contents_to_soup(contents: str) -> BeautifulSoup: + """Converts contents to BeautifulSoup object. + + Args: + contents: The contents to convert. + + Returns: + The BeautifulSoup object. + """ markdown_document = markdown.markdown( contents, extensions=[ @@ -174,7 +238,15 @@ def convert_contents_to_soup(contents): return soup -def clean_soup(soup): +def clean_soup(soup: BeautifulSoup) -> BeautifulSoup: + """Cleans the BeautifulSoup object. + + Args: + soup: The BeautifulSoup object to clean. + + Returns: + The cleaned BeautifulSoup object. 
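+
+    Example:
+        # minimal sketch using helpers defined in this module; the markdown
+        # string is a hypothetical input
+        soup = convert_contents_to_soup("# Title\n![img](image.png)\ntext")
+        cleaned = clean_soup(soup)  # drops embedded images and HTML comments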
+ """ for img_tag in soup.find_all("img", src=True): img_tag.extract() comments = soup.find_all(string=lambda text: isinstance(text, Comment)) @@ -186,10 +258,20 @@ def clean_soup(soup): return soup -def clean_contents(contents): +def clean_contents(contents: str) -> str: + """Cleans the contents. + + Args: + contents: The contents to clean. + + Returns: + The cleaned contents. + """ soup = convert_contents_to_soup(contents) soup = clean_soup(soup) - cleaned_document = markdownify.MarkdownConverter(heading_style="ATX").convert_soup(soup) + cleaned_document = markdownify.MarkdownConverter( + heading_style="ATX" + ).convert_soup(soup) cleaned_document = cleaned_document.replace("![]()", "\n") cleaned_document = re.sub(r"\[([^]]+)\]\([^)]+\)", r"\1", cleaned_document) diff --git a/src/wandbot/ingestion/vectorstores.py b/src/wandbot/ingestion/vectorstores.py index 04eb040..6f4f6a0 100644 --- a/src/wandbot/ingestion/vectorstores.py +++ b/src/wandbot/ingestion/vectorstores.py @@ -1,3 +1,17 @@ +"""This module contains functions for loading and managing vector stores in the Wandbot ingestion system. + +The module includes the following functions: +- `load`: Loads the vector store from the specified source artifact path and returns the name of the resulting artifact. + +Typical usage example: + + project = "wandbot-dev" + entity = "wandbot" + source_artifact_path = "wandbot/wandbot-dev/raw_dataset:latest" + result_artifact_name = "wandbot_index" + load(project, entity, source_artifact_path, result_artifact_name) +""" + import json import pathlib @@ -7,7 +21,12 @@ from wandbot.ingestion import preprocess_data from wandbot.ingestion.config import VectorStoreConfig -from wandbot.utils import get_logger, load_index, load_service_context, load_storage_context +from wandbot.utils import ( + get_logger, + load_index, + load_service_context, + load_storage_context, +) logger = get_logger(__name__) @@ -17,12 +36,34 @@ def load( entity: str, source_artifact_path: str, result_artifact_name: str = "wandbot_index", -): - config = VectorStoreConfig() - run = wandb.init(project=project, entity=entity, job_type="create_vectorstore") - artifact = run.use_artifact(source_artifact_path, type="dataset") - artifact_dir = artifact.download() - storage_context = load_storage_context(config.embedding_dim, config.persist_dir) +) -> str: + """Load the vector store. + + Loads the vector store from the specified source artifact path and returns the name of the resulting artifact. + + Args: + project: The name of the project. + entity: The name of the entity. + source_artifact_path: The path to the source artifact. + result_artifact_name: The name of the resulting artifact. Defaults to "wandbot_index". + + Returns: + The name of the resulting artifact. + + Raises: + wandb.Error: An error occurred during the loading process. 
+ """ + config: VectorStoreConfig = VectorStoreConfig() + run: wandb.Run = wandb.init( + project=project, entity=entity, job_type="create_vectorstore" + ) + artifact: wandb.Artifact = run.use_artifact( + source_artifact_path, type="dataset" + ) + artifact_dir: str = artifact.download() + storage_context = load_storage_context( + config.embedding_dim, config.persist_dir + ) service_context = load_service_context( config.chat_model_name, config.temperature, @@ -30,15 +71,17 @@ def load( config.max_retries, ) - document_files = list(pathlib.Path(artifact_dir).rglob("documents.jsonl")) + document_files: List[pathlib.Path] = list( + pathlib.Path(artifact_dir).rglob("documents.jsonl") + ) - transformed_documents = [] + transformed_documents: List[LcDocument] = [] for document_file in document_files: - documents = [] + documents: List[LcDocument] = [] with document_file.open() as f: for line in f: - doc_dict = json.loads(line) - doc = LcDocument(**doc_dict) + doc_dict: Dict[str, Any] = json.loads(line) + doc: LcDocument = LcDocument(**doc_dict) documents.append(doc) transformed_documents.extend(preprocess_data.load(documents)) @@ -48,7 +91,7 @@ def load( storage_context, persist_dir=config.persist_dir, ) - wandb_callback = WandbCallbackHandler() + wandb_callback: WandbCallbackHandler = WandbCallbackHandler() wandb_callback.persist_index(index, index_name=result_artifact_name) wandb_callback.finish() diff --git a/src/wandbot/utils.py b/src/wandbot/utils.py index bee5088..8e8a4a8 100644 --- a/src/wandbot/utils.py +++ b/src/wandbot/utils.py @@ -1,3 +1,25 @@ +"""This module contains utility functions and classes for the Wandbot system. + +The module includes the following functions: +- `get_logger`: Creates and returns a logger with the specified name. +- `load_embeddings`: Loads embeddings from cache or creates new ones if not found. +- `load_llm`: Loads a language model with the specified parameters. +- `load_service_context`: Loads a service context with the specified parameters. +- `load_storage_context`: Loads a storage context with the specified parameters. +- `load_index`: Loads an index from storage or creates a new one if not found. + +The module also includes the following classes: +- `Timer`: A simple timer class for measuring elapsed time. + +Typical usage example: + + logger = get_logger("my_logger") + embeddings = load_embeddings("/path/to/cache") + llm = load_llm("gpt-3", 0.5, 3) + service_context = load_service_context(llm, 0.5, "/path/to/cache", 3) + storage_context = load_storage_context(768, "/path/to/persist") + index = load_index(nodes, service_context, storage_context, "/path/to/persist") +""" import datetime import logging import os @@ -6,12 +28,25 @@ import faiss from langchain.embeddings import CacheBackedEmbeddings, OpenAIEmbeddings from langchain.storage import LocalFileStore -from llama_index import ServiceContext, StorageContext, VectorStoreIndex, load_index_from_storage +from llama_index import ( + ServiceContext, + StorageContext, + VectorStoreIndex, + load_index_from_storage, +) from llama_index.llms import OpenAI from llama_index.vector_stores import FaissVectorStore -def get_logger(name): +def get_logger(name: str) -> logging.Logger: + """Creates and returns a logger with the specified name. + + Args: + name: The name of the logger. + + Returns: + A logger instance with the specified name. 
+ """ logging.basicConfig( format="%(asctime)s : %(levelname)s : %(message)s", level=logging.getLevelName(os.environ.get("LOG_LEVEL", "INFO")), @@ -21,22 +56,36 @@ def get_logger(name): class Timer: + """A simple timer class for measuring elapsed time.""" + def __init__(self) -> None: + """Initializes the timer.""" self.start = datetime.datetime.utcnow() self.stop = self.start def __enter__(self) -> "Timer": + """Starts the timer.""" return self def __exit__(self, *args: Any) -> None: + """Stops the timer.""" self.stop = datetime.datetime.utcnow() @property def elapsed(self) -> float: + """Calculates the elapsed time in seconds.""" return (self.stop - self.start).total_seconds() -def load_embeddings(cache_dir): +def load_embeddings(cache_dir: str) -> CacheBackedEmbeddings: + """Loads embeddings from cache or creates new ones if not found. + + Args: + cache_dir: The directory where the embeddings cache is stored. + + Returns: + A cached embedder instance. + """ underlying_embeddings = OpenAIEmbeddings() embeddings_cache_fs = LocalFileStore(cache_dir) @@ -48,18 +97,64 @@ def load_embeddings(cache_dir): return cached_embedder -def load_llm(model_name, temperature, max_retries): - llm = OpenAI(model=model_name, temperature=temperature, streaming=True, max_retries=max_retries) +def load_llm(model_name: str, temperature: float, max_retries: int) -> OpenAI: + """Loads a language model with the specified parameters. + + Args: + model_name: The name of the model to load. + temperature: The temperature parameter for the model. + max_retries: The maximum number of retries for loading the model. + + Returns: + An instance of the loaded language model. + """ + llm = OpenAI( + model=model_name, + temperature=temperature, + streaming=True, + max_retries=max_retries, + ) return llm -def load_service_context(llm, temperature, embeddings_cache, max_retries, callback_manager=None): +def load_service_context( + llm: OpenAI, + temperature: float, + embeddings_cache: str, + max_retries: int, + callback_manager: Optional[Any] = None, +) -> ServiceContext: + """Loads a service context with the specified parameters. + + Args: + llm: The language model to load. + temperature: The temperature parameter for the model. + embeddings_cache: The directory where the embeddings cache is stored. + max_retries: The maximum number of retries for loading the model. + callback_manager: The callback manager for the service context (optional). + + Returns: + A service context instance with the specified parameters. + """ llm = load_llm(llm, temperature, max_retries=max_retries) embed_model = load_embeddings(embeddings_cache) - return ServiceContext.from_defaults(llm=llm, embed_model=embed_model, callback_manager=callback_manager) + return ServiceContext.from_defaults( + llm=llm, embed_model=embed_model, callback_manager=callback_manager + ) + +def load_storage_context( + embed_dimensions: int, persist_dir: str +) -> StorageContext: + """Loads a storage context with the specified parameters. -def load_storage_context(embed_dimensions, persist_dir): + Args: + embed_dimensions: The dimensions of the embeddings. + persist_dir: The directory where the storage context is persisted. + + Returns: + A storage context instance with the specified parameters. 
+ """ if os.path.isdir(persist_dir): storage_context = StorageContext.from_defaults( vector_store=FaissVectorStore.from_persist_dir(persist_dir), @@ -73,7 +168,23 @@ def load_storage_context(embed_dimensions, persist_dir): return storage_context -def load_index(nodes, service_context, storage_context, persist_dir): +def load_index( + nodes: Any, + service_context: ServiceContext, + storage_context: StorageContext, + persist_dir: str, +) -> VectorStoreIndex: + """Loads an index from storage or creates a new one if not found. + + Args: + nodes: The nodes to include in the index. + service_context: The service context for the index. + storage_context: The storage context for the index. + persist_dir: The directory where the index is persisted. + + Returns: + An index instance with the specified parameters. + """ try: index = load_index_from_storage(storage_context) except Exception: From ea1edf3311d8f9bba656ca40702d294d5bb786c1 Mon Sep 17 00:00:00 2001 From: Bharat Ramanathan Date: Thu, 5 Oct 2023 16:06:46 +0530 Subject: [PATCH 2/7] fix: add missing imports and fix crlf --- src/wandbot/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/wandbot/utils.py b/src/wandbot/utils.py index 8e8a4a8..ebbfa0b 100644 --- a/src/wandbot/utils.py +++ b/src/wandbot/utils.py @@ -23,7 +23,7 @@ import datetime import logging import os -from typing import Any +from typing import Any, Optional import faiss from langchain.embeddings import CacheBackedEmbeddings, OpenAIEmbeddings From 1ddc741ede74871dc0e70c78f4c7511aa3caac09 Mon Sep 17 00:00:00 2001 From: Bharat Ramanathan Date: Thu, 26 Oct 2023 12:00:15 +0530 Subject: [PATCH 3/7] fix: add missing ports for the deployment --- .replit | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.replit b/.replit index 0bbafd4..2f58dfd 100644 --- a/.replit +++ b/.replit @@ -13,4 +13,7 @@ channel = "stable-23_05" run = ["sh", "-c", "bash run.sh"] build = ["sh", "-c", "bash build.sh"] deploymentTarget = "gce" -ignorePorts = true + +[[ports]] +localPort=8000 +externalPort=80 From 6c658e517622f627bba5dfd54cd356190995bbc2 Mon Sep 17 00:00:00 2001 From: Bharat Ramanathan Date: Thu, 26 Oct 2023 12:08:13 +0530 Subject: [PATCH 4/7] fix: change pydantic schema serialization --- src/wandbot/database/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/wandbot/database/client.py b/src/wandbot/database/client.py index 1f9489a..c35434b 100644 --- a/src/wandbot/database/client.py +++ b/src/wandbot/database/client.py @@ -116,6 +116,7 @@ def get_all_question_answers(self, time=None) -> List[dict[str, Any]] | None: question_answers = question_answers.all() if question_answers is not None: question_answers = [ - QuestionAnswerCreateSchema.from_orm(question_answer).dict() for question_answer in question_answers + QuestionAnswerCreateSchema.from_orm(question_answer).model_dump() + for question_answer in question_answers ] return question_answers From d900778e6d4f0d36983dd379395ac6df7a8d89c4 Mon Sep 17 00:00:00 2001 From: Bharat Ramanathan Date: Thu, 26 Oct 2023 12:14:19 +0530 Subject: [PATCH 5/7] chore: update requirements and env --- poetry.lock | 1057 +++++++++++++++++++++++++++--------------------- pyproject.toml | 16 +- 2 files changed, 610 insertions(+), 463 deletions(-) diff --git a/poetry.lock b/poetry.lock index c242559..1c1e553 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,98 +2,98 @@ [[package]] name = "aiohttp" -version = "3.8.5" +version = "3.8.6" description = "Async http client/server framework (asyncio)" 
optional = false python-versions = ">=3.6" files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = 
"aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = 
"aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = 
"aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = 
"aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, ] [package.dependencies] @@ -124,13 +124,13 @@ frozenlist = ">=1.1.0" [[package]] name = "annotated-types" -version = "0.5.0" +version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, - {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] [[package]] @@ -247,101 +247,101 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.0" +version = "3.3.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, + {file = 
"charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, + {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, ] [[package]] @@ -360,13 +360,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cohere" -version = "4.27" +version = "4.32" description = "" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "cohere-4.27-py3-none-any.whl", hash = "sha256:a2b977867a247bf44b2eba1b947acfe44e5881b15cacc40469fbdb117a7f1f55"}, - {file = "cohere-4.27.tar.gz", hash = "sha256:5d61eaca698dcf7f5b0b7cccca269d448e341314fedd921a0cc7c7bbf05f8181"}, + {file = "cohere-4.32-py3-none-any.whl", hash = "sha256:b5ab3509a34c20d51b246e38eb64adc839c8bc131c41ed92ec3613998df9a8e0"}, + {file = "cohere-4.32.tar.gz", hash = "sha256:3807747be984f211dce911c1335bd713af2ac2b70f729678381e6ff6e450e681"}, ] [package.dependencies] @@ -656,13 +656,13 @@ files = [ [[package]] name = "fsspec" -version = "2023.9.2" +version = "2023.10.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.9.2-py3-none-any.whl", hash = "sha256:603dbc52c75b84da501b9b2ec8c11e1f61c25984c4a0dda1f129ef391fbfc9b4"}, - {file = "fsspec-2023.9.2.tar.gz", hash = "sha256:80bfb8c70cc27b2178cc62a935ecf242fc6e8c3fb801f9c571fc01b1e715ba7d"}, + {file = "fsspec-2023.10.0-py3-none-any.whl", hash = "sha256:346a8f024efeb749d2a5fca7ba8854474b1ff9af7c3faaf636a4548781136529"}, + {file = "fsspec-2023.10.0.tar.gz", hash = "sha256:330c66757591df346ad3091a53bd907e15348c2ba17d63fd54f5c39c4457d2a5"}, ] [package.extras] @@ -691,13 +691,13 @@ tqdm = ["tqdm"] [[package]] name = "gitdb" -version = "4.0.10" +version = "4.0.11" description = "Git Object Database" optional = false python-versions = ">=3.7" files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", 
hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, ] [package.dependencies] @@ -705,20 +705,20 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.37" +version = "3.1.40" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.37-py3-none-any.whl", hash = "sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33"}, - {file = "GitPython-3.1.37.tar.gz", hash = "sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54"}, + {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, + {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] [[package]] name = "giturlparse" @@ -733,74 +733,68 @@ files = [ [[package]] name = "greenlet" -version = "3.0.0" +version = "3.0.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"}, - {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"}, - {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"}, - {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"}, - {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = "sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"}, - {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"}, - {file 
= "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"}, - {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, - {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, - {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, - {file = "greenlet-3.0.0-cp311-universal2-macosx_10_9_universal2.whl", hash = "sha256:c3692ecf3fe754c8c0f2c95ff19626584459eab110eaab66413b1e7425cd84e9"}, - {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"}, - {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, - {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, - {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, - {file = "greenlet-3.0.0-cp312-universal2-macosx_10_9_universal2.whl", hash = "sha256:553d6fb2324e7f4f0899e5ad2c427a4579ed4873f42124beba763f16032959af"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"}, - {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"}, - {file = 
"greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"}, - {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"}, - {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"}, - {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"}, - {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"}, - {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"}, - {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"}, - {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"}, - {file = "greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"}, - {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"}, - {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = "sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"}, - {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"}, - {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"}, - {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"}, - {file = 
"greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"}, - {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, - {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, - {file = "greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, - {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, + {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, + {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, + {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, + {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, + {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, + {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, + {file = 
"greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, + {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, + {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, + {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, + {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, + {file = 
"greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, + {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, + {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, + {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, + {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, + {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, + {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, + {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, + {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, ] [package.extras] @@ -886,13 +880,13 @@ files = [ [[package]] name = "langchain" -version = "0.0.308" +version = "0.0.323" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-0.0.308-py3-none-any.whl", hash = "sha256:807de0a8f4177e42e435682cfd33e600518d04e1688149afda8542b9d31a407f"}, - {file = "langchain-0.0.308.tar.gz", hash = "sha256:496ddef6c0aa8e73b3c28bad8c4cb02cdb7330e8ba80b238f1b3e0d663756b1b"}, + {file = "langchain-0.0.323-py3-none-any.whl", hash = "sha256:8c305c8d162262439b0cb73a6621c6ae8b3abde56d45c561a6b88709567cc765"}, + {file = "langchain-0.0.323.tar.gz", hash = "sha256:320116337933fdda48911e84f46c2d71e74eba647c05922a117c71669ccee9e2"}, ] [package.dependencies] @@ -901,7 +895,7 @@ anyio = "<4.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">=0.5.7,<0.7" jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.0.40,<0.1.0" 
+langsmith = ">=0.0.43,<0.1.0" numpy = ">=1,<2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -910,13 +904,14 @@ SQLAlchemy = ">=1.4,<3" tenacity = ">=8.1.0,<9.0.0" [package.extras] -all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<4)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.6.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"] +all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.10.1,<2.0.0)", "nebula3-python 
(>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<4)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.6.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"] azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (>=0,<1)"] clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] cohere = ["cohere (>=4,<5)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["amazon-textract-caller (<2)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "dashvector (>=1.0.1,<2.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (>=0,<1)", "openapi-schema-pydantic (>=1.2,<2.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "amazon-textract-caller (<2)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "dashvector (>=1.0.1,<2.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq 
(>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (>=0,<1)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] javascript = ["esprima (>=4.0.1,<5.0.0)"] llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.6.0)"] @@ -939,13 +934,13 @@ six = "*" [[package]] name = "langsmith" -version = "0.0.41" +version = "0.0.52" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.41-py3-none-any.whl", hash = "sha256:a555bef3d51e37bce284090b155e2148ec4098efa96ee918b3092c43c4bfaa77"}, - {file = "langsmith-0.0.41.tar.gz", hash = "sha256:ea05649bb140d6e58614e171df6539410b77ce393c23545453278677e916e351"}, + {file = "langsmith-0.0.52-py3-none-any.whl", hash = "sha256:d02a0ade5a53b36143084e57003ed38ccbdf5fc15a5a0eb14f8989ceaee0b807"}, + {file = "langsmith-0.0.52.tar.gz", hash = "sha256:1dc29082d257deea1859cb22c53d9481ca5c4a37f3af40c0f9d300fb8adc91db"}, ] [package.dependencies] @@ -954,13 +949,13 @@ requests = ">=2,<3" [[package]] name = "llama-index" -version = "0.8.39.post2" +version = "0.8.42" description = "Interface between LLMs and your data" optional = false python-versions = "*" files = [ - {file = "llama_index-0.8.39.post2-py3-none-any.whl", hash = "sha256:52fd490a14dada49270a746b8efc7874ab2a98265a61b46678e62f1bb89a0a9d"}, - {file = "llama_index-0.8.39.post2.tar.gz", hash = "sha256:3145b15a6330c7c08cedbd60dcfad19b8d40553d4a0da1da248ead113c67d8a4"}, + {file = "llama_index-0.8.42-py3-none-any.whl", hash = "sha256:08720554ceaef169e1a151f4fd982c0555703e85b23bf0dbf290d9dc0605655d"}, + {file = "llama_index-0.8.42.tar.gz", hash = "sha256:14cca43188d9afd058d74c5574e745dc937e724a20576a9ef81967b9f2659b03"}, ] [package.dependencies] @@ -1089,17 +1084,17 @@ source = ["Cython (>=0.29.35)"] [[package]] name = "markdown" -version = "3.4.4" +version = "3.5" description = "Python implementation of John Gruber's Markdown." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, - {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, + {file = "Markdown-3.5-py3-none-any.whl", hash = "sha256:4afb124395ce5fc34e6d9886dab977fd9ae987fc6e85689f08278cf0c69d4bf3"}, + {file = "Markdown-3.5.tar.gz", hash = "sha256:a807eb2e4778d9156c8f07876c6e4d50b5494c5665c4834f67b06459dfd877b3"}, ] [package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] @@ -1269,43 +1264,43 @@ twitter = ["twython"] [[package]] name = "numpy" -version = "1.26.0" +version = "1.26.1" description = "Fundamental package for array computing in Python" optional = false python-versions = "<3.13,>=3.9" files = [ - {file = "numpy-1.26.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8db2f125746e44dce707dd44d4f4efeea8d7e2b43aace3f8d1f235cfa2733dd"}, - {file = "numpy-1.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0621f7daf973d34d18b4e4bafb210bbaf1ef5e0100b5fa750bd9cde84c7ac292"}, - {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51be5f8c349fdd1a5568e72713a21f518e7d6707bcf8503b528b88d33b57dc68"}, - {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:767254ad364991ccfc4d81b8152912e53e103ec192d1bb4ea6b1f5a7117040be"}, - {file = "numpy-1.26.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:436c8e9a4bdeeee84e3e59614d38c3dbd3235838a877af8c211cfcac8a80b8d3"}, - {file = "numpy-1.26.0-cp310-cp310-win32.whl", hash = "sha256:c2e698cb0c6dda9372ea98a0344245ee65bdc1c9dd939cceed6bb91256837896"}, - {file = "numpy-1.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:09aaee96c2cbdea95de76ecb8a586cb687d281c881f5f17bfc0fb7f5890f6b91"}, - {file = "numpy-1.26.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:637c58b468a69869258b8ae26f4a4c6ff8abffd4a8334c830ffb63e0feefe99a"}, - {file = "numpy-1.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:306545e234503a24fe9ae95ebf84d25cba1fdc27db971aa2d9f1ab6bba19a9dd"}, - {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6adc33561bd1d46f81131d5352348350fc23df4d742bb246cdfca606ea1208"}, - {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e062aa24638bb5018b7841977c360d2f5917268d125c833a686b7cbabbec496c"}, - {file = "numpy-1.26.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:546b7dd7e22f3c6861463bebb000646fa730e55df5ee4a0224408b5694cc6148"}, - {file = "numpy-1.26.0-cp311-cp311-win32.whl", hash = "sha256:c0b45c8b65b79337dee5134d038346d30e109e9e2e9d43464a2970e5c0e93229"}, - {file = "numpy-1.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:eae430ecf5794cb7ae7fa3808740b015aa80747e5266153128ef055975a72b99"}, - {file = "numpy-1.26.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:166b36197e9debc4e384e9c652ba60c0bacc216d0fc89e78f973a9760b503388"}, - {file = "numpy-1.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f042f66d0b4ae6d48e70e28d487376204d3cbf43b84c03bac57e28dac6151581"}, - {file = 
"numpy-1.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5e18e5b14a7560d8acf1c596688f4dfd19b4f2945b245a71e5af4ddb7422feb"}, - {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6bad22a791226d0a5c7c27a80a20e11cfe09ad5ef9084d4d3fc4a299cca505"}, - {file = "numpy-1.26.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4acc65dd65da28060e206c8f27a573455ed724e6179941edb19f97e58161bb69"}, - {file = "numpy-1.26.0-cp312-cp312-win32.whl", hash = "sha256:bb0d9a1aaf5f1cb7967320e80690a1d7ff69f1d47ebc5a9bea013e3a21faec95"}, - {file = "numpy-1.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:ee84ca3c58fe48b8ddafdeb1db87388dce2c3c3f701bf447b05e4cfcc3679112"}, - {file = "numpy-1.26.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a873a8180479bc829313e8d9798d5234dfacfc2e8a7ac188418189bb8eafbd2"}, - {file = "numpy-1.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:914b28d3215e0c721dc75db3ad6d62f51f630cb0c277e6b3bcb39519bed10bd8"}, - {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c78a22e95182fb2e7874712433eaa610478a3caf86f28c621708d35fa4fd6e7f"}, - {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f737708b366c36b76e953c46ba5827d8c27b7a8c9d0f471810728e5a2fe57c"}, - {file = "numpy-1.26.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b44e6a09afc12952a7d2a58ca0a2429ee0d49a4f89d83a0a11052da696440e49"}, - {file = "numpy-1.26.0-cp39-cp39-win32.whl", hash = "sha256:5671338034b820c8d58c81ad1dafc0ed5a00771a82fccc71d6438df00302094b"}, - {file = "numpy-1.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:020cdbee66ed46b671429c7265cf00d8ac91c046901c55684954c3958525dab2"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0792824ce2f7ea0c82ed2e4fecc29bb86bee0567a080dacaf2e0a01fe7654369"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d484292eaeb3e84a51432a94f53578689ffdea3f90e10c8b203a99be5af57d8"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:186ba67fad3c60dbe8a3abff3b67a91351100f2661c8e2a80364ae6279720299"}, - {file = "numpy-1.26.0.tar.gz", hash = "sha256:f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf"}, + {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"}, + {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"}, + {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"}, + {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9163ca8aeb7fd32fe93866490654d2f7dda4e61bc6297bf72ce07fdc02f67"}, + {file = "numpy-1.26.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:78ca54b2f9daffa5f323f34cdf21e1d9779a54073f0018a3094ab907938331a2"}, + {file = "numpy-1.26.1-cp310-cp310-win32.whl", hash = "sha256:d1cfc92db6af1fd37a7bb58e55c8383b4aa1ba23d012bdbba26b4bcca45ac297"}, + {file = "numpy-1.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:d2984cb6caaf05294b8466966627e80bf6c7afd273279077679cb010acb0e5ab"}, + {file = "numpy-1.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd7837b2b734ca72959a1caf3309457a318c934abef7a43a14bb984e574bbb9a"}, + {file = 
"numpy-1.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c59c046c31a43310ad0199d6299e59f57a289e22f0f36951ced1c9eac3665b9"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58e8c51a7cf43090d124d5073bc29ab2755822181fcad978b12e144e5e5a4b3"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6081aed64714a18c72b168a9276095ef9155dd7888b9e74b5987808f0dd0a974"}, + {file = "numpy-1.26.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:97e5d6a9f0702c2863aaabf19f0d1b6c2628fbe476438ce0b5ce06e83085064c"}, + {file = "numpy-1.26.1-cp311-cp311-win32.whl", hash = "sha256:b9d45d1dbb9de84894cc50efece5b09939752a2d75aab3a8b0cef6f3a35ecd6b"}, + {file = "numpy-1.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:3649d566e2fc067597125428db15d60eb42a4e0897fc48d28cb75dc2e0454e53"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d1bd82d539607951cac963388534da3b7ea0e18b149a53cf883d8f699178c0f"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd5ced4e5a96dac6725daeb5242a35494243f2239244fad10a90ce58b071d24"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03fb25610ef560a6201ff06df4f8105292ba56e7cdd196ea350d123fc32e24e"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcfaf015b79d1f9f9c9fd0731a907407dc3e45769262d657d754c3a028586124"}, + {file = "numpy-1.26.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e509cbc488c735b43b5ffea175235cec24bbc57b227ef1acc691725beb230d1c"}, + {file = "numpy-1.26.1-cp312-cp312-win32.whl", hash = "sha256:af22f3d8e228d84d1c0c44c1fbdeb80f97a15a0abe4f080960393a00db733b66"}, + {file = "numpy-1.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f42284ebf91bdf32fafac29d29d4c07e5e9d1af862ea73686581773ef9e73a7"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb894accfd16b867d8643fc2ba6c8617c78ba2828051e9a69511644ce86ce83e"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e44ccb93f30c75dfc0c3aa3ce38f33486a75ec9abadabd4e59f114994a9c4617"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9696aa2e35cc41e398a6d42d147cf326f8f9d81befcb399bc1ed7ffea339b64e"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b411040beead47a228bde3b2241100454a6abde9df139ed087bd73fc0a4908"}, + {file = "numpy-1.26.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e11668d6f756ca5ef534b5be8653d16c5352cbb210a5c2a79ff288e937010d5"}, + {file = "numpy-1.26.1-cp39-cp39-win32.whl", hash = "sha256:d1d2c6b7dd618c41e202c59c1413ef9b2c8e8a15f5039e344af64195459e3104"}, + {file = "numpy-1.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:59227c981d43425ca5e5c01094d59eb14e8772ce6975d4b2fc1e106a833d5ae2"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06934e1a22c54636a059215d6da99e23286424f316fddd979f5071093b648668"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76ff661a867d9272cd2a99eed002470f46dbe0943a5ffd140f49be84f68ffc42"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"}, + {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"}, ] [[package]] @@ -1439,25 
+1434,27 @@ files = [ [[package]] name = "psutil" -version = "5.9.5" +version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = 
"psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, ] [package.extras] @@ -1738,6 +1735,126 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "rapidfuzz" +version = "3.4.0" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.7" +files = [ + {file = "rapidfuzz-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1438e68fe8869fe6819a313140e98641b34bfc89234b82486d8fd02044a067e8"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59f851c7a54a9652b9598553547e0940244bfce7c9b672bac728efa0b9028d03"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6286510910fcd649471a7f5b77fcc971e673729e7c84216dbf321bead580d5a1"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87409e12f9a82aa33a5b845c49dd8d5d4264f2f171f0a69ddc638e100fcc50de"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1d81d380ceabc8297880525c9d8b9e93fead38d3d2254e558c36c18aaf2553f"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a716efcfc92659d8695291f07da4fa60f42a131dc4ceab583931452dd5662e92"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83387fb81c4c0234b199110655779762dd5982cdf9de4f7c321110713193133e"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55efb3231bb954f3597313ebdf104289b8d139d5429ad517051855f84e12b94e"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51d47d52c890cbdb2d8b2085d747e557f15efd9c990cb6ae624c8f6948c4aa3a"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3db79070888d0dcd4f6a20fd30b8184dd975d6b0f7818acff5d7e07eba19b71f"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:46efc5e4675e2bd5118427513f86eaf3689e1482ebd309ad4532bcefae78179d"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d15c364c5aa8f032dadf5b82fa02b7a4bd9688a961a27961cd5b985203f58037"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f1e91460baa42f5408f3c062913456a24b2fc1a181959b58a9c06b5eef700ca6"}, + {file 
= "rapidfuzz-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c7f4f6dac25c120de8845a65a97090658c8a976827ac22b6b86e2a16a60bb820"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:124578029d926b2be32d60b748be95ee0de6cb2753eb49d6d1d6146269b428b9"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:3af0384132e79fe6f6370d49347649382e04f689277525903bef84d30f3992fd"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:66ff93b81b382269dc7c2d46c839ce72e2d2331ad46a06321770bc94016fe236"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da2764604a31fd1e3f1cacf226b43a871cc9f28844a3196c2a6b1ba52ae12922"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8eb33895353bfcc33ccf4b4bae837c0afb4eaf20a0361aa6f0800cef12505e91"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed3da08830c08c8bcd49414cc06b704a760d3067804775facc0df725b52085a4"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b38c7021f6114cfacba5717192fb3e1e50053261d49a774e645021a2f77e20a3"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5ea97886d2ec7b2b9a8172812a76e1d243f2ce705c2f24baf46f9ef5d3951"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b9a7ab061c1b75b274fc2ebd1d29cfa2e510c36e2f4cd9518a6d56d589003c8"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23b07685c21c93cdf6d68b49eccacfe975651b8d99ea8a02687400c60315e5bc"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c2a564f748497b6a5e08a1dc0ac06655f65377cf072c4f0e2c73818acc655d36"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ef30b5f2720f0acbcfba0e0661a4cc118621c47cf69b5fe92531dfed1e369e1c"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab981f9091ae8bd32bca9289fa1019b4ec656543489e7e13e64882d57d989282"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a80f9aa4245a49e0677896d1b51b2b3bc36472aff7cec31c4a96f789135f03fe"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d8c6cb80b5d2edf88bf6a88ac6827a353c974405c2d7e3025ed9527a5dbe1a6"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-win32.whl", hash = "sha256:c0150d521199277b5ad8bd3b060a5f3c1dbdf11df0533b4d79f458ef11d07e8c"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:bd50bc90167601963e2a90b820fb862d239ecb096a991bf3ce33ffaa1d6eedee"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:bd10d68baabb63a3bb36b683f98fc481fcc62230e493e4b31e316bd5b299ef68"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7f497f850d46c5e08f3340343842a28ede5d3997e5d1cadbd265793cf47417e5"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7d6a9f04ea1277add8943d4e144e59215009f54f2668124ff26dee18a875343"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b6fe2aff0d9b35191701714e05afe08f79eaea376a3a6ca802b72d9e5b48b545"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b81b8bc29114ca861fed23da548a837832b85495b0c1b2600e6060e3cf4d50aa"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:805dc2aa3ac295dcbf2df8c1e420e8a73b1f632d6820a5a1c8506d22c11e0f27"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1276c7f50cd90a48b00084feb25256135c9ace6c599295dd5932949ec30c0e70"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b9197656a6d71483959bf7d216e7fb7a6b80ca507433bcb3015fb92abc266f8"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3456f4df5b8800315fd161045c996479016c112228e4da370d09ed80c24853e5"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:734046d557550589edb83d5ad1468a1341d1092f1c64f26fd0b1fc50f9efdce1"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:37d5f0fbad6c092c89840eea2c4c845564d40849785de74c5e6ff48b47b0ecf6"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:bfe14711b9a7b744e242a482c6cabb696517a1a9946fc1e88d353cd3eb384788"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a733c10b1fcc47f837c23ab4a255cc4021a88939ff81baa64d6738231cba33d"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:929e6b71e5b36caee2ee11c209e75a0fcbd716a1b76ae6162b89ee9b591b63b1"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-win32.whl", hash = "sha256:c56073ba1d1b25585359ad9769163cb2f3183e7a03c03b914a0667fcbd95dc5c"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:bf58ba21df06fc8aeef3056fd137eca0a593c2f5c82923a4524d251dc5f3df5d"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:f3effbe9c677658b3149da0d2778a740a6b7d8190c1407fd0c0770a4e223cfe0"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ed0d5761b44d9dd87278d5c32903bb55632346e4d84ea67ba2e4a84afc3b7d45"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bafbd3e2e9e0b5f740f66155cc7e1e23eee1e1f2c44eff12daf14f90af0e8ab"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2543fd8d0fb3b1ac065bf94ee54c0ea33343c62481d8e54b6117a88c92c9b721"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93ceb62ade1a0e62696487274002157a58bb751fc82cd25016fc5523ba558ca5"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76f4162ce5fe08609455d318936ed4aa709f40784be61fb4e200a378137b0230"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f723197f2dbce508a7030dcf6d3fc940117aa54fc876021bf6f6feeaf3825ba1"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cfdc74afd93ac71270b5be5c25cb864b733b9ae32b07495705a6ac294ac4c390"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:273c7c7f5b405f2f54d41e805883572d57e1f0a56861f93ca5a6733672088acb"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:712dd91d429afaddbf7e86662155f2ad9bc8135fca5803a01035a3c1d76c5977"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9814905414696080d8448d6e6df788a0148954ab34d7cd8d75bcb85ba30e0b25"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:01013ee67fb15608c8c5961af3bc2b1f242cff94c19f53237c9b3f0edb8e0a2d"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-win32.whl", hash = 
"sha256:8f5d2adc48c181486125d42230e80479a1e0568942e883d1ebdeb76cd3f83470"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c92d847c997c384670e3b4cf6727cb73a4d7a7ba6457310e2083cf06d56013c4"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d0bda173b0ec1fa546f123088c0d42c9096304771b4c0555d4e08a66a246b3f6"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bbb05b1203f683b341f44ebe8fe38afed6e56f606094f9840d6406e4a7bf0eab"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0075ff8990437923da42202b60cf04b5c122ee2856f0cf2344fb890cadecf57"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f295842c282fe7fe93bfe7a20e78f33f43418f47fb601f2f0a05df8a8282b43"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebee7313719dfe652debb74bdd4024e8cf381a59adc6d065520ff927f3445f4"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f71454249ddd29d8ba5415ed7307e7b7493fc7e9018f1ff496127b8b9a8df94b"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52c6b7a178f0e800488fa1aede17b00f6397cab0b79d48531504b0d89e45315f"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d38596c804a9f2bd49360c15e1f4afbf016f181fe37fc4f1a4ddd247d3e91e5"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8756461e7ee79723b8f762fc6db226e65eb453bf9fa64b14fc0274d4aaaf9e21"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e14799297f194a4480f373e45142ef16d5dc68a42084c0e2018e0bdba56a8fef"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f813fb663d90038c1171d30ea1b6b275e09fced32f1d12b972c6045d9d4233f2"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0df66e07e42e2831fae84dea481f7803bec7cfa53c31d770e86ac47bb18dcd57"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b05c7d4b4ddb617e977d648689013e50e5688140ee03538d3760a3a11d4fa8a2"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-win32.whl", hash = "sha256:74b9a1c1fc139d325fb0b89ccc85527d27096a76f6ed690ee3378143cc38e91d"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5fe3ef7daecd79f852936528e37528fd88818bc000991e0fea23b9ac5b79e875"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61f16bb0f3026853500e7968261831a2e1a35d56947752bb6cf6953afd70b9de"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d188e8fb5a9709931c6a48cc62c4ac9b9d163969333711e426d9dbd134c1489b"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c006aa481d1b91c2600920ce16e42d208a4b6f318d393aef4dd2172d568f2641"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02afbe7ed12e9191082ed7bda43398baced1d9d805302b7b010d397de3ae973f"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01d64710060bc3241c08ac1f1a9012c7184f3f4c3d6e2eebb16c6093a03f6a67"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3198f70b97127e52a4f96bb2f7de447f89baa338ff398eb126930c8e3137ad1"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:50ad7bac98a0f00492687eddda73d2c0bdf71c78b52fddaa5901634ae323d3ce"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc3efc06db79e818f4a6783a4e001b3c8b2c61bd05c0d5c4d333adaf64ed1b34"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:75d1365387ec8ef2128fd7e2f7436aa1a04a1953bc6d7068835bb769cd07c146"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a0750278693525b5ce58d3b313e432dfa5d90f00d06ae54fa8cde87f2a397eb0"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2e49151572b842d290dcee2cc6f9ce7a7b40b77cc20d0f6d6b54e7afb7bafa5c"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:8b38d7677b2f20b137bb7aaf0dcd3d8ac2a2cde65f09f5621bf3f57d9a1e5d6e"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d904ac97f2e370f91e8170802669c8ad68641bf84d742968416b53c5960410c6"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-win32.whl", hash = "sha256:53bbef345644eac1c2d7cc21ade4fe9554fa289f60eb2c576f7fdc454dbc0641"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:233bf022938c38060a93863ec548e624d69a56d7384634d8bea435b915b88e52"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:63933792146f3d333680d415cecc237e6275b42ad948d0a798f9a81325517666"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e182ea5c809e7ed36ebfbcef4bb1808e213d27b33c036007a33bcbb7ba498356"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e1142c8d35fa6f3af8150d02ff8edcbea3723c851d889e8b2172e0d1b99f3f7"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b8258846e56b03230fa733d29bb4f9fb1f4790ac97d1ebe9faa3ff9d2850999"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:950d1dfd2927cd45c9bb2927933926718f0a17792841e651d42f4d1cb04a5c1d"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd54dd0355225dc3c1d55e233d510adcccee9bb25d656b4cf1136114b92e7bf3"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f5921780e7995e9ac3cea41fa57b623159d7295788618d3f2946d61328c25c25"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc4b1b69a64d337c40fa07a721dae1b1550d90f17973fb348055f6440d597e26"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f5c8b901b6d3be63591c68e2612f76ad85af27193d0a88d4d87bb047aeafcb3"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f5ced39aff6277dd772b239ef8aa8fc810200a3b42f69ddbb085ea0e18232"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4fd94acab871afbc845400814134a83512a711e824dc2c9a9776d6123464a221"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:437508ec1ea6e71a77126715ac6208cb9c3e74272536ebfa79be9dd008cfb85f"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7215f7c5de912b364d5cf7c4c66915ccf4acf71aafbb8da62ad346569196e15"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:698488002eb7be2f737e48679ed0cd310b76291f26d8ec792db8345d13eb6573"}, + {file = 
"rapidfuzz-3.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e77873126eb07e7461f0b675263e6c5d42c8a952e88e4a44eeff96f237b2b024"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:28d03cd33817f6e0bea9b618b460f85ff9c9c3fedc6c19cfa0992f719a0d1801"}, + {file = "rapidfuzz-3.4.0.tar.gz", hash = "sha256:a74112e2126b428c77db5e96f7ce34e91e750552147305b2d361122cbede2955"}, +] + +[package.extras] +full = ["numpy"] + [[package]] name = "regex" version = "2023.10.3" @@ -1858,32 +1975,37 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "scikit-learn" -version = "1.3.1" +version = "1.3.2" description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.8" files = [ - {file = "scikit-learn-1.3.1.tar.gz", hash = "sha256:1a231cced3ee3fa04756b4a7ab532dc9417acd581a330adff5f2c01ac2831fcf"}, - {file = "scikit_learn-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3153612ff8d36fa4e35ef8b897167119213698ea78f3fd130b4068e6f8d2da5a"}, - {file = "scikit_learn-1.3.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6bb9490fdb8e7e00f1354621689187bef3cab289c9b869688f805bf724434755"}, - {file = "scikit_learn-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7135a03af71138669f19bc96e7d0cc8081aed4b3565cc3b131135d65fc642ba"}, - {file = "scikit_learn-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d8dee8c1f40eeba49a85fe378bdf70a07bb64aba1a08fda1e0f48d27edfc3e6"}, - {file = "scikit_learn-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:4d379f2b34096105a96bd857b88601dffe7389bd55750f6f29aaa37bc6272eb5"}, - {file = "scikit_learn-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14e8775eba072ab10866a7e0596bc9906873e22c4c370a651223372eb62de180"}, - {file = "scikit_learn-1.3.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:58b0c2490eff8355dc26e884487bf8edaccf2ba48d09b194fb2f3a026dd64f9d"}, - {file = "scikit_learn-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f66eddfda9d45dd6cadcd706b65669ce1df84b8549875691b1f403730bdef217"}, - {file = "scikit_learn-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6448c37741145b241eeac617028ba6ec2119e1339b1385c9720dae31367f2be"}, - {file = "scikit_learn-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c413c2c850241998168bbb3bd1bb59ff03b1195a53864f0b80ab092071af6028"}, - {file = "scikit_learn-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:52b77cc08bd555969ec5150788ed50276f5ef83abb72e6f469c5b91a0009bbca"}, - {file = "scikit_learn-1.3.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a683394bc3f80b7c312c27f9b14ebea7766b1f0a34faf1a2e9158d80e860ec26"}, - {file = "scikit_learn-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15d964d9eb181c79c190d3dbc2fff7338786bf017e9039571418a1d53dab236"}, - {file = "scikit_learn-1.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ce9233cdf0cdcf0858a5849d306490bf6de71fa7603a3835124e386e62f2311"}, - {file = "scikit_learn-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:1ec668ce003a5b3d12d020d2cde0abd64b262ac5f098b5c84cf9657deb9996a8"}, - {file = "scikit_learn-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccbbedae99325628c1d1cbe3916b7ef58a1ce949672d8d39c8b190e10219fd32"}, - {file = "scikit_learn-1.3.1-cp39-cp39-macosx_12_0_arm64.whl", hash = 
"sha256:845f81c7ceb4ea6bac64ab1c9f2ce8bef0a84d0f21f3bece2126adcc213dfecd"}, - {file = "scikit_learn-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8454d57a22d856f1fbf3091bd86f9ebd4bff89088819886dc0c72f47a6c30652"}, - {file = "scikit_learn-1.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d993fb70a1d78c9798b8f2f28705bfbfcd546b661f9e2e67aa85f81052b9c53"}, - {file = "scikit_learn-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:66f7bb1fec37d65f4ef85953e1df5d3c98a0f0141d394dcdaead5a6de9170347"}, + {file = "scikit-learn-1.3.2.tar.gz", hash = "sha256:a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e326c0eb5cf4d6ba40f93776a20e9a7a69524c4db0757e7ce24ba222471ee8a1"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:535805c2a01ccb40ca4ab7d081d771aea67e535153e35a1fd99418fcedd1648a"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1215e5e58e9880b554b01187b8c9390bf4dc4692eedeaf542d3273f4785e342c"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ee107923a623b9f517754ea2f69ea3b62fc898a3641766cb7deb2f2ce450161"}, + {file = "scikit_learn-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:35a22e8015048c628ad099da9df5ab3004cdbf81edc75b396fd0cff8699ac58c"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6fb6bc98f234fda43163ddbe36df8bcde1d13ee176c6dc9b92bb7d3fc842eb66"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:18424efee518a1cde7b0b53a422cde2f6625197de6af36da0b57ec502f126157"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3271552a5eb16f208a6f7f617b8cc6d1f137b52c8a1ef8edf547db0259b2c9fb"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4144a5004a676d5022b798d9e573b05139e77f271253a4703eed295bde0433"}, + {file = "scikit_learn-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:67f37d708f042a9b8d59551cf94d30431e01374e00dc2645fa186059c6c5d78b"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8db94cd8a2e038b37a80a04df8783e09caac77cbe052146432e67800e430c028"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:61a6efd384258789aa89415a410dcdb39a50e19d3d8410bd29be365bcdd512d5"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb06f8dce3f5ddc5dee1715a9b9f19f20d295bed8e3cd4fa51e1d050347de525"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b2de18d86f630d68fe1f87af690d451388bb186480afc719e5f770590c2ef6c"}, + {file = "scikit_learn-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:0402638c9a7c219ee52c94cbebc8fcb5eb9fe9c773717965c1f4185588ad3107"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a19f90f95ba93c1a7f7924906d0576a84da7f3b2282ac3bfb7a08a32801add93"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b8692e395a03a60cd927125eef3a8e3424d86dde9b2370d544f0ea35f78a8073"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e1e94cc23d04d39da797ee34236ce2375ddea158b10bee3c343647d615581d"}, + {file = 
"scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:785a2213086b7b1abf037aeadbbd6d67159feb3e30263434139c98425e3dcfcf"}, + {file = "scikit_learn-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:64381066f8aa63c2710e6b56edc9f0894cc7bf59bd71b8ce5613a4559b6145e0"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c43290337f7a4b969d207e620658372ba3c1ffb611f8bc2b6f031dc5c6d1d03"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dc9002fc200bed597d5d34e90c752b74df516d592db162f756cc52836b38fe0e"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d08ada33e955c54355d909b9c06a4789a729977f165b8bae6f225ff0a60ec4a"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f0ae4b79b0ff9cca0bf3716bcc9915bdacff3cebea15ec79652d1cc4fa5c9"}, + {file = "scikit_learn-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ed932ea780517b00dae7431e031faae6b49b20eb6950918eb83bd043237950e0"}, ] [package.dependencies] @@ -1942,13 +2064,13 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo [[package]] name = "sentry-sdk" -version = "1.31.0" +version = "1.32.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.31.0.tar.gz", hash = "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b"}, - {file = "sentry_sdk-1.31.0-py2.py3-none-any.whl", hash = "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291"}, + {file = "sentry-sdk-1.32.0.tar.gz", hash = "sha256:935e8fbd7787a3702457393b74b13d89a5afb67185bc0af85c00cb27cbd42e7c"}, + {file = "sentry_sdk-1.32.0-py2.py3-none-any.whl", hash = "sha256:eeb0b3550536f3bbc05bb1c7e0feb3a78d74acb43b607159a606ed2ec0a33a4d"}, ] [package.dependencies] @@ -2183,52 +2305,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.21" +version = "2.0.22" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e7dc99b23e33c71d720c4ae37ebb095bebebbd31a24b7d99dfc4753d2803ede"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f0c4ee579acfe6c994637527c386d1c22eb60bc1c1d36d940d8477e482095d4"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f7d57a7e140efe69ce2d7b057c3f9a595f98d0bbdfc23fd055efdfbaa46e3a5"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca38746eac23dd7c20bec9278d2058c7ad662b2f1576e4c3dbfcd7c00cc48fa"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3cf229704074bce31f7f47d12883afee3b0a02bb233a0ba45ddbfe542939cca4"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb87f763b5d04a82ae84ccff25554ffd903baafba6698e18ebaf32561f2fe4aa"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-win32.whl", hash = "sha256:89e274604abb1a7fd5c14867a412c9d49c08ccf6ce3e1e04fffc068b5b6499d4"}, - {file = "SQLAlchemy-2.0.21-cp310-cp310-win_amd64.whl", hash = "sha256:e36339a68126ffb708dc6d1948161cea2a9e85d7d7b0c54f6999853d70d44430"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf8eebccc66829010f06fbd2b80095d7872991bfe8415098b9fe47deaaa58063"}, - {file = 
"SQLAlchemy-2.0.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b977bfce15afa53d9cf6a632482d7968477625f030d86a109f7bdfe8ce3c064a"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ff3dc2f60dbf82c9e599c2915db1526d65415be323464f84de8db3e361ba5b9"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ac5c89b6896f4740e7091f4a0ff2e62881da80c239dd9408f84f75a293dae9"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf91ebf15258c4701d71dcdd9c4ba39521fb6a37379ea68088ce8cd869b446"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-win32.whl", hash = "sha256:af520a730d523eab77d754f5cf44cc7dd7ad2d54907adeb3233177eeb22f271b"}, - {file = "SQLAlchemy-2.0.21-cp311-cp311-win_amd64.whl", hash = "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a95aa0672e3065d43c8aa80080cdd5cc40fe92dc873749e6c1cf23914c4b83af"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8c323813963b2503e54d0944813cd479c10c636e3ee223bcbd7bd478bf53c178"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:419b1276b55925b5ac9b4c7044e999f1787c69761a3c9756dec6e5c225ceca01"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-win32.whl", hash = "sha256:4615623a490e46be85fbaa6335f35cf80e61df0783240afe7d4f544778c315a9"}, - {file = "SQLAlchemy-2.0.21-cp37-cp37m-win_amd64.whl", hash = "sha256:cca720d05389ab1a5877ff05af96551e58ba65e8dc65582d849ac83ddde3e231"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b4eae01faee9f2b17f08885e3f047153ae0416648f8e8c8bd9bc677c5ce64be9"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3eb7c03fe1cd3255811cd4e74db1ab8dca22074d50cd8937edf4ef62d758cdf4"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2d494b6a2a2d05fb99f01b84cc9af9f5f93bf3e1e5dbdafe4bed0c2823584c1"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19ae41ef26c01a987e49e37c77b9ad060c59f94d3b3efdfdbf4f3daaca7b5fe"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc6b15465fabccc94bf7e38777d665b6a4f95efd1725049d6184b3a39fd54880"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:014794b60d2021cc8ae0f91d4d0331fe92691ae5467a00841f7130fe877b678e"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-win32.whl", hash = "sha256:0268256a34806e5d1c8f7ee93277d7ea8cc8ae391f487213139018b6805aeaf6"}, - {file = "SQLAlchemy-2.0.21-cp38-cp38-win_amd64.whl", hash = "sha256:73c079e21d10ff2be54a4699f55865d4b275fd6c8bd5d90c5b1ef78ae0197301"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:785e2f2c1cb50d0a44e2cdeea5fd36b5bf2d79c481c10f3a88a8be4cfa2c4615"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:c111cd40910ffcb615b33605fc8f8e22146aeb7933d06569ac90f219818345ef"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9cba4e7369de663611ce7460a34be48e999e0bbb1feb9130070f0685e9a6b66"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a69067af86ec7f11a8e50ba85544657b1477aabf64fa447fd3736b5a0a4f67"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ccb99c3138c9bde118b51a289d90096a3791658da9aea1754667302ed6564f6e"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:513fd5b6513d37e985eb5b7ed89da5fd9e72354e3523980ef00d439bc549c9e9"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-win32.whl", hash = "sha256:f9fefd6298433b6e9188252f3bff53b9ff0443c8fde27298b8a2b19f6617eeb9"}, - {file = "SQLAlchemy-2.0.21-cp39-cp39-win_amd64.whl", hash = "sha256:2e617727fe4091cedb3e4409b39368f424934c7faa78171749f704b49b4bb4ce"}, - {file = "SQLAlchemy-2.0.21-py3-none-any.whl", hash = "sha256:ea7da25ee458d8f404b93eb073116156fd7d8c2a776d8311534851f28277b4ce"}, - {file = "SQLAlchemy-2.0.21.tar.gz", hash = "sha256:05b971ab1ac2994a14c56b35eaaa91f86ba080e9ad481b20d99d77f381bb6258"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = 
"sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = 
"sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, + {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"}, + {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"}, ] [package.dependencies] @@ -2450,57 +2580,69 @@ files = [ [[package]] name = "tree-sitter-languages" -version = "1.7.0" +version = "1.8.0" description = "Binary Python wheels for all tree sitter languages." 
optional = false python-versions = "*" files = [ - {file = "tree_sitter_languages-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fd8b856c224a74c395ed9495761c3ef8ba86014dbf6037d73634436ae683c808"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:277d1bec6e101a26a4445cd7cb1eb8f8cf5a9bbad1ca80692bfae1af63568272"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0473bd896799ccc87f428766813ddedd3506cad8430dbe863b663c81d7387680"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6799419bc7e3029112f2a3f8b77b6c299f94f03bb70e5c31a437b3180486be"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e5b705c8ce6ef47fc461484878956ecd42a67cbeb0a17e323b86a4439a8fdc3d"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:28a732be6fced2f70184c1b34f64961e3b6259fe6d5f7540c91028c2a43a7109"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-win32.whl", hash = "sha256:f5cdb1ec88f0b8c617330c953555a20cc7e96ca6b1f5c68ab6db347e869cfeeb"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:26cb344a75798fce1a73b690504d8e7789f6ba25a178efcd203444d7868caf38"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:433b56cb3dca02b30f21c596f431a2cff90905326be1f8913c3515acb984b21e"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96686390e1a01af44aedef7b33d6be82de3cf674a98a5c7b417e540e6afa62cc"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25a4b6d559fbd76c6ec1b73cf03d09f53aaa5a1b61078a3f518b162866d9d97e"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e504f199c7a4c8b1b1efb05a063450aa23234feea6fa6c06f4077f7248ea9c98"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6b29856e9314b5f68f05dfa45e6674f47535229dda32294ba6d129077a97759c"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:786fdaf3d2120eef9384b0f22d7e2e42a561073ba753c7b438e90a1e7b351650"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-win32.whl", hash = "sha256:a55a7007056d0927b78481b437d79ea0487cc991c7f9c19d67adcceac3d47f53"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:4b01d3bdf7ce2aeee4d0df62071a0ca91e618a29845686a5bd714d93c5ef3b36"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b603f1ad01bfb9d178f965125e2528cb7da9666d180f4a9a1acfaedbf5862ea"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70610aa26dd985d2fb9eb07ea8eacc3ceb0cc9c2e91416f51305120cfd919e28"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0444ebc8bdb7dc0d66a816050cfd52376c4e62a94a9c54fde90b29acf3e4bab1"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7eeb5a3307ff1c0994ffff5ea37ec656a716a728b8c9359374104da521a76ded"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6c319cef16f2df667f1c165fe4eee160f2b51a0c4b61db1e70de2ab86420ca9a"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-win32.whl", hash = 
"sha256:b216650126d95d494f927393903e836a7ef5f0c4db0834f3a0b576f97c13abaf"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6c96e5785d164a205962a10256808b3d12dccee9827ec88a46899063a2a2d28"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adafeabbd8d47b80122fad18bb61c25ed3da04f5347b7d774b53826accb27b7a"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50e2bc5d2da770ecd5af94f9d716faa4764f890fd61bc0a488e9269653d9fb71"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac773097cff7de6cf265c5be9990b4c6690161452da1d9fc41021d4bf7e8c73a"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b233bfc48cf0f16436200afc7d7643cd87101c321de25b919b61f21f1693aa52"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:eab3caedf50467045ed5cab776a57b494332616376d387c6600fd7ea4f5483cf"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-win32.whl", hash = "sha256:d533f743a22f5696494d3a5a60adb4cfbef63d58b8b5622993d93d6d0a602444"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:aab96f64be30c9f73d6dc958ec22bb1a9fe70e90b2d2a3d233d537b347cea729"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1bf89d771621e28847036b377f865f947e555a6654356d21beab738bb2531a69"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b2f171089ec3c4f1de275edc8f0722e1e3dc7a54e83107098315ea2f0952cfcd"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a091577d3a8454c40f813ee2834314c73cc504522f70f9e33d7c2268d33973f9"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8287efa87d080b340b583a6e81266cc3d8266deb61b8f3312649a9d1562e665a"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9c5080c06a2df7a59c69d2422a6ae83a5e37e92d57c4bd5e572d0eb5226ab3b0"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ca8f629cfb406a2f9b9f8a3a5c804d4d1ba4cdca41cccba63f51fc1bab13e5de"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-win32.whl", hash = "sha256:fd3561b37a99c9d501719819a8736529ae3a6d597128c15be432d1855f3cb0d9"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:377ad60f7a7bf27315676c4fa84cc766aa0019c1e556083763136ed951e934c0"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1dc71b68e48f58cd5b6a9ab7a541714201815629a6554a969cfc579a6ee6e53"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb1521367b14c275bef70997ea90526e7049f840ba1bbd3ef56c72f5b15596e9"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f73651f7e78371dc3d455e8aba510cc6fb9e1ac1d648c3334157950781eb295"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:049b0dd63be721fe3f9642a2b5a044bea2852de2b35818467996242ae4b7f01f"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c428a8e1f5ecc4eb5c79abff3eb2881123446cde16fd1d8866d527470a6fdd2f"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:40fb3fc11ff90caf65b4713feeb6c4852e5d2a04ef8ae6a2ac734a702a6a6c7e"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-win32.whl", hash = "sha256:f28e9904833b7a909f8227c4560401049bd3310cebe3e0a884d9461f783b9af2"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ea47ee390ec2e1c9bf96d7b418775263766021a834910c9f2d578f95a3e27d0f"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a20045f0c7a8394ac0c085c3a7da88438f9e62c6a8b661ebf63c3edb8c3f2bf6"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ef80d5896b420d434f7322abbc2c5a5548a37b3821c5486ed0612d2bd760d5a"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e7c7100c7b4a364035417e811ab8d43c8ee4e38d0c6ab9cad9c4d8133c0abd"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9618bfb5874c43fcb4da43cd71bc24f01f4f94cd55bb9923c4210c7f9e977eb5"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7b0b606be0c61155bde8e913528b7dc038e8476891f5b198996f780c678ecc0"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:306b49d60afb8c08f95a55e38744687521aa9350a97e9d6d1512db47ea401c51"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b561b979d1dc15a0b2bc35586fe4ccf95049812944042ea5760d8450b63c3fe0"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c46c82a5649c41fd4ce7483534fe548a98af6ef6490b5c9f066e2df43e40aa9"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-win32.whl", hash = "sha256:4d84b2bf63f8dc51188f83a6dfc7d70365e1c720310c1222f44d0cd2ec76e4d0"}, + {file = "tree_sitter_languages-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:c59b81123fa73e7d66d3a8bc0e64af2f2a8fcbbce1b08676d9188ec5edb4fb49"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a5816a1e394d717a86b9f5cbb0af08ad92a9badbb4b95678d75052e6bd7402"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:912a12a56361077715b231f1931cf7d472f7d6cfdc76abb806e6b1bdf11d3835"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33838baa8583b2c9f9df4d672237158dcc9d845782413569b51cc8dfed2fb4de"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b6f148e459e8af180be68e9f9c8f8c4db0db170850482b083fd078fba3f4076"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96dbdaff9d317d193451bc5b566098717096381d67674f9e65fb8f0ebe98c847"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c719535ebdd39f94c26f2182b0d16c45a2996b03b5ad7b78a863178eca1546d"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d5c4cb2f4231135d038155787c96f4ecdf44f63eeee8d9e36b100b96a80a7764"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:524bfa0bcbf0fe8cbb93712336d1de0a3073f08c004bb920270d69c0c3eaaf14"}, + {file = "tree_sitter_languages-1.8.0-cp311-cp311-win32.whl", hash = "sha256:26a0b923c47eeed551e4c307b7badb337564523cca36f9c40e188a308f471c72"}, + {file = 
"tree_sitter_languages-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3f0ed6297878f9f335f652843e9ab48c561f9a5b312a41a868b5fc127567447b"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0f18d0d98b92bfa40ec15fc4cc5eb5e1f39b9f2f8986cf4cb3e1f8a8e31b06cf"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c742b0733be6d057d323252c56b8419fa2e120510baf601f710363971ae99ae7"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4417710db978edf6bad1e1e59efba04693919ed45c4115bae7da359354d9d8af"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a051e1cceddd1126ce0fa0d3faa12873e5b52cafae0893cc82d22b21348fc83c"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2665768f7ef6d00ab3847c5a3a5fdd54fbc62a9abf80475bff26dcc7a4e8544f"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:76be6fd0d1e514e496eb3430b05ce0efd2f7d09fc3dfe47cc99afc653313c36a"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:510c5ba5dd3ce502f2963c46cc56ad4a0acd1b776be9b119da03f392bda9f8bf"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-win32.whl", hash = "sha256:f852ff7b77df5c7a3f8b825c31673aee59456a93347b58cfa43fdda81fe1cb63"}, + {file = "tree_sitter_languages-1.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:53934c8b09650e576ad5724b84c6891d84b69508ad71a78bb2d4dc88b63543fc"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:400ba190fd08cec9412d70efa09e2f1791a0db82a3e9b31f677e145ad2e48a9a"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:937b0e8cc07fb6574b475fcaded8dd16fa445c66f40bf449b4e50684fd8c380b"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c165c5d13ee335c74a2b6dc6edfcf85045839fa2f7254d2aae3ae9f76020e87d"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:124117c6184653cdd381c70a16e5d6a45a41c3f6470d9d756452ea50aa6bb472"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4c12232c93d4c5c8b3b6324850085971fa93c2226842778f07fe3fba9a7683c1"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b9baf99c00366fe2c8e61bf7489d86eaab4c884f669abdb30ba2450cfabb77f7"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f97baf3d574fc44872c1de8c941888c940a0376c8f80a15ec6931d19b4fe2091"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:c40267904f734d8a7e9a05ce60f04ea95db59cad183207c4af34e6bc1f5bbd1f"}, + {file = "tree_sitter_languages-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:06b8d11ea550d3c4f0ce0774d6b521c44f2e83d1a77d50f85bea3ed150e66c28"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9a151d4f2637309f1780b9a0422cdeea3c0a8a6209800f587fe4374ebe13e6a1"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a3afb35a316495ff1b848aadeb4f9f7ef6522e9b730a7a35cfe28361398404e"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d22eb91d745b96936c13fc1c100d78e6dcbaa14e9fbe54e180cdc6ca1b262c0f"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54a3a83474d3abb44a178aa1f0a5ef73002c014e7e489977fd39624c1ac0a476"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5a13aa1e6f0fc76268e8fed282fb433ca4b8f6644bb75476a10d28cc19d6cf3"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:68872fcea16f7ddbfeec52120b7070e18a820407d16f6b513ec95ede4110df82"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:43928c43d8a25204297c43bbaab0c4b567a7e85901a19ef9317a3964ad8eb76e"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cca84cacd5530f23ae5d05e4904c2d42f7479fd80541eda34c27cadbf9611d6b"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-win32.whl", hash = "sha256:9d043fdbaf260d0f36f8843acf43096765bed913be71ad705265dccb8e381e1c"}, + {file = "tree_sitter_languages-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f5bbccf1250dc07e74fd86f08a9ed614efd64986a48c142846cd21e84267d46b"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:10046058a4213304e3ba78a52ab88d8d5a2703f5d193e7e976d0a53c2fa12f4b"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2fc84bb37ca0bb1f45f808a38733f6bb9c2e8fc8a02712fe8658fe3d31ed74e7"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36b13199282d71d2a841f404f58ccf914b3917b27a99917b0a79b80c93f8a24e"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94f5f5ac57591004823385bd7f4cc1b62c7b0b08efc1c39a5e33fb2f8c201bf"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a796a359bd6fb4f2b67e29f86c9130bd6ae840d75d31d356594f92d5505f43d"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:45a6edf0106ff653940fe52fb8a47f8c03d0c5981312ac036888d44102840452"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f077fe6099bb310a247514b68d7103c6dbafef552856fcd225d0867f78b620b7"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3842ef8d05e3368c227fd5a57f08f636374b4b870070916d08c4aafb99d04cd1"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-win32.whl", hash = "sha256:3e9eafc7079114783b5385a769fd190c93525bcae3cf6791fd819c617067394e"}, + {file = "tree_sitter_languages-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:9d30b7f48f18a60eea9a0f9494e0f0ea6f560d861770a84c3faab8d7a446fc55"}, ] [package.dependencies] @@ -2545,16 +2687,17 @@ files = [ [[package]] name = "unstructured" -version = "0.10.18" +version = "0.10.26" description = "A library that prepares raw documents for downstream ML tasks." 
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "unstructured-0.10.18-py3-none-any.whl", hash = "sha256:eaec0f0ecc470bb646a750cb32c125275d34d258ced46cfc3364098939d9ca77"},
- {file = "unstructured-0.10.18.tar.gz", hash = "sha256:7f330573d4297182f4b1500e05c9fc4779a08811bce23c527a96898b2ff374f6"},
+ {file = "unstructured-0.10.26-py3-none-any.whl", hash = "sha256:90f0d53886988c189016db8683e05115b25ebdd510c05e343845fdb8ab81aca6"},
+ {file = "unstructured-0.10.26.tar.gz", hash = "sha256:eb31884095f29e9b536a1ea2cbede8675afb3c54b8d59920c73b8cc0639e8379"},
]
[package.dependencies]
+backoff = "*"
beautifulsoup4 = "*"
chardet = "*"
dataclasses-json = "*"
@@ -2566,43 +2709,47 @@ nltk = "*"
numpy = "*"
python-iso639 = "*"
python-magic = "*"
+rapidfuzz = "*"
requests = "*"
tabulate = "*"
+typing-extensions = "*"
[package.extras]
airtable = ["pyairtable"]
-all-docs = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)", "xlrd"]
+all-docs = ["ebooklib", "markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"]
azure = ["adlfs", "fsspec (==2023.9.1)"]
azure-cognitive-search = ["azure-search-documents"]
+bedrock = ["boto3", "langchain"]
biomed = ["bs4"]
box = ["boxfs", "fsspec (==2023.9.1)"]
confluence = ["atlassian-python-api"]
csv = ["pandas"]
delta-table = ["deltalake", "fsspec (==2023.9.1)"]
discord = ["discord-py"]
-doc = ["python-docx"]
-docx = ["python-docx"]
+doc = ["python-docx (>=1.0.1)"]
+docx = ["python-docx (>=1.0.1)"]
dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"]
elasticsearch = ["elasticsearch", "jq"]
+embed-huggingface = ["huggingface", "langchain", "sentence-transformers"]
epub = ["ebooklib"]
gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"]
github = ["pygithub (>1.58.0)"]
gitlab = ["python-gitlab"]
google-drive = ["google-api-python-client"]
huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"]
-image = ["pdf2image", "pdfminer.six", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)"]
+image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"]
jira = ["atlassian-python-api"]
-local-inference = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)", "xlrd"]
+local-inference = ["ebooklib", "markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.0.1)", "python-pptx (<=0.6.21)", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)", "xlrd"]
md = ["markdown"]
msg = ["msg-parser"]
notion = ["htmlBuilder", "notion-client"]
-odt = ["pypandoc", "python-docx"]
+odt = ["pypandoc", "python-docx (>=1.0.1)"]
onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"]
openai = ["langchain", "openai", "tiktoken"]
org = ["pypandoc"]
outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"]
paddleocr = ["unstructured.paddleocr (==2.6.1.3)"]
-pdf = ["pdf2image", "pdfminer.six", "unstructured-inference (==0.5.31)", "unstructured.pytesseract (>=0.3.12)"]
+pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.10)", "unstructured.pytesseract (>=0.3.12)"]
ppt = ["python-pptx (<=0.6.21)"]
pptx = ["python-pptx (<=0.6.21)"]
reddit = ["praw"]
@@ -2614,17 +2761,17 @@ sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"]
slack = ["slack-sdk"]
tsv = ["pandas"]
wikipedia = ["wikipedia"]
-xlsx = ["openpyxl", "pandas", "xlrd"]
+xlsx = ["networkx", "openpyxl", "pandas", "xlrd"]
[[package]]
name = "urllib3"
-version = "1.26.17"
+version = "1.26.18"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
- {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"},
- {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"},
+ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+ {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
]
[package.extras]
@@ -2794,4 +2941,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = ">=3.10.0,<3.11"
-content-hash = "bfe95f62da4f94b68ae1aa1d1c67fd61d2393d53855425cecb18989fb7acdfcb"
+content-hash = "a03356cd2f9c38ba657c562593239e8fc8c3119bcc619a2449061e260e585d54"

diff --git a/pyproject.toml b/pyproject.toml
index ab1adbb..17319e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,27 +11,27 @@ include = ["src/**/*", "LICENSE", "README.md"]
[tool.poetry.dependencies]
python = ">=3.10.0,<3.11"
-numpy = "^1.25.2"
+numpy = "^1.26.1"
wandb = "^0.15.12"
openai = "^0.28.1"
tiktoken = "^0.5.1"
pandas = "^2.0.3"
-unstructured = "^0.10.5"
+unstructured = "^0.10.26"
pydantic-settings = "^2.0.3"
-gitpython = "^3.1.32"
+gitpython = "^3.1.40"
giturlparse = "^0.12.0"
-scikit-learn = "^1.3.0"
+scikit-learn = "^1.3.2"
python-dotenv = "^1.0.0"
faiss-cpu = "^1.7.4"
slack-bolt = "^1.18.0"
slack-sdk = "^3.21.3"
discord = "^2.3.2"
-markdown = "^3.4.4"
+markdown = "^3.5"
fastapi = "^0.103.1"
langdetect = "^1.0.9"
-llama-index = "^0.8.36"
-tree-sitter-languages = "^1.7.0"
-cohere = "^4.27"
+llama-index = "^0.8.42"
+tree-sitter-languages = "^1.7.1"
+cohere = "^4.32"
markdownify = "^0.11.6"
uvicorn = "^0.23.2"

From 37e86e4b32fbe109658f4461878a686b66425abc Mon Sep 17 00:00:00 2001
From: Bharat Ramanathan
Date: Thu, 9 Nov 2023 19:02:09 +0530
Subject: [PATCH 6/7] fix: remove poetry run from replit deployment command

---
 run.sh | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/run.sh b/run.sh
index abe8e3c..746ac5c 100644
--- a/run.sh
+++ b/run.sh
@@ -1,3 +1,3 @@
-(poetry run uvicorn wandbot.api.app:app --host="0.0.0.0" --port=8000) & \
-(poetry run python -m wandbot.apps.slack) & \
-(poetry run python -m wandbot.apps.discord)
+(uvicorn wandbot.api.app:app --host="0.0.0.0" --port=8000) & \
+(python -m wandbot.apps.slack) & \
+(python -m wandbot.apps.discord)

From e01836dc2e1a972548365193fa22678166ddbca4 Mon Sep 17 00:00:00 2001
From: Bharat Ramanathan
Date: Thu, 9 Nov 2023 19:03:01 +0530
Subject: [PATCH 7/7] fix: remove installation from replit run command

---
 .replit | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.replit b/.replit
index 2f58dfd..93cbd6d 100644
--- a/.replit
+++ b/.replit
@@ -1,4 +1,4 @@
-run = "poetry lock --no-update && poetry install && bash run.sh" +run = "bash run.sh" entrypoint = "main.py" modules = ["python-3.10:v18-20230807-322e88b"]