From 896906cd3fcc61274e75cbf0f8fd4e4f0d033878 Mon Sep 17 00:00:00 2001
From: RFOxbury
Date: Tue, 8 Oct 2024 16:55:33 +0100
Subject: [PATCH 01/25] Save topic info and produce names and descriptions for clusters

---
 .../pipeline/name_clusters.py   | 80 +++++++++++++++++++
 .../pipeline/topic_modelling.py | 11 ++-
 2 files changed, 85 insertions(+), 6 deletions(-)
 create mode 100644 dsp_interview_transcripts/pipeline/name_clusters.py

diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py
new file mode 100644
index 0000000..cbd7052
--- /dev/null
+++ b/dsp_interview_transcripts/pipeline/name_clusters.py
@@ -0,0 +1,80 @@
+import ast
+from langchain_community.chat_models import ChatOllama
+import pandas as pd
+
+from langchain_core.output_parsers import JsonOutputParser
+from pydantic import BaseModel, Field
+from langchain.prompts import PromptTemplate
+
+from dsp_interview_transcripts import logger, PROJECT_DIR
+
+class NameDescription(BaseModel):
+    name: str = Field(description="Informative name for this group of documents")
+    description: str = Field(description="Description of this group of documents")
+
+prompt = """
+    I have performed text clustering on some interviews where users were asked about their knowledge of
+    and opinions on different home heating options.
+    \n
+    One of the clusters contains the following user responses:
+    {docs}
+    The cluster is described by the following keywords: {keywords}
+    \n
+    Based on the information above, please provide a name and description for the cluster as a JSON object with two fields:
+    - name (a short, informative name for the cluster)
+    - description (a brief description of the cluster).
+    Example:
+    {{
+        "name": "Energy Efficiency",
+        "description": "This cluster focuses on users' concerns about energy efficiency when choosing home heating options."
+    }}
+    """
+
+parser = JsonOutputParser(pydantic_object=NameDescription)
+
+final_prompt = PromptTemplate(
+    template=prompt,
+    input_variables=["docs", "keywords"],
+    partial_variables={"format_instructions": parser.get_format_instructions()}
+    )
+
+if __name__ == "__main__":
+    topic_info = pd.read_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info.csv")
+
+    for col in ['Representation', 'KeyBERT', 'MMR', 'Representative_Docs']:
+        topic_info[col] = topic_info[col].apply(ast.literal_eval)
+
+
+    topics = topic_info["Topic"].unique()
+
+
+    for model in ["llama3.1", "llama3.2", "phi3"]:
+
+        logger.info(f"Obtaining names and descriptions with model: {model}")
+
+        ollama_model = ChatOllama(
+            model=model, temperature=0
+        )
+        llm_chain = final_prompt | ollama_model | parser
+
+        results = {}
+
+        for topic in topics:
+            if topic==-1:
+                continue
+            else:
+                logger.info(f"Processing topic {topic} with model {model}")
+                temp_df = topic_info[topic_info["Topic"] == topic]
+                docs = temp_df['Representative_Docs'].values[0]
+                keywords = temp_df['MMR'].values[0]
+                output = llm_chain.invoke({"docs":docs, "keywords":keywords})
+                results[topic] = output
+
+        topic_info[f'{model}_name'] = topic_info['Topic'].map(lambda x: results[x]['name'] if x in results else None)
+        topic_info[f'{model}_description'] = topic_info['Topic'].map(lambda x: results[x]['description'] if x in results else None)
+
+    logger.info("Saving output...")
+    topic_info.to_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info_with_names_descriptions.csv", index=False)
+    logger.info("Done!")
+
+
\ No newline at end of file
diff --git a/dsp_interview_transcripts/pipeline/topic_modelling.py b/dsp_interview_transcripts/pipeline/topic_modelling.py
index b63ed79..5500571 100644
--- a/dsp_interview_transcripts/pipeline/topic_modelling.py
+++ b/dsp_interview_transcripts/pipeline/topic_modelling.py
@@ -77,15 +77,11 @@
     # MMR
     mmr_model = MaximalMarginalRelevance(diversity=0.3)
 
-    # GPT-3.5
-    # openai_model = get_openai_model()
 
     # All representation models
     representation_model = {
         "KeyBERT": keybert_model,
-        # "OpenAI": openai_model,  # Uncomment if you will use OpenAI
        "MMR": mmr_model,
-        # "POS": pos_model,
     }
 
     topic_model = BERTopic(
@@ -108,8 +104,6 @@
 
     topics, probs = topic_model.fit_transform(docs, embeddings)
 
-    # save topics
-    # save probs
     # save topic model
     topic_model.save(
         PROJECT_DIR / f"outputs/topic_model_{source}_cluster_size_{cluster_size}",
@@ -119,6 +113,11 @@
     )
 
     rep_docs = topic_model.get_representative_docs()
+
+    # Save representative docs and keywords
+    # (These will be used to obtain summaries)
+    topic_info = pd.DataFrame(topic_model.get_topic_info())
+    topic_info.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_topic_info.csv", index=False)
 
     umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2)
     embeddings_2d = umap_2d.fit_transform(embeddings)

From 866bcb2ba4d5dfb6612203555bb81763a5a7cb60 Mon Sep 17 00:00:00 2001
From: RFOxbury
Date: Tue, 8 Oct 2024 16:56:21 +0100
Subject: [PATCH 02/25] Lint files

---
 dsp_interview_transcripts/pipeline/name_clusters.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py
index cbd7052..6784cca 100644
--- a/dsp_interview_transcripts/pipeline/name_clusters.py
+++ b/dsp_interview_transcripts/pipeline/name_clusters.py
@@ -77,4 +77,4 @@ class NameDescription(BaseModel):
     topic_info.to_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info_with_names_descriptions.csv", index=False)
"outputs/user_messages_cluster_size_10_topic_info_with_names_descriptions.csv", index=False) logger.info("Done!") - \ No newline at end of file + From 3f104f9a1a0467322c27bc7b732c86dea3745657 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Wed, 9 Oct 2024 09:08:58 +0100 Subject: [PATCH 03/25] Add notebook on semantic chunking --- .../notebooks/semantic_chunking.ipynb | 157 ++++++++++++++++++ 1 file changed, 157 insertions(+) create mode 100644 dsp_interview_transcripts/notebooks/semantic_chunking.ipynb diff --git a/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb b/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb new file mode 100644 index 0000000..d10d87b --- /dev/null +++ b/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb @@ -0,0 +1,157 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook shows how we could use the langchain SemanticChunker to split up text data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_experimental.text_splitter import SemanticChunker\n", + "from langchain_huggingface import HuggingFaceEmbeddings\n", + "from langchain.schema import Document\n", + "\n", + "import pandas as pd\n", + "\n", + "from dsp_interview_transcripts import PROJECT_DIR\n", + "from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Read in the raw data\n", + "data = pd.read_csv(PROJECT_DIR / 'data/qual_af_transcripts.csv')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Clean up the text a little and move audio transcriptions to the text column\n", + "interviews_df = clean_data(data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Make sure the conversations are sorted by time, so that the replies go in the right order\n", + "interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp)\n", + "interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean'))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Turn every conversation into one big block of text (mimics the format of other interview/focus group transcripts we might see)\n", + "df_grouped = interviews_df.groupby('conversation')['text_clean'].apply(lambda x: '. '.join(x)).reset_index()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Take the first conversation as a guinea pig\n", + "text1 = df_grouped['text_clean'][0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Turn it into a langchain document\n", + "doc = Document(page_content=text1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Define the model we'll use to generate embeddings. 
The SemanticChunker documentation suggests OpenAI embeddings\n", + "# but we can just use HF embeddings\n", + "model_name = \"sentence-transformers/all-MiniLM-L6-v2\"\n", + "\n", + "chunker = SemanticChunker(HuggingFaceEmbeddings(model_name=model_name), breakpoint_threshold_type=\"percentile\")\n", + "\n", + "chunked_docs = chunker.split_documents([doc])\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "See [the documentation](https://python.langchain.com/docs/how_to/semantic-chunker/) for info on different breakpoints. Percentile is the default.\n", + "\n", + "See also [this notebook](https://github.com/FullStackRetrieval-com/RetrievalTutorials/blob/main/tutorials/LevelsOfTextSplitting/5_Levels_Of_Text_Splitting.ipynb).\n", + "\n", + "Note that [the documentation](https://api.python.langchain.com/en/latest/text_splitter/langchain_experimental.text_splitter.SemanticChunker.html) suggests you can manipulate:\n", + "* the exact numerical value of the breakpoint threshold\n", + "* the regex for sentence delimiters\n", + "* the number of chunks if you have a sense of what this would be for your document\n", + "\n", + "\n", + "Also, the documentation says that the chunker will look at windows of 3 sentences, but the [source code](https://github.com/langchain-ai/langchain-experimental/blob/main/libs/experimental/langchain_experimental/text_splitter.py) makes it look like it just takes 1 sentence at a time by default?" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chunked_docs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 98b08c26dae1dbe2bbdb594d71642d2973dd3a9b Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Wed, 9 Oct 2024 17:30:24 +0100 Subject: [PATCH 04/25] Check buffer sizes --- .../notebooks/semantic_chunking.ipynb | 117 ++++++++++++++---- 1 file changed, 96 insertions(+), 21 deletions(-) diff --git a/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb b/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb index d10d87b..6a67a04 100644 --- a/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb +++ b/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb @@ -17,12 +17,52 @@ "from langchain_huggingface import HuggingFaceEmbeddings\n", "from langchain.schema import Document\n", "\n", + "import matplotlib.pyplot as plt\n", "import pandas as pd\n", "\n", "from dsp_interview_transcripts import PROJECT_DIR\n", "from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from sentence_transformers import SentenceTransformer\n", + "from sklearn.metrics.pairwise import cosine_similarity\n", + "\n", + "# Load model\n", + "small_model = SentenceTransformer('paraphrase-MiniLM-L3-v2')\n", + "\n", + "# Embed the target sentence\n", + "target_sentence = \"Are these instructions clear or do you need any further clarification?\"\n", + "target_embedding = 
+    "\n",
+    "# Function to process each conversation\n",
+    "def remove_preamble(df, target_embedding=target_embedding, model=small_model):\n",
+    "    \"\"\"Get rid of everything up until the bot asks if the instructions are clear\n",
+    "    \"\"\"\n",
+    "    # Filter BOT messages\n",
+    "    bot_messages = df[df['role'] == 'BOT']\n",
+    "    \n",
+    "    # Embed BOT messages\n",
+    "    bot_embeddings = model.encode(bot_messages['text_clean'].tolist())\n",
+    "    \n",
+    "    # Calculate cosine similarity\n",
+    "    similarities = cosine_similarity(target_embedding, bot_embeddings).flatten()\n",
+    "    \n",
+    "    # Find the index of the most similar BOT message\n",
+    "    most_similar_idx = similarities.argmax()\n",
+    "    \n",
+    "    # Get the timestamp of that message\n",
+    "    cutoff_timestamp = bot_messages.iloc[most_similar_idx]['timestamp_clean']\n",
+    "    \n",
+    "    # Filter out messages prior to the cutoff timestamp\n",
+    "    return df[df['timestamp_clean'] > cutoff_timestamp]"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "# Read in the raw data\n",
     "data = pd.read_csv(PROJECT_DIR / 'data/qual_af_transcripts.csv')"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "# Clean up the text a little and move audio transcriptions to the text column\n",
     "interviews_df = clean_data(data)"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "# Make sure the conversations are sorted by time, so that the replies go in the right order\n",
     "interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp)\n",
     "interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean'))"
    ]
   },
@@ -60,8 +100,25 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Turn every conversation into one big block of text (mimics the format of other interview/focus group transcripts we might see)\n",
-    "df_grouped = interviews_df.groupby('conversation')['text_clean'].apply(lambda x: '. '.join(x)).reset_index()"
+    "interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True)"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Take the first conversation as a guinea pig\n",
-    "text1 = df_grouped['text_clean'][0]"
+    "len(interviews_df) - len(interviews_cleaned_df)"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Turn it into a langchain document\n",
-    "doc = Document(page_content=text1)"
+    "len(interviews_cleaned_df)"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "interviews_cleaned_df.head()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Turn every conversation into one big block of text (mimics the format of other interview/focus group transcripts we might see)\n",
+    "df_grouped = interviews_df.groupby('conversation')['text_clean'].apply(lambda x: '. '.join(x)).reset_index()"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Define the model we'll use to generate embeddings. The SemanticChunker documentation suggests OpenAI embeddings\n",
-    "# but we can just use HF embeddings\n",
     "model_name = \"sentence-transformers/all-MiniLM-L6-v2\"\n",
+    "buffer_sizes = [1, 2, 3]\n",
+    "\n",
+    "all_results = {}\n",
+    "actual_chunks = {}\n",
+    "\n",
+    "for buffer_size in buffer_sizes:\n",
+    "    chunker = SemanticChunker(HuggingFaceEmbeddings(model_name=model_name), \n",
+    "                              breakpoint_threshold_type=\"percentile\", \n",
+    "                              buffer_size=buffer_size)\n",
+    "    results = {}\n",
     "\n",
-    "chunker = SemanticChunker(HuggingFaceEmbeddings(model_name=model_name), breakpoint_threshold_type=\"percentile\")\n",
+    "    for idx, row in df_grouped.iterrows():\n",
+    "        text = row['text_clean']\n",
+    "        conv_id = row['conversation']\n",
+    "        # Turn it into a langchain document\n",
+    "        doc = Document(page_content=text)\n",
+    "        chunked_docs = chunker.split_documents([doc])\n",
+    "        results[conv_id] = [x.model_dump() for x in chunked_docs]\n",
+    "    \n",
+    "    \n",
+    "    all_results[buffer_size] = [len(chunk_list) for chunk_list in results.values()]\n",
+    "    actual_chunks[buffer_size] = results\n",
     "\n",
-    "chunked_docs = chunker.split_documents([doc])\n"
+    "fig, axes = plt.subplots(nrows=1, ncols=len(buffer_sizes), figsize=(15, 5), sharey=True)\n",
+    "\n",
+    "for ax, buffer_size in zip(axes, buffer_sizes):\n",
+    "    lengths = all_results[buffer_size]\n",
+    "    ax.hist(lengths, bins=20, alpha=0.7)\n",
+    "    ax.set_title(f'Buffer Size {buffer_size}')\n",
+    "    ax.set_xlabel('Number of Chunks')\n",
+    "    ax.set_ylabel('Frequency')\n",
+    "\n",
+    "plt.suptitle('Distribution of List Lengths for Different Buffer Sizes', fontsize=16)\n",
+    "plt.tight_layout(rect=[0, 0, 1, 0.95])\n",
+    "plt.show()"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "See [the documentation](https://python.langchain.com/docs/how_to/semantic-chunker/) for info on different breakpoints. Percentile is the default.\n",
     "\n",
     "See also [this notebook](https://github.com/FullStackRetrieval-com/RetrievalTutorials/blob/main/tutorials/LevelsOfTextSplitting/5_Levels_Of_Text_Splitting.ipynb).\n",
     "\n",
     "Note that [the documentation](https://api.python.langchain.com/en/latest/text_splitter/langchain_experimental.text_splitter.SemanticChunker.html) suggests you can manipulate:\n",
     "* the exact numerical value of the breakpoint threshold\n",
     "* the regex for sentence delimiters\n",
     "* the number of chunks if you have a sense of what this would be for your document\n",
     "\n",
-    "\n",
-    "Also, the documentation says that the chunker will look at windows of 3 sentences, but the [source code](https://github.com/langchain-ai/langchain-experimental/blob/main/libs/experimental/langchain_experimental/text_splitter.py) makes it look like it just takes 1 sentence at a time by default?"
+    "`buffer_size` = the number of sentences either side to include. So if `buffer_size` is 1, you will get 3 sentences in each group."
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": []
   }
  ],
  "metadata": {
   "kernelspec": {
    "display_name": ".venv",
    "language": "python",
    "name": "python3"
   },
   "language_info": {
    "codemirror_mode": {
     "name": "ipython",
     "version": 3
    },
    "file_extension": ".py",
    "mimetype": "text/x-python",
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
    "version": "3.11.4"
   }
  },
  "nbformat": 4,
  "nbformat_minor": 2
 }

From ab0be6378b904022ce10b26c90425fcdbc478d0a Mon Sep 17 00:00:00 2001
From: RFOxbury
Date: Fri, 11 Oct 2024 17:56:07 +0100
Subject: [PATCH 05/25] Expand the pipeline:

* Add cluster naming/summarisation step
* Add basic semantic chunking script for comparison with other datasets

---
 dsp_interview_transcripts/pipeline/app.py | 37 ++++++--
 .../pipeline/chunk_interviews.py          | 86 +++++++++++++++++
 .../compare_models_for_topic_naming.py    | 80 ++++++++++++++++
 .../pipeline/name_clusters.py             | 95 ++++++++++++-----
 .../pipeline/process_data.py              |  5 +-
 .../pipeline/run_pipeline.py              |  2 +
 .../pipeline/topic_modelling.py           | 14 ++-
 .../utils/data_cleaning.py                | 31 ++++++
 8 files changed, 314 insertions(+), 36 deletions(-)
 create mode 100644 dsp_interview_transcripts/pipeline/chunk_interviews.py
 create mode 100644 dsp_interview_transcripts/pipeline/compare_models_for_topic_naming.py

diff --git a/dsp_interview_transcripts/pipeline/app.py b/dsp_interview_transcripts/pipeline/app.py
index 73bda9a..48ef079 100644
--- a/dsp_interview_transcripts/pipeline/app.py
+++ b/dsp_interview_transcripts/pipeline/app.py
@@ -5,7 +5,7 @@
 streamlit run dsp_interview_transcripts/pipeline/app.py
 ```
 """
-
+import ast
 import streamlit as st
 import pandas as pd
 import altair as alt
@@ -18,33 +18,46 @@
 INTERVIEW_SECTIONS = [f"interview_q_{i}" for i in range(-1, 10) if i!=4]
 DATA_SOURCES = ["user_messages",
                 "q_and_a",
+                "interviews_chunked",
                 ] + INTERVIEW_SECTIONS
 
 # Input options
 data_source = st.selectbox("Select Data Source", DATA_SOURCES)
-minimum_cluster_size = st.selectbox("Select Minimum Cluster Size", [10, 50, 100])
+minimum_cluster_size = st.selectbox("Select Minimum Cluster Size", [5, 10, 20, 50, 100])
 
 # Validation: Prevent selection of '100' and 'home_upgrades_user' together
 if data_source in INTERVIEW_SECTIONS and minimum_cluster_size > 10:
     st.warning("For one of the 'interview_q' sources, you can only select a min. cluster size of 10. Please select a different combination.")
+elif data_source == "interviews_chunked" and minimum_cluster_size > 20:
+    st.warning("For 'interviews_chunked', you can only select a min. cluster size of 20. Please select a different combination.")
 else:
 
     # Filepath based on user inputs
-    file_path = PROJECT_DIR / f"outputs/{data_source}_cluster_size_{minimum_cluster_size}_vis.csv"
-
+    vis_file_path = PROJECT_DIR / f"outputs/{data_source}_cluster_size_{minimum_cluster_size}_vis.csv"
+    topic_file_path = PROJECT_DIR / f"outputs/{data_source}_cluster_size_{minimum_cluster_size}_topic_info_with_names_descriptions.csv"
+
     # Load the CSV data
     @st.cache
     def load_data(file_path):
         return pd.read_csv(file_path)
 
     # Load the data and display it
-    if file_path.exists():
-        df_vis = load_data(file_path)
+    if vis_file_path.exists():
+        df_vis = load_data(vis_file_path)
     else:
-        st.error(f"File not found: {file_path}")
-
+        st.error(f"File not found: {vis_file_path}")
+
+    if topic_file_path.exists():
+        topic_info = load_data(topic_file_path)
+        topic_info['Representative_Docs'] = topic_info['Representative_Docs'].apply(ast.literal_eval)
+    else:
+        st.error(f"File not found: {topic_file_path}")
+
     # Altair plotting
     if not df_vis.empty:
+
+        topic_list = list(df_vis["topic"].unique())
+
         # Define opacity condition
         opacity_condition = alt.condition(
             alt.datum.topic == -1, alt.value(0.1), alt.value(0.4)
         )
@@ -65,7 +78,7 @@
             ),
             color=alt.Color("Name:N"),
             opacity=opacity_condition,
-            tooltip=["Name:N", "doc:N"],
+            tooltip=["Name:N", "Topic:N","doc:N"],
         )
         .properties(width=900, height=600)
         .interactive()
@@ -73,3 +86,9 @@
 
     # Display the plot in the Streamlit app
     st.altair_chart(fig, use_container_width=True)
+
+    selected_topic = st.selectbox("Select topic", topic_list)
+
+    st.table(topic_info[topic_info["Topic"] == selected_topic][["llama3.2_name", "llama3.2_description", "Name", "MMR"]])
+
+    st.table(topic_info[topic_info["Topic"] == selected_topic][["Representative_Docs"]].explode("Representative_Docs"))
diff --git a/dsp_interview_transcripts/pipeline/chunk_interviews.py b/dsp_interview_transcripts/pipeline/chunk_interviews.py
new file mode 100644
index 0000000..55a7527
--- /dev/null
+++ b/dsp_interview_transcripts/pipeline/chunk_interviews.py
@@ -0,0 +1,86 @@
+from langchain_experimental.text_splitter import SemanticChunker
+from langchain_huggingface import HuggingFaceEmbeddings
+from langchain.schema import Document
+
+import matplotlib.pyplot as plt
+import pandas as pd
+
+from dsp_interview_transcripts import PROJECT_DIR
+from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, remove_preamble, add_text_length
+
+MODEL = "sentence-transformers/all-MiniLM-L6-v2"
+BUFFER_SIZE = 1
+
+chunker = SemanticChunker(HuggingFaceEmbeddings(model_name=MODEL),
+                          breakpoint_threshold_type="percentile",
+                          buffer_size=BUFFER_SIZE)
+
+def chunk_with_uuids(pairs):
+    # Turn every conversation into one big block of text (take the second item from every uuid-text tuple)
+    text = '. '.join([t[1] for t in pairs])
+
+    # Apply the semantic chunker to the text
+    doc = Document(page_content=text)
+    chunks = chunker.split_documents([doc])
+
+    # For each chunk, find the corresponding UUIDs
+    chunk_uuid_mapping = []
+
+    for chunk in chunks:
+        # For each chunk, identify the corresponding UUID(s)
+        uuids_in_chunk = []
+        chunk_text = chunk.page_content
+
+        # Iterate through the original pairs to find corresponding UUIDs
+        for uuid, text_segment in pairs:
+            if text_segment in chunk_text:
+                uuids_in_chunk.append(uuid)
+
+        # Append to the mapping (chunk text -> corresponding UUIDs)
+        chunk_uuid_mapping.append({
+            'chunk_text': chunk_text,
+            'uuids': uuids_in_chunk
+        })
+
+    return chunk_uuid_mapping
+
+if __name__ == "__main__":
+    # Read in the raw data
+    data = pd.read_csv(PROJECT_DIR / 'data/qual_af_transcripts.csv')
+
+    # Clean up the text a little and move audio transcriptions to the text column
+    interviews_df = clean_data(data)
+
+    # Make sure the conversations are sorted by time, so that the replies go in the right order
+    interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp)
+    interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean'))
+
+    # Remove everything up to when bot asks if the instructions are clear - everything before is just noise
+    interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True)
+
+    df_grouped = interviews_cleaned_df.groupby('conversation').apply(lambda x: list(zip(x['uuid'], x['text_clean']))).reset_index(name='uuid_text_pairs')
+
+    results = {}
+
+    for idx, row in df_grouped.iterrows():
+        conv = row['conversation']
+        pairs = row['uuid_text_pairs']
+        chunk_mapping = chunk_with_uuids(pairs)
+        results[conv] = chunk_mapping
+
+    flattened_data = []
+    for conversation, chunks in results.items():
+        for chunk in chunks:
+            flattened_data.append({
+                'conversation': conversation,
+                'chunk_text': chunk['chunk_text'],
+                'uuids': chunk['uuids']
+            })
+
+    df = pd.DataFrame(flattened_data)
+
+    df = add_text_length(df, "chunk_text")
+    # Remove chunks that are too short
+    df = df[df['text_length'] > 4]
+
+    df.to_csv(PROJECT_DIR / 'data/interviews_chunked.csv', index=False)
\ No newline at end of file
diff --git a/dsp_interview_transcripts/pipeline/compare_models_for_topic_naming.py b/dsp_interview_transcripts/pipeline/compare_models_for_topic_naming.py
new file mode 100644
index 0000000..6784cca
--- /dev/null
+++ b/dsp_interview_transcripts/pipeline/compare_models_for_topic_naming.py
@@ -0,0 +1,80 @@
+import ast
+from langchain_community.chat_models import ChatOllama
+import pandas as pd
+
+from langchain_core.output_parsers import JsonOutputParser
+from pydantic import BaseModel, Field
+from langchain.prompts import PromptTemplate
+
+from dsp_interview_transcripts import logger, PROJECT_DIR
+
+class NameDescription(BaseModel):
+    name: str = Field(description="Informative name for this group of documents")
+    description: str = Field(description="Description of this group of documents")
+
+prompt = """
+    I have performed text clustering on some interviews where users were asked about their knowledge of
+    and opinions on different home heating options.
+    \n
+    One of the clusters contains the following user responses:
+    {docs}
+    The cluster is described by the following keywords: {keywords}
+    \n
+    Based on the information above, please provide a name and description for the cluster as a JSON object with two fields:
+    - name (a short, informative name for the cluster)
+    - description (a brief description of the cluster).
+    Example:
+    {{
+        "name": "Energy Efficiency",
+        "description": "This cluster focuses on users' concerns about energy efficiency when choosing home heating options."
+    }}
+    """
+
+parser = JsonOutputParser(pydantic_object=NameDescription)
+
+final_prompt = PromptTemplate(
+    template=prompt,
+    input_variables=["docs", "keywords"],
+    partial_variables={"format_instructions": parser.get_format_instructions()}
+    )
+
+if __name__ == "__main__":
+    topic_info = pd.read_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info.csv")
+
+    for col in ['Representation', 'KeyBERT', 'MMR', 'Representative_Docs']:
+        topic_info[col] = topic_info[col].apply(ast.literal_eval)
+
+
+    topics = topic_info["Topic"].unique()
+
+
+    for model in ["llama3.1", "llama3.2", "phi3"]:
+
+        logger.info(f"Obtaining names and descriptions with model: {model}")
+
+        ollama_model = ChatOllama(
+            model=model, temperature=0
+        )
+        llm_chain = final_prompt | ollama_model | parser
+
+        results = {}
+
+        for topic in topics:
+            if topic==-1:
+                continue
+            else:
+                logger.info(f"Processing topic {topic} with model {model}")
+                temp_df = topic_info[topic_info["Topic"] == topic]
+                docs = temp_df['Representative_Docs'].values[0]
+                keywords = temp_df['MMR'].values[0]
+                output = llm_chain.invoke({"docs":docs, "keywords":keywords})
+                results[topic] = output
+
+        topic_info[f'{model}_name'] = topic_info['Topic'].map(lambda x: results[x]['name'] if x in results else None)
+        topic_info[f'{model}_description'] = topic_info['Topic'].map(lambda x: results[x]['description'] if x in results else None)
+
+    logger.info("Saving output...")
+    topic_info.to_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info_with_names_descriptions.csv", index=False)
+    logger.info("Done!")
+
+
diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py
index 6784cca..75db276 100644
--- a/dsp_interview_transcripts/pipeline/name_clusters.py
+++ b/dsp_interview_transcripts/pipeline/name_clusters.py
@@ -1,6 +1,10 @@
 import ast
+import glob
+import json
 from langchain_community.chat_models import ChatOllama
+import os
 import pandas as pd
+import re
 
 from langchain_core.output_parsers import JsonOutputParser
 from pydantic import BaseModel, Field
 
 prompt = """
     I have performed text clustering on some interviews where users were asked about their knowledge of
-    and opinions on different home heating options.
+    and opinions on different home heating options. These texts may contain user responses, interviewer questions,
+    or both.
     \n
-    One of the clusters contains the following user responses:
+    One of the clusters contains the following texts from the interviews:
     {docs}
     The cluster is described by the following keywords: {keywords}
     \n
     Based on the information above, please provide a name and description for the cluster as a JSON object with two fields:
     - name (a short, informative name for the cluster)
     - description (a brief description of the cluster).
+    \n
+    Provide nothing except for this JSON dict.
+    \n
     Example:
     {{
         "name": "Energy Efficiency",
         "description": "This cluster focuses on users' concerns about energy efficiency when choosing home heating options."
     }}
     """
 
 parser = JsonOutputParser(pydantic_object=NameDescription)
 
 final_prompt = PromptTemplate(
     template=prompt,
     input_variables=["docs", "keywords"],
     partial_variables={"format_instructions": parser.get_format_instructions()}
     )
 
+model="llama3.2"
+
+ollama_model = ChatOllama(
+    model=model, temperature=0
+    )
+
+llm_chain = final_prompt | ollama_model | parser
+
+INPUT_PATH = PROJECT_DIR / "outputs"
+
+
+def save_list_of_dicts_as_jsonl(data, output_path):
+    with open(output_path, "w") as f:
+        for entry in data:
+            json_line = json.dumps(entry)
+            f.write(json_line + "\n")
+
 if __name__ == "__main__":
-    topic_info = pd.read_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info.csv")
 
-    for col in ['Representation', 'KeyBERT', 'MMR', 'Representative_Docs']:
-        topic_info[col] = topic_info[col].apply(ast.literal_eval)
-
-
-    topics = topic_info["Topic"].unique()
+    files = glob.glob(os.path.join(INPUT_PATH, '**', '*topic_info.csv'), recursive=True)
 
+    filelist = []
+
+    for file in files:
+        filelist.append(os.path.basename(file))
 
-    for model in ["llama3.1", "llama3.2", "phi3"]:
-
-        logger.info(f"Obtaining names and descriptions with model: {model}")
+    output = []
+
+    for file in filelist:
+        split1 = re.split('_cluster_size', file)
+        source = split1[0]
+        split2 = re.split('_', split1[1])
+        cluster_size = split2[1]
+        output.append({'filename': file, 'source': source, 'cluster_size': cluster_size})
 
-        ollama_model = ChatOllama(
-            model=model, temperature=0
-        )
-        llm_chain = final_prompt | ollama_model | parser
+    files_df = pd.DataFrame(output)
+
+    errors = []
+
+    for file in filelist:
+        topic_info = pd.read_csv(PROJECT_DIR / f"outputs/{file}")
+        source = files_df[files_df['filename']==file]['source'].values[0]
+        cluster_size = files_df[files_df['filename']==file]['cluster_size'].values[0]
+        print(f"Processing {source} with cluster size {cluster_size}")
+
+        for col in ['Representation', 'KeyBERT', 'MMR', 'Representative_Docs']:
+            topic_info[col] = topic_info[col].apply(ast.literal_eval)
+
+        topics = topic_info["Topic"].unique()
 
         results = {}
 
         for topic in topics:
             if topic==-1:
                 continue
             else:
                 logger.info(f"Processing topic {topic} with model {model}")
                 temp_df = topic_info[topic_info["Topic"] == topic]
                 docs = temp_df['Representative_Docs'].values[0]
                 keywords = temp_df['MMR'].values[0]
-                output = llm_chain.invoke({"docs":docs, "keywords":keywords})
-                results[topic] = output
-
-        topic_info[f'{model}_name'] = topic_info['Topic'].map(lambda x: results[x]['name'] if x in results else None)
-        topic_info[f'{model}_description'] = topic_info['Topic'].map(lambda x: results[x]['description'] if x in results else None)
-
-    logger.info("Saving output...")
-    topic_info.to_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info_with_names_descriptions.csv", index=False)
-    logger.info("Done!")
+
+            try:
+                output = llm_chain.invoke({"docs": docs, "keywords": keywords})
+                results[topic] = output
+
+            except Exception as e:
+                logger.error(f"Error processing topic {topic}: {str(e)}")
+                errors.append({"file": file, "source": source, "cluster_size": cluster_size, "topic": topic, "error": str(e)})
+
+        ## Some complicated conditionals to check that what's in `results` can be parsed ##
+        topic_info[f'{model}_name'] = topic_info['Topic'].map(lambda x: results[x]['name'] if x in results and isinstance(results[x], dict) and 'name' in results[x] else None)
+        topic_info[f'{model}_description'] = topic_info['Topic'].map(lambda x: results[x]['description'] if x in results and isinstance(results[x], dict) and 'description' in results[x] else None)
+
+        logger.info("Saving output...")
+        topic_info.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_topic_info_with_names_descriptions.csv", index=False)
f"outputs/{source}_cluster_size_{cluster_size}_topic_info_with_names_descriptions.csv", index=False) + logger.info("Done!") + ## Save errors so that they can be loaded in and manually reprocessed if necessary ## + save_list_of_dicts_as_jsonl(errors, PROJECT_DIR/"outputs/interim/errors.jsonl") \ No newline at end of file diff --git a/dsp_interview_transcripts/pipeline/process_data.py b/dsp_interview_transcripts/pipeline/process_data.py index a62a294..261738a 100644 --- a/dsp_interview_transcripts/pipeline/process_data.py +++ b/dsp_interview_transcripts/pipeline/process_data.py @@ -1,7 +1,7 @@ import pandas as pd from dsp_interview_transcripts import PROJECT_DIR, logger -from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length +from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length, remove_preamble DATA_PATH = PROJECT_DIR / "data/qual_af_transcripts.csv" @@ -82,6 +82,9 @@ def create_q_and_a_column(data_df, text_col = 'text_clean', conversation_col='co interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp) interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean')) + # Remove everything up to when bot asks if the instructions are clear - everything before is just noise + interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True) + # Group together consecutive responses by the same role interviews_df = concatenate_consecutive_roles(interviews_df) diff --git a/dsp_interview_transcripts/pipeline/run_pipeline.py b/dsp_interview_transcripts/pipeline/run_pipeline.py index 456fc82..bb8a365 100644 --- a/dsp_interview_transcripts/pipeline/run_pipeline.py +++ b/dsp_interview_transcripts/pipeline/run_pipeline.py @@ -5,5 +5,7 @@ script_parent = Path(__file__).parent subprocess.run(f"python {script_parent / 'process_data.py'}", shell=True) +subprocess.run(f"python {script_parent / 'chunk_interviews.py'}", shell=True) subprocess.run(f"python {script_parent / 'segment_interviews.py'}", shell=True) subprocess.run(f"python {script_parent / 'topic_modelling.py'}", shell=True) +subprocess.run(f"python {script_parent / 'tname_clusters.py'}", shell=True) \ No newline at end of file diff --git a/dsp_interview_transcripts/pipeline/topic_modelling.py b/dsp_interview_transcripts/pipeline/topic_modelling.py index 5500571..b68ec71 100644 --- a/dsp_interview_transcripts/pipeline/topic_modelling.py +++ b/dsp_interview_transcripts/pipeline/topic_modelling.py @@ -30,9 +30,10 @@ INTERVIEW_SECTIONS = [f"interview_q_{i}" for i in range(-1, 10) if i != 4] # there were no hits for q4 for some reason DATA_SOURCES = ["user_messages", "q_and_a", + "interviews_chunked" ] + INTERVIEW_SECTIONS -MIN_CLUSTER_SIZES = [10, 50, 100] +MIN_CLUSTER_SIZES = [5, 10, 20, 50, 100] if __name__ == "__main__": @@ -40,11 +41,15 @@ for cluster_size in MIN_CLUSTER_SIZES: if source in INTERVIEW_SECTIONS and cluster_size>10: continue + if source=="interviews_chunked" and cluster_size>20: + continue df = pd.read_csv(PROJECT_DIR / f"data/{source}.csv") if source=="user_messages" or source in INTERVIEW_SECTIONS: text_col = "text_clean" + elif source=="interviews_chunked": + text_col = "chunk_text" else: text_col = "q_and_a" @@ -129,8 +134,13 @@ df_vis = df_vis.merge(topic_lookup, left_on="topic", right_on="Topic", how="left") df_vis["doc"] = docs + if source=="interviews_chunked": + base_df = 
df[['conversation',text_col]] + else: + base_df = df[["uuid","conversation",text_col]] + df_vis = pd.merge( - df[["uuid","conversation",text_col]], + base_df, df_vis, left_on=text_col, right_on="doc", diff --git a/dsp_interview_transcripts/utils/data_cleaning.py b/dsp_interview_transcripts/utils/data_cleaning.py index 2f99922..30ac581 100644 --- a/dsp_interview_transcripts/utils/data_cleaning.py +++ b/dsp_interview_transcripts/utils/data_cleaning.py @@ -3,6 +3,37 @@ import pandas as pd import re +from sentence_transformers import SentenceTransformer +from sklearn.metrics.pairwise import cosine_similarity + +# Load model +small_model = SentenceTransformer('paraphrase-MiniLM-L3-v2') + +# Embed the target sentence +TARGET_SENTENCE = "Are these instructions clear or do you need any further clarification?" +target_embedding = small_model.encode([TARGET_SENTENCE]) + +def remove_preamble(df, target_embedding=target_embedding, model=small_model): + """Get rid of everything up until the bot asks if the instructions are clear + """ + # Filter BOT messages + bot_messages = df[df['role'] == 'BOT'] + + # Embed BOT messages + bot_embeddings = model.encode(bot_messages['text_clean'].tolist()) + + # Calculate cosine similarity + similarities = cosine_similarity(target_embedding, bot_embeddings).flatten() + + # Find the index of the most similar BOT message + most_similar_idx = similarities.argmax() + + # Get the timestamp of that message + cutoff_timestamp = bot_messages.iloc[most_similar_idx]['timestamp_clean'] + + # Filter out messages prior to the cutoff timestamp + return df[df['timestamp_clean'] > cutoff_timestamp] + def convert_timestamp(timestamp): # Remove the daylight saving time '+01:00' and trailing whitespace cleaned_timestamp = re.sub('\+01[\:]?00$', '', timestamp) From 1b03982c64806e1e0543f05fcaab68a7d9580694 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Fri, 11 Oct 2024 17:56:49 +0100 Subject: [PATCH 06/25] Lint files --- dsp_interview_transcripts/pipeline/chunk_interviews.py | 2 +- dsp_interview_transcripts/pipeline/name_clusters.py | 2 +- dsp_interview_transcripts/pipeline/run_pipeline.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dsp_interview_transcripts/pipeline/chunk_interviews.py b/dsp_interview_transcripts/pipeline/chunk_interviews.py index 55a7527..91e3781 100644 --- a/dsp_interview_transcripts/pipeline/chunk_interviews.py +++ b/dsp_interview_transcripts/pipeline/chunk_interviews.py @@ -83,4 +83,4 @@ def chunk_with_uuids(pairs): # Remove chunks that are too short df = df[df['text_length'] > 4] - df.to_csv(PROJECT_DIR / 'data/interviews_chunked.csv', index=False) \ No newline at end of file + df.to_csv(PROJECT_DIR / 'data/interviews_chunked.csv', index=False) diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py index 75db276..609781e 100644 --- a/dsp_interview_transcripts/pipeline/name_clusters.py +++ b/dsp_interview_transcripts/pipeline/name_clusters.py @@ -124,4 +124,4 @@ def save_list_of_dicts_as_jsonl(data, output_path): logger.info("Done!") ## Save errors so that they can be loaded in and manually reprocessed if necessary ## - save_list_of_dicts_as_jsonl(errors, PROJECT_DIR/"outputs/interim/errors.jsonl") \ No newline at end of file + save_list_of_dicts_as_jsonl(errors, PROJECT_DIR/"outputs/interim/errors.jsonl") diff --git a/dsp_interview_transcripts/pipeline/run_pipeline.py b/dsp_interview_transcripts/pipeline/run_pipeline.py index bb8a365..7bfe8f2 100644 --- 
a/dsp_interview_transcripts/pipeline/run_pipeline.py +++ b/dsp_interview_transcripts/pipeline/run_pipeline.py @@ -8,4 +8,4 @@ subprocess.run(f"python {script_parent / 'chunk_interviews.py'}", shell=True) subprocess.run(f"python {script_parent / 'segment_interviews.py'}", shell=True) subprocess.run(f"python {script_parent / 'topic_modelling.py'}", shell=True) -subprocess.run(f"python {script_parent / 'tname_clusters.py'}", shell=True) \ No newline at end of file +subprocess.run(f"python {script_parent / 'tname_clusters.py'}", shell=True) From 756eac093f4b956d3fb4c1edba31d42ff46c7f55 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Fri, 11 Oct 2024 18:20:58 +0100 Subject: [PATCH 07/25] Link interview prompt questions in app --- dsp_interview_transcripts/pipeline/app.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/dsp_interview_transcripts/pipeline/app.py b/dsp_interview_transcripts/pipeline/app.py index 48ef079..78cf0bf 100644 --- a/dsp_interview_transcripts/pipeline/app.py +++ b/dsp_interview_transcripts/pipeline/app.py @@ -12,6 +12,18 @@ from dsp_interview_transcripts import PROJECT_DIR +QUESTIONS = ["What, if anything, do you know about the Boiler Upgrade Scheme? If you don't know anything about the scheme, just give it your best guess.", + "What, if anything, do you know about the process of applying for Boiler Upgrade Scheme funding?", + "What do you think are the eligibility requirements for someone to use this scheme?", + "How would you go about finding out more about the Boiler Upgrade Scheme", + "What do you think about there being eligiblity requirements for a scheme like this?", + "As a homeowner, where do you see yourself in relation to the eligibility requirements?", + "What, if any, type of work do you think needs to be done to a house to replace fossil fuel heating systems?", + "What types of home upgrades would you consider getting done to your house to improve the efficiency of your heating system?", + "What types of work to your house wouldn't you consider?", + "What are some energy-efficient heating systems that you could consider, apart from the one currently in use at your home?", + "Is there anything we've talked about you'd like to discuss further?"] + # Define the app layout st.title("Topic Modeling Visualization") @@ -23,6 +35,14 @@ # Input options data_source = st.selectbox("Select Data Source", DATA_SOURCES) + +if data_source in INTERVIEW_SECTIONS: + index = data_source.split("_")[-1] + if index=="-1": + st.info("These responses could not be matched to a question from the prompt.") + else: + st.info(QUESTIONS[int(index)]) + minimum_cluster_size = st.selectbox("Select Minimum Cluster Size", [5, 10, 20, 50, 100]) # Validation: Prevent selection of '100' and 'home_upgrades_user' together From ae5bec7e437fd909b25d6a212466b31fc3cca8d6 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Tue, 22 Oct 2024 15:18:59 +0100 Subject: [PATCH 08/25] Reorganise pipeline: * There is now one set of scripts for a bottom-up analysis * A streamlit app to visualise the output of the bottom-up analysis * One script to execute a top-down analysis --- dsp_interview_transcripts/pipeline/app.py | 200 +++++----- .../{ => archive}/chunk_interviews.py | 0 .../compare_models_for_topic_naming.py | 0 .../pipeline/archive/process_data.py | 99 +++++ .../{ => archive}/segment_interviews.py | 2 +- .../pipeline/archive/topic_modelling.py | 150 +++++++ .../pipeline/name_clusters.py | 107 +++-- .../pipeline/prep_output_tables.py | 95 +++++ .../pipeline/process_data.py | 191 ++++++++- 
 .../pipeline/run_pipeline.py                |  18 +-
 .../pipeline/top_down_analysis.py           | 251 ++++++++++++
 .../pipeline/topic_modelling.py             | 198 +++++-----
 poetry.lock                                 | 365 ++++++++++++------
 pyproject.toml                              |   5 +
 tests/test_concatenate_consecutive_roles.py |   2 +-
 tests/test_create_q_and_a_column.py         |   2 +-
 16 files changed, 1274 insertions(+), 411 deletions(-)
 rename dsp_interview_transcripts/pipeline/{ => archive}/chunk_interviews.py (100%)
 rename dsp_interview_transcripts/pipeline/{ => archive}/compare_models_for_topic_naming.py (100%)
 create mode 100644 dsp_interview_transcripts/pipeline/archive/process_data.py
 rename dsp_interview_transcripts/pipeline/{ => archive}/segment_interviews.py (98%)
 create mode 100644 dsp_interview_transcripts/pipeline/archive/topic_modelling.py
 create mode 100644 dsp_interview_transcripts/pipeline/prep_output_tables.py
 create mode 100644 dsp_interview_transcripts/pipeline/top_down_analysis.py

diff --git a/dsp_interview_transcripts/pipeline/app.py b/dsp_interview_transcripts/pipeline/app.py
index 78cf0bf..277122c 100644
--- a/dsp_interview_transcripts/pipeline/app.py
+++ b/dsp_interview_transcripts/pipeline/app.py
@@ -5,110 +5,106 @@
 streamlit run dsp_interview_transcripts/pipeline/app.py
 ```
 """
-import ast
 import streamlit as st
 import pandas as pd
-import altair as alt
+import seaborn as sns
+import matplotlib.pyplot as plt
 
 from dsp_interview_transcripts import PROJECT_DIR
 
-QUESTIONS = ["What, if anything, do you know about the Boiler Upgrade Scheme? If you don't know anything about the scheme, just give it your best guess.",
-             "What, if anything, do you know about the process of applying for Boiler Upgrade Scheme funding?",
-             "What do you think are the eligibility requirements for someone to use this scheme?",
-             "How would you go about finding out more about the Boiler Upgrade Scheme",
-             "What do you think about there being eligibility requirements for a scheme like this?",
-             "As a homeowner, where do you see yourself in relation to the eligibility requirements?",
-             "What, if any, type of work do you think needs to be done to a house to replace fossil fuel heating systems?",
-             "What types of home upgrades would you consider getting done to your house to improve the efficiency of your heating system?",
-             "What types of work to your house wouldn't you consider?",
-             "What are some energy-efficient heating systems that you could consider, apart from the one currently in use at your home?",
-             "Is there anything we've talked about you'd like to discuss further?"]
-
-# Define the app layout
-st.title("Topic Modeling Visualization")
-
-INTERVIEW_SECTIONS = [f"interview_q_{i}" for i in range(-1, 10) if i!=4]
-DATA_SOURCES = ["user_messages",
-                "q_and_a",
-                "interviews_chunked",
-                ] + INTERVIEW_SECTIONS
-
-# Input options
-data_source = st.selectbox("Select Data Source", DATA_SOURCES)
-
-if data_source in INTERVIEW_SECTIONS:
-    index = data_source.split("_")[-1]
-    if index=="-1":
-        st.info("These responses could not be matched to a question from the prompt.")
-    else:
-        st.info(QUESTIONS[int(index)])
-
-minimum_cluster_size = st.selectbox("Select Minimum Cluster Size", [5, 10, 20, 50, 100])
-
-# Validation: Prevent selection of '100' and 'home_upgrades_user' together
-if data_source in INTERVIEW_SECTIONS and minimum_cluster_size > 10:
-    st.warning("For one of the 'interview_q' sources, you can only select a min. cluster size of 10. Please select a different combination.")
-elif data_source == "interviews_chunked" and minimum_cluster_size > 20:
-    st.warning("For 'interviews_chunked', you can only select a min. cluster size of 20. Please select a different combination.")
-else:
-
-    # Filepath based on user inputs
-    vis_file_path = PROJECT_DIR / f"outputs/{data_source}_cluster_size_{minimum_cluster_size}_vis.csv"
-    topic_file_path = PROJECT_DIR / f"outputs/{data_source}_cluster_size_{minimum_cluster_size}_topic_info_with_names_descriptions.csv"
-
-    # Load the CSV data
-    @st.cache
-    def load_data(file_path):
-        return pd.read_csv(file_path)
-
-    # Load the data and display it
-    if vis_file_path.exists():
-        df_vis = load_data(vis_file_path)
-    else:
-        st.error(f"File not found: {vis_file_path}")
-
-    if topic_file_path.exists():
-        topic_info = load_data(topic_file_path)
-        topic_info['Representative_Docs'] = topic_info['Representative_Docs'].apply(ast.literal_eval)
-    else:
-        st.error(f"File not found: {topic_file_path}")
-
-    # Altair plotting
-    if not df_vis.empty:
-
-        topic_list = list(df_vis["topic"].unique())
-
-        # Define opacity condition
-        opacity_condition = alt.condition(
-            alt.datum.topic == -1, alt.value(0.1), alt.value(0.4)
-        )
-
-        # Create plot
-        fig = (
-            alt.Chart(df_vis)
-            .mark_circle(size=50)
-            .encode(
-                x=alt.X(
-                    "x:Q",
-                    axis=alt.Axis(ticks=False, labels=False, title=None, grid=False),
-                ),
-                y=alt.Y(
-                    "y:Q",
-                    axis=alt.Axis(ticks=False, labels=False, title=None, grid=False),
-                ),
-                color=alt.Color("Name:N"),
-                opacity=opacity_condition,
-                tooltip=["Name:N", "Topic:N","doc:N"],
-            )
-            .properties(width=900, height=600)
-            .interactive()
-        )
-
-        # Display the plot in the Streamlit app
-        st.altair_chart(fig, use_container_width=True)
-
-    selected_topic = st.selectbox("Select topic", topic_list)
-
-    st.table(topic_info[topic_info["Topic"] == selected_topic][["llama3.2_name", "llama3.2_description", "Name", "MMR"]])
-
-    st.table(topic_info[topic_info["Topic"] == selected_topic][["Representative_Docs"]].explode("Representative_Docs"))
+rep_docs = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv")
+data = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics.csv")
+data_w_names = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv")
+
+rep_docs = pd.merge(rep_docs, data_w_names[['Cluster', 'llama3.2_name', 'llama3.2_description']], on="Cluster", how="left")
+
+data_viz = pd.merge(data, data_w_names[['Cluster', 'llama3.2_name', 'llama3.2_description']], on="Cluster", how="left")
+
+data_viz['Name'] = data_viz['llama3.2_name'].fillna("None")
+data_viz['Description'] = data_viz['llama3.2_description'].fillna("None")
+
+# Set up the Streamlit app layout
+st.title("Prevalence and Sentiment Analysis")
+
+# # Sidebar for user input
+# st.sidebar.header("Select Options")
+
+# First section: Displaying the side-by-side bar plots
+# st.subheader("Side-by-side Bar Plots")
+
+# # Creating the bar plot for the prevalence of each 'Name' using Seaborn
+# fig, ax = plt.subplots(1, 2, figsize=(12, 6))
+
+# # Prevalence of each 'Name'
+# sns.countplot(x='Name', data=data_viz, ax=ax[0])
+# ax[0].set_title('Prevalence of Each Name')
+# ax[0].set_xlabel('Name')
+# ax[0].set_ylabel('Count')
+
+# # Percentage of 'sentiment' under each value of 'Name' using Seaborn
+# sentiment_percentage = pd.crosstab(data_viz['Name'], data_viz['sentiment'], normalize='index') * 100
+# sentiment_percentage = sentiment_percentage.reset_index().melt(id_vars='Name', var_name='sentiment', value_name='percentage')
+# sns.barplot(x='Name', y='percentage', hue='sentiment', data=sentiment_percentage, ax=ax[1], estimator=sum)
+# ax[1].set_title('Percentage of Sentiment under Each Name')
+# ax[1].set_xlabel('Name')
+# ax[1].set_ylabel('Percentage')
+
+# # Displaying the plots side by side
+# st.pyplot(fig)
+
+# Creating the bar plot for the prevalence of each 'Name' using Seaborn
+fig, ax = plt.subplots(1, 2, figsize=(12, 6))
+
+# Prevalence of each 'Name' with 'Name' on the vertical axis
+sns.countplot(y='Name', data=data_viz, ax=ax[0])  # Changed 'x' to 'y' to flip the axis
+ax[0].set_title('Prevalence of Each Topic')
+ax[0].set_ylabel(None)
+ax[0].set_xlabel('Count')
+
+# Percentage of 'sentiment' under each value of 'Name' using Seaborn (stacked)
+sentiment_percentage = pd.crosstab(data_viz['Name'], data_viz['sentiment'], normalize='index') * 100
+sentiment_percentage = sentiment_percentage.reset_index()
+
+# Plotting the stacked bar chart with 'Name' on the vertical axis
+sentiment_percentage.plot(
+    kind='barh', stacked=True, x='Name', ax=ax[1], color=sns.color_palette("viridis",3)
+)
+ax[1].set_title('Percentage of Sentiment under Each Topic')
+ax[1].set_ylabel(None)
+ax[1].set_xlabel('Percentage')
+
+# Displaying the plots side by side
+plt.tight_layout()
+st.pyplot(fig)
+
+# Second section: User selects a 'Name' and displays a table
+st.subheader("Select a Topic and Display Corresponding Texts")
+
+# Dropdown for selecting a 'Name'
+selected_name = st.selectbox("Select a Topic", data_viz['Name'].unique())
+
+if selected_name != "None":
+    st.markdown(f"**Description of the selected topic:** \n {data_viz[data_viz['Name'] == selected_name]['Description'].values[0]}")
+
+    st.markdown(f"**Keywords for the selected topic:** \n {rep_docs[rep_docs['llama3.2_name']==selected_name]['Top Words'].drop_duplicates().values[0]}")
+
+# Filtering the dataframe to display the corresponding texts for the selected 'Name'
+temp_docs = rep_docs[rep_docs['llama3.2_name'] == selected_name]
+filtered_table = temp_docs[['conversation','context', 'text_clean']]
+st.table(filtered_table)
+
+# Third section: User selects a 'question' and displays a bar plot
+st.subheader("Select a Question and Display Prevalence of Different Topics")
+
+# Dropdown for selecting a 'question'
+selected_question = st.selectbox("Select a Question", data_viz['question'].unique())
+
+# Creating a bar plot for the prevalence of 'Name' for the selected question using Seaborn
+fig2, ax2 = plt.subplots(figsize=(8, 6))
+sns.countplot(y='Name', data=data_viz[data_viz['question'] == selected_question], ax=ax2)
+ax2.set_title(f'Prevalence of topics for "{selected_question}"')
+ax2.set_xlabel(None)
+ax2.set_ylabel(None)
+
+# Displaying the bar plot
+st.pyplot(fig2)
diff --git a/dsp_interview_transcripts/pipeline/chunk_interviews.py b/dsp_interview_transcripts/pipeline/archive/chunk_interviews.py
similarity index 100%
rename from dsp_interview_transcripts/pipeline/chunk_interviews.py
rename to dsp_interview_transcripts/pipeline/archive/chunk_interviews.py
diff --git a/dsp_interview_transcripts/pipeline/compare_models_for_topic_naming.py b/dsp_interview_transcripts/pipeline/archive/compare_models_for_topic_naming.py
similarity index 100%
rename from dsp_interview_transcripts/pipeline/compare_models_for_topic_naming.py
rename to dsp_interview_transcripts/pipeline/archive/compare_models_for_topic_naming.py
a/dsp_interview_transcripts/pipeline/archive/process_data.py b/dsp_interview_transcripts/pipeline/archive/process_data.py new file mode 100644 index 0000000..261738a --- /dev/null +++ b/dsp_interview_transcripts/pipeline/archive/process_data.py @@ -0,0 +1,99 @@ +import pandas as pd + +from dsp_interview_transcripts import PROJECT_DIR, logger +from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length, remove_preamble + +DATA_PATH = PROJECT_DIR / "data/qual_af_transcripts.csv" + +def concatenate_consecutive_roles(df, text_col='text_clean', conversation_col='conversation', role_col='role'): + """Concatenates consecutive rows with the same role within a conversation.""" + # Sort the dataframe to ensure correct order (if it's not already sorted) + df = df.sort_values(by=[conversation_col, 'timestamp']).reset_index(drop=True) + logger.info(f"Number of turns before concatenating consecutive roles: {len(df)}") + + # Create a mask to identify where the role changes or a new conversation starts + df['role_change'] = (df[conversation_col] != df[conversation_col].shift(1)) | (df[role_col] != df[role_col].shift(1)) + + # Assign group numbers to consecutive rows with the same role within the same conversation + df['turn'] = df['role_change'].cumsum() + + # Group by 'conversation' and 'group' to concatenate text + grouped = df.groupby([conversation_col, 'turn']).agg({ + 'timestamp': 'first', # Keep the first timestamp in each group + text_col: ' '.join, # Concatenate the 'text_clean' column + role_col: 'first', # Keep the role (either BOT or USER) + 'uuid': 'first' # Keep the first UUID in each group + }).reset_index() + + logger.info(f"Number of turns after concatenating consecutive roles: {len(grouped)}") + + grouped = grouped.drop(columns=['turn']) + + return grouped + +def create_q_and_a_column(data_df, text_col = 'text_clean', conversation_col='conversation'): + """Concatenates each user message with the immediately preceding bot message. + In cases where the user has given a really short response, this should provide some + helpful context. 
+ """ + # Initialize a list to store the Q&A + q_and_a_list = [] + + # Variable to keep track of the most recent BOT text + last_bot_text = "" + + df = data_df.copy() + + # Track the current conversation ID to know when it changes + current_conversation = None + + # Iterate over the rows of the DataFrame + for _, row in df.iterrows(): + + # Check if the conversation has changed + if current_conversation != row[conversation_col]: + current_conversation = row[conversation_col] + last_bot_text = "" # Reset the last bot text for a new conversation + + if row['role'] == 'BOT': + # Update the last bot text + last_bot_text = row[text_col] + q_and_a_list.append('') # No Q&A for BOT rows + elif row['role'] == 'USER': + # Combine the last bot text and current user text if there is a preceding bot text, + # otherwise just use the user text + q_and_a_list.append(f"{last_bot_text}\n{row[text_col]}" if last_bot_text else row[text_col]) + else: + # In case there's any other role, we leave it empty + q_and_a_list.append('') + + # Add the new 'q_and_a' column to the DataFrame + df['q_and_a'] = q_and_a_list + + return df + +if __name__ == "__main__": + interviews_df = pd.read_csv(DATA_PATH) + interviews_df = clean_data(interviews_df) + + logger.info(f"Number of interviews: {len(interviews_df['conversation'].unique())}") + + # Make sure the conversations are sorted by time, so that the replies go in the right order + interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp) + interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean')) + + # Remove everything up to when bot asks if the instructions are clear - everything before is just noise + interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True) + + # Group together consecutive responses by the same role + interviews_df = concatenate_consecutive_roles(interviews_df) + + interviews_df = create_q_and_a_column(interviews_df) + + interviews_df = add_text_length(interviews_df, 'text_clean') + + q_and_a_df = interviews_df[interviews_df['q_and_a'] != ''] + q_and_a_df.to_csv(PROJECT_DIR / "data/q_and_a.csv", index=False) + + user_messages = interviews_df[(interviews_df['role']=='USER') & (interviews_df['text_length'] >= 4)] # only answers at least 4 words long + user_messages.to_csv(PROJECT_DIR / "data/user_messages.csv", index=False) diff --git a/dsp_interview_transcripts/pipeline/segment_interviews.py b/dsp_interview_transcripts/pipeline/archive/segment_interviews.py similarity index 98% rename from dsp_interview_transcripts/pipeline/segment_interviews.py rename to dsp_interview_transcripts/pipeline/archive/segment_interviews.py index 377f9fe..020f6ef 100644 --- a/dsp_interview_transcripts/pipeline/segment_interviews.py +++ b/dsp_interview_transcripts/pipeline/archive/segment_interviews.py @@ -12,7 +12,7 @@ from dsp_interview_transcripts import PROJECT_DIR, logger from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length -from dsp_interview_transcripts.pipeline.process_data import concatenate_consecutive_roles +from dsp_interview_transcripts.pipeline.archive.process_data import concatenate_consecutive_roles RANDOM_SEED = 42 DATA_PATH = PROJECT_DIR / "data/qual_af_transcripts.csv" diff --git a/dsp_interview_transcripts/pipeline/archive/topic_modelling.py b/dsp_interview_transcripts/pipeline/archive/topic_modelling.py new file mode 100644 index 0000000..b68ec71 --- /dev/null 
+++ b/dsp_interview_transcripts/pipeline/archive/topic_modelling.py @@ -0,0 +1,150 @@ +from bertopic import BERTopic +import numpy as np +import pandas as pd +import random +from sentence_transformers import SentenceTransformer + +from hdbscan import HDBSCAN +from umap import UMAP +from sklearn.feature_extraction.text import CountVectorizer +from bertopic.representation import ( + KeyBERTInspired, + MaximalMarginalRelevance, + # OpenAI, + PartOfSpeech, +) + +from dsp_interview_transcripts import PROJECT_DIR + +# Set random seeds +RANDOM_SEED = 42 +np.random.seed(RANDOM_SEED) +random.seed(RANDOM_SEED) +# PyTorch seed (used by SentenceTransformer) +import torch + +torch.manual_seed(RANDOM_SEED) + +SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") + +INTERVIEW_SECTIONS = [f"interview_q_{i}" for i in range(-1, 10) if i != 4] # there were no hits for q4 for some reason +DATA_SOURCES = ["user_messages", + "q_and_a", + "interviews_chunked" + ] + INTERVIEW_SECTIONS + +MIN_CLUSTER_SIZES = [5, 10, 20, 50, 100] + +if __name__ == "__main__": + + for source in DATA_SOURCES: + for cluster_size in MIN_CLUSTER_SIZES: + if source in INTERVIEW_SECTIONS and cluster_size>10: + continue + if source=="interviews_chunked" and cluster_size>20: + continue + + df = pd.read_csv(PROJECT_DIR / f"data/{source}.csv") + + if source=="user_messages" or source in INTERVIEW_SECTIONS: + text_col = "text_clean" + elif source=="interviews_chunked": + text_col = "chunk_text" + else: + text_col = "q_and_a" + + umap_model = UMAP( + n_neighbors=15, + n_components=50, + min_dist=0.1, + metric="cosine", + random_state=RANDOM_SEED, + ) + + hdbscan_model = HDBSCAN( + min_cluster_size=cluster_size, + min_samples=1, + metric="euclidean", + cluster_selection_method="eom", + prediction_data=True, + ) + + vectorizer_model = CountVectorizer( + stop_words="english", + min_df=1, + max_df=0.85, + ngram_range=(1, 3), + ) + + # KeyBERT + keybert_model = KeyBERTInspired() + + # MMR + mmr_model = MaximalMarginalRelevance(diversity=0.3) + + + # All representation models + representation_model = { + "KeyBERT": keybert_model, + "MMR": mmr_model, + } + + topic_model = BERTopic( + # Pipeline models + embedding_model="sentence-transformers/all-MiniLM-L6-v2", + umap_model=umap_model, + hdbscan_model=hdbscan_model, + vectorizer_model=vectorizer_model, + representation_model=representation_model, + # Hyperparameters + top_n_words=10, + verbose=True, + calculate_probabilities=True, + ) + + # Convert NaNs to empty strings + df[text_col] = df[text_col].astype(str) + docs = df[text_col].tolist() + embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) + + topics, probs = topic_model.fit_transform(docs, embeddings) + + # save topic model + topic_model.save( + PROJECT_DIR / f"outputs/topic_model_{source}_cluster_size_{cluster_size}", + serialization="pytorch", + save_ctfidf=True, + save_embedding_model=SENTENCE_MODEL, + ) + + rep_docs = topic_model.get_representative_docs() + + # Save representative docs and keywords + # (These will be used to obtain summaries) + topic_info = pd.DataFrame(topic_model.get_topic_info()) + topic_info.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_topic_info.csv", index=False) + + umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2) + embeddings_2d = umap_2d.fit_transform(embeddings) + + topic_lookup = topic_model.get_topic_info()[["Topic", "Name"]] + + df_vis = pd.DataFrame(embeddings_2d, columns=["x", "y"]) + df_vis["topic"] = topics + df_vis = df_vis.merge(topic_lookup, left_on="topic", 
right_on="Topic", how="left") + df_vis["doc"] = docs + + if source=="interviews_chunked": + base_df = df[['conversation',text_col]] + else: + base_df = df[["uuid","conversation",text_col]] + + df_vis = pd.merge( + base_df, + df_vis, + left_on=text_col, + right_on="doc", + how="outer", + ) + + df_vis.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_vis.csv", index=False) diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py index 609781e..c73e4b6 100644 --- a/dsp_interview_transcripts/pipeline/name_clusters.py +++ b/dsp_interview_transcripts/pipeline/name_clusters.py @@ -3,8 +3,10 @@ import json from langchain_community.chat_models import ChatOllama import os +import numpy as np import pandas as pd import re +from typing import List from langchain_core.output_parsers import JsonOutputParser from pydantic import BaseModel, Field @@ -18,23 +20,24 @@ class NameDescription(BaseModel): prompt = """ I have performed text clustering on some interviews where users were asked about their knowledge of - and opinions on different home heating options. These texts may contain user responses, interviewer questions, - or both. + and opinions on different home heating options. In the interview, users were asked about their knowledge + of the Boiler Upgrade Scheme, a scheme that provides a subsidy to homeowners wishing to install a heatpump + instead of getting a new gas boiler for their home. \n - One of the clusters contains the following texts from the interviews: + One of the clusters contains the following user responses from the interviews: {docs} The cluster is described by the following keywords: {keywords} \n - Based on the information above, please provide a name and description for the cluster as a JSON object with two fields: - - name (a short, informative name for the cluster) - - description (a brief description of the cluster). + Based on the information above, please provide a name and summary for the cluster as a JSON object with two fields: + - name: A short, informative name for the cluster + - description: A summary of views of users within the cluster. You can include sentiments they express, reasons for their views, their knowledge levels, and any other relevant information. \n Provide nothing except for this JSON dict. \n Example: {{ "name": "Energy Efficiency", - "description": "This cluster focuses on users' concerns about energy efficiency when choosing home heating options." + "description": "This cluster contains users responses about energy efficiency when choosing home heating options. The users have varying degrees of knowledge about the efficiency of different systems. Some reasons for wanting to improve efficiency include environmental concerns and cost concerns." 
     }}
     """
 
@@ -54,74 +57,58 @@ class NameDescription(BaseModel):
 llm_chain = final_prompt | ollama_model | parser
 
-INPUT_PATH = PROJECT_DIR / "outputs"
-
+INPUT_PATH = PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv"
 
 def save_list_of_dicts_as_jsonl(data, output_path):
     with open(output_path, "w") as f:
         for entry in data:
             json_line = json.dumps(entry)
             f.write(json_line + "\n")
+
+def name_topics(topic_info: pd.DataFrame, llm_chain, topics: List[str], text_col='text_clean', top_words_col = 'Top Words',
+                topic_label_col='Cluster') -> dict:
+    results = {}
+
+    for topic in topics:
+        logger.info(f"Processing topic {topic}")
+        temp_df = topic_info[topic_info[topic_label_col] == topic]
+        docs = temp_df[text_col].values[0]
+        logger.info(f"Docs: {docs}")
+        keywords = temp_df[top_words_col].values[0]
+        logger.info(f"Keywords: {keywords}")
+
+        try:
+            output = llm_chain.invoke({"docs": docs, "keywords": keywords})
+            print(output['name'], output['description'])
+            results[topic] = output
+
+        except Exception as e:
+            logger.error(f"Error processing topic {topic}: {str(e)}")
+            errors.append({"topic": topic, "error": str(e)})
+
+    return results
 
 if __name__ == "__main__":
-    files = glob.glob(os.path.join(INPUT_PATH, '**', '*topic_info.csv'), recursive=True)
-
-    filelist = []
-
-    for file in files:
-        filelist.append(os.path.basename(file))
-
-    output = []
-
-    for file in filelist:
-        split1 = re.split('_cluster_size', file)
-        source = split1[0]
-        split2 = re.split('_', split1[1])
-        cluster_size = split2[1]
-        output.append({'filename': file, 'source': source, 'cluster_size': cluster_size})
-
-    files_df = pd.DataFrame(output)
-
-    errors = []
-
-    for file in filelist:
-        topic_info = pd.read_csv(PROJECT_DIR / f"outputs/{file}")
-        source = files_df[files_df['filename']==file]['source'].values[0]
-        cluster_size = files_df[files_df['filename']==file]['cluster_size'].values[0]
-        print(f"Processing {source} with cluster size {cluster_size}")
-
-        for col in ['Representation', 'KeyBERT', 'MMR', 'Representative_Docs']:
-            topic_info[col] = topic_info[col].apply(ast.literal_eval)
+    topic_info = pd.read_csv(INPUT_PATH)
+
+    topic_info = topic_info.groupby(['Cluster', 'Top Words'])['text_clean'].apply(list).reset_index()
+    topic_info['Cluster'] = topic_info['Cluster'].astype(str)
 
-        topics = topic_info["Topic"].unique()
-
-        results = {}
+    topics = topic_info["Cluster"].unique().tolist()
+
+    errors = []  # collect failed topics here so they can be saved and reprocessed later
 
-        for topic in topics:
-            if topic==-1:
-                continue
-            else:
-                logger.info(f"Processing topic {topic} with model {model}")
-                temp_df = topic_info[topic_info["Topic"] == topic]
-                docs = temp_df['Representative_Docs'].values[0]
-                keywords = temp_df['MMR'].values[0]
-
-                try:
-                    output = llm_chain.invoke({"docs": docs, "keywords": keywords})
-                    results[topic] = output
-
-                except Exception as e:
-                    logger.error(f"Error processing topic {topic}: {str(e)}")
-                    errors.append({"file": file, "source": source, "cluster_size": cluster_size, "topic": topic, "error": str(e)})
+    results = name_topics(topic_info, llm_chain, topics, text_col='text_clean', top_words_col = 'Top Words',
+                          topic_label_col='Cluster')
 
-        ## Some complicated conditionals to check that what's in `results` can be parsed ##
-        topic_info[f'{model}_name'] = topic_info['Topic'].map(lambda x: results[x]['name'] if x in results and isinstance(results[x], dict) and 'name' in results[x] else None)
-        topic_info[f'{model}_description'] = topic_info['Topic'].map(lambda x: results[x]['description'] if x in results and isinstance(results[x], dict) and 'description' in
results[x] else None) + ## Some complicated conditionals to check that what's in `results` can be parsed ## + topic_info[f'{model}_name'] = topic_info['Cluster'].map(lambda x: results[x]['name'] if x in results and isinstance(results[x], dict) and 'name' in results[x] else None) + topic_info[f'{model}_description'] = topic_info['Cluster'].map(lambda x: results[x]['description'] if x in results and isinstance(results[x], dict) and 'description' in results[x] else None) - logger.info("Saving output...") - topic_info.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_topic_info_with_names_descriptions.csv", index=False) - logger.info("Done!") + logger.info("Saving output...") + topic_info.to_csv(PROJECT_DIR / f"outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv", index=False) + logger.info("Done!") ## Save errors so that they can be loaded in and manually reprocessed if necessary ## save_list_of_dicts_as_jsonl(errors, PROJECT_DIR/"outputs/interim/errors.jsonl") diff --git a/dsp_interview_transcripts/pipeline/prep_output_tables.py b/dsp_interview_transcripts/pipeline/prep_output_tables.py new file mode 100644 index 0000000..7b461f5 --- /dev/null +++ b/dsp_interview_transcripts/pipeline/prep_output_tables.py @@ -0,0 +1,95 @@ +import altair as alt +import matplotlib.pyplot as plt +import pandas as pd +import seaborn as sns + +from dsp_interview_transcripts import PROJECT_DIR + +OUTPUT_PATH_FULL_DATA = PROJECT_DIR / "outputs/final/final_df.csv" +OUTPUT_PATH_SUMMARY = PROJECT_DIR / "outputs/final/summary_info.csv" + +opacity_condition = alt.condition( + alt.datum.Name == "None", alt.value(0.1), alt.value(0.6) + ) + +def create_scatterplot(data_viz, color="Name:N", tooltip=["Name:N","Description:N", "question:N","text_clean:N"], + domain=None, range_=None): + + if domain is not None and range_ is not None: + color = alt.Color(color, scale=alt.Scale(domain=domain, range=range_)) + else: + color = alt.Color(color) + + fig = ( + alt.Chart(data_viz) + .mark_circle(size=50) + .encode( + x=alt.X( + "x:Q", + axis=alt.Axis(ticks=False, labels=False, title=None, grid=False), + ), + y=alt.Y( + "y:Q", + axis=alt.Axis(ticks=False, labels=False, title=None, grid=False), + ), + color=color, + opacity=opacity_condition, + tooltip=tooltip, + ) + .properties(width=900, height=600) + .interactive() + ) + + return fig + +if __name__ == "__main__": + rep_docs = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv") + data = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics.csv") + data_w_names = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv") + + topic_counts = pd.DataFrame(data['Cluster'].value_counts()).reset_index() + topic_counts = topic_counts.rename(columns={'count': 'N responses in topic'}) + + data_w_names = data_w_names.rename(columns={"llama3.2_name": "Name", + "llama3.2_description": "Description"}) + data_w_names = pd.merge(data_w_names, topic_counts, left_on='Cluster', right_on='Cluster', how='left') + + data_w_names[['Name', 'Description', 'Top Words', 'N responses in topic']].to_csv(OUTPUT_PATH_SUMMARY, index=False) + + rep_docs = pd.merge(rep_docs, data_w_names[['Cluster', 'Name', 'Description', 'N responses in topic']], on="Cluster", how="left") + + data_viz = pd.merge(data, data_w_names[['Cluster', 'Name', 'Description']], on="Cluster", how="left") + + data_viz['Name'] = data_viz['Name'].fillna("None") + 
    data_viz['Description'] = data_viz['Description'].fillna("None")
+
+    # Create binary column to indicate whether the user response is representative of the topic
+    merged_df = data_viz.merge(rep_docs[['conversation', 'uuid', 'Name']],
+                               on=['conversation', 'uuid', 'Name'],
+                               how='left',
+                               indicator=True)
+
+    merged_df['Representative of topic'] = (merged_df['_merge'] == 'both').astype(int)
+
+    merged_df = merged_df.drop(columns=['_merge'])
+
+    final_df = merged_df[['Name','Description','Top Words', 'Representative of topic', 'question', 'context', 'text_clean', 'sentiment','conversation', 'uuid','timestamp']]
+    final_df = final_df.rename(columns={'text_clean': 'user_response',
+                                        'question': 'probable question',
+                                        'sentiment': 'predicted_sentiment',
+                                        'Name': 'Topic Name',
+                                        'Description': 'Topic Description',
+                                        'Top Words': 'Topic Top Words',})
+
+    final_df.sort_values(['conversation', 'timestamp']).to_csv(OUTPUT_PATH_FULL_DATA, index=False)
+
+    ### Visualise clusters ###
+
+    fig = create_scatterplot(data_viz)
+    fig.save(PROJECT_DIR / "outputs/scatter_coloured_by_topic.html")
+
+    fig_questions = create_scatterplot(data_viz=data_viz, color="question:N")
+    fig_questions.save(PROJECT_DIR / "outputs/scatter_coloured_by_question.html")
+
+    fig_sentiment = create_scatterplot(data_viz=data_viz, color="sentiment:N", domain=['Negative', 'Neutral', 'Positive'], range_=['red', 'gray', 'green'])
+    fig_sentiment.save(PROJECT_DIR / "outputs/scatter_coloured_by_sentiment.html")
\ No newline at end of file
diff --git a/dsp_interview_transcripts/pipeline/process_data.py b/dsp_interview_transcripts/pipeline/process_data.py
index 261738a..d8be033 100644
--- a/dsp_interview_transcripts/pipeline/process_data.py
+++ b/dsp_interview_transcripts/pipeline/process_data.py
@@ -1,10 +1,52 @@
+
 import pandas as pd
+import random
+from transformers import AutoTokenizer, AutoModelForSequenceClassification
+from scipy.special import softmax
+from sklearn.metrics.pairwise import cosine_similarity
+
+import nltk
+
+from nltk.tokenize import sent_tokenize
+from nltk.corpus import stopwords
+nltk.download('stopwords')
+nltk.download("punkt")
+nltk.download('punkt_tab')
+
+stop_words = set(stopwords.words('english'))
+
+import numpy as np
+from sentence_transformers import SentenceTransformer
+
+# Set random seeds
+RANDOM_SEED = 42
+np.random.seed(RANDOM_SEED)
+random.seed(RANDOM_SEED)
+# PyTorch seed (used by SentenceTransformer)
+import torch
+torch.manual_seed(RANDOM_SEED)
 
 from dsp_interview_transcripts import PROJECT_DIR, logger
 from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length, remove_preamble
 
+SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2")
+
 DATA_PATH = PROJECT_DIR / "data/qual_af_transcripts.csv"
 
+QUESTIONS = ["What, if anything, do you know about the Boiler Upgrade Scheme?
If you don't know anything about the scheme, just give it your best guess.", + "What, if anything, do you know about the process of applying for Boiler Upgrade Scheme funding?", + "What do you think are the eligibility requirements for someone to use this scheme?", + "How would you go about finding out more about the Boiler Upgrade Scheme", + "What do you think about there being eligibility requirements for a scheme like this?", + "As a homeowner, where do you see yourself in relation to the eligibility requirements?", + "What, if any, type of work do you think needs to be done to a house to replace fossil fuel heating systems?", + "What types of home upgrades would you consider getting done to your house to improve the efficiency of your heating system?", + "What types of work to your house wouldn't you consider?", + "What are some energy-efficient heating systems that you could consider, apart from the one currently in use at your home?", + "Is there anything we've talked about you'd like to discuss further?"] + +MIN_LEN = 9 + def concatenate_consecutive_roles(df, text_col='text_clean', conversation_col='conversation', role_col='role'): """Concatenates consecutive rows with the same role within a conversation.""" # Sort the dataframe to ensure correct order (if it's not already sorted) @@ -19,10 +61,10 @@ def concatenate_consecutive_roles(df, text_col='text_clean', conversation_col='c # Group by 'conversation' and 'group' to concatenate text grouped = df.groupby([conversation_col, 'turn']).agg({ - 'timestamp': 'first', # Keep the first timestamp in each group - text_col: ' '.join, # Concatenate the 'text_clean' column - role_col: 'first', # Keep the role (either BOT or USER) - 'uuid': 'first' # Keep the first UUID in each group + 'timestamp': 'first', + text_col: ' '.join, + role_col: 'first', + 'uuid': 'first' }).reset_index() logger.info(f"Number of turns after concatenating consecutive roles: {len(grouped)}") @@ -72,28 +114,149 @@ def create_q_and_a_column(data_df, text_col = 'text_clean', conversation_col='co return df +def process_bot_qs(interviews_df: pd.DataFrame) -> list: + """Produce a list of each unique sentence produced by the bot + """ + bot_qs = interviews_df[interviews_df['role'] == 'BOT'] + # split into individual sentences so that if the original question is contained within the utterance, + # we have a better chance of catching it + bot_qs["sentences"] = bot_qs['text_clean'].apply(lambda x: sent_tokenize(x)) + bot_qs = bot_qs.explode("sentences") + bot_qs_list = bot_qs['sentences'].unique().tolist() + return bot_qs_list, bot_qs + +def match_questions(bot_qs_list, questions, threshold=0.85): + """Find the best matches between the input questions from our interview template, + and the actual questions produced by the bot. 
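+
+    For example (hypothetical values), a bot sentence such as "Can you tell
+    me what you know about the Boiler Upgrade Scheme?" should match the first
+    template question with a cosine similarity above the 0.85 default
+    threshold, while an off-script sentence like "Thanks, that's really
+    helpful!" should fall below it and be dropped from the matches.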
+ """ + bot_qs_embeddings = SENTENCE_MODEL.encode(bot_qs_list) + input_qs_embeddings = SENTENCE_MODEL.encode(questions) + + similarities = cosine_similarity(bot_qs_embeddings, input_qs_embeddings, ) + + # Find the index of the highest cosine similarity for each n-gram/lookup phrase combination + max_indices = np.argmax(similarities, axis=1) + + # Retrieve the text of the corresponding target phrases + most_similar_phrases = [questions[index] for index in max_indices] + + most_similar_similarities = [ + similarities[i, index] for i, index in enumerate(max_indices) + ] + + most_similar_pairs = list( + zip(bot_qs_list, most_similar_phrases, most_similar_similarities) + ) + + matches = pd.DataFrame( + most_similar_pairs, columns=["bot_q", "question", "cosine_similarity"] + ) + + final_matches = matches[matches['cosine_similarity'] > threshold] # temporary threshold until we've done some proper evaluation + + return final_matches + +def get_best_matches(bot_qs, final_matches, questions_df): + """ + Create a dataframe of the original bot utterances and the questions they were matched to + """ + questions_matched = pd.merge(bot_qs, final_matches, left_on='sentences', right_on='bot_q', how='inner') + # merge in the df that has the question number + questions_matched = pd.merge(questions_matched, questions_df, left_on='question', right_on='question', how='left') + + uuid_question_counts = questions_matched.groupby('uuid')['question'].nunique() + logger.info(f"The following utterances match to more than once question: {uuid_question_counts[uuid_question_counts > 1]}") + + # Group by 'uuid' and keep the row with the highest 'cosine_similarity' + questions_highest_similarity = questions_matched.loc[questions_matched.groupby('uuid')['cosine_similarity'].idxmax()] + return questions_highest_similarity + +def get_sentiment(texts): + roberta = "cardiffnlp/twitter-roberta-base-sentiment-latest" + model = AutoModelForSequenceClassification.from_pretrained(roberta) + tokenizer = AutoTokenizer.from_pretrained(roberta) + + labels = ['Negative', 'Neutral', 'Positive'] + + # Tokenize all texts at once + encoded_texts = tokenizer(texts, return_tensors='pt', padding=True, truncation=True) + + # Pass all encoded texts through the model at once + with torch.no_grad(): # Disable gradient computation for faster inference + output = model(**encoded_texts) + + # Apply softmax to the scores + scores = output.logits.detach().numpy() + probabilities = softmax(scores, axis=1) + + # Get the label with the highest probability for each text + predicted_labels = [] + for prob in probabilities: + max_index = prob.argmax() # Get the index of the highest probability + predicted_labels.append(labels[max_index]) # Get the corresponding label + + return predicted_labels + +def create_context(row, df): + # Only proceed if the row is a USER entry + if row['role'] == 'USER': + idx = row.name # Current row index + if idx >= 3: # We need at least 3 previous rows to build context + prev_rows = df.iloc[idx-3:idx] # Take previous 3 rows + else: + prev_rows = df.iloc[:idx] + + return ' | '.join(prev_rows['text_clean']) + if __name__ == "__main__": interviews_df = pd.read_csv(DATA_PATH) interviews_df = clean_data(interviews_df) - + logger.info(f"Number of interviews: {len(interviews_df['conversation'].unique())}") - + # Make sure the conversations are sorted by time, so that the replies go in the right order interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp) interviews_df = 
interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean')) - + # Remove everything up to when bot asks if the instructions are clear - everything before is just noise interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True) - + # Group together consecutive responses by the same role interviews_df = concatenate_consecutive_roles(interviews_df) + + questions_df = pd.DataFrame(enumerate(QUESTIONS), columns=["q_number", "question"]) + + bot_qs_list, bot_qs = process_bot_qs(interviews_df) + + final_matches = match_questions(bot_qs_list, QUESTIONS) + + questions_highest_similarity = get_best_matches(bot_qs, final_matches, questions_df) + + # Merge back into the original df + interviews_df = pd.merge(interviews_df, questions_highest_similarity[['uuid', 'question', 'q_number', 'cosine_similarity']], on='uuid', how='left') + + # Forward fill the matched questions and their question numbers + interviews_q_filled = interviews_df.copy() + interviews_q_filled['question'] = interviews_q_filled.groupby('conversation')['question'].ffill() + interviews_q_filled['q_number'] = interviews_q_filled.groupby('conversation')['q_number'].ffill() + + interviews_q_filled = add_text_length(interviews_q_filled) + + interviews_q_filled['context'] = interviews_q_filled.apply(create_context, df=interviews_q_filled, axis=1) + + user_messages = interviews_q_filled[(interviews_q_filled['role']=='USER')& (interviews_q_filled['text_length'] > MIN_LEN)] + + # Get sentiments!! + texts = user_messages['text_clean'].tolist() + sentiments = get_sentiment(texts) + + user_messages['sentiment'] = sentiments - interviews_df = create_q_and_a_column(interviews_df) + logger.info(user_messages['sentiment'].value_counts()) - interviews_df = add_text_length(interviews_df, 'text_clean') + logger.info(f"Number of user messages: {len(user_messages)}") - q_and_a_df = interviews_df[interviews_df['q_and_a'] != ''] - q_and_a_df.to_csv(PROJECT_DIR / "data/q_and_a.csv", index=False) + logger.info("Saving data...") + user_messages.to_csv(PROJECT_DIR / f"data/user_messages_min_len_{MIN_LEN}_w_sentiment.csv", index=False) - user_messages = interviews_df[(interviews_df['role']=='USER') & (interviews_df['text_length'] >= 4)] # only answers at least 4 words long - user_messages.to_csv(PROJECT_DIR / "data/user_messages.csv", index=False) + logger.info("Done!") \ No newline at end of file diff --git a/dsp_interview_transcripts/pipeline/run_pipeline.py b/dsp_interview_transcripts/pipeline/run_pipeline.py index 7bfe8f2..6a8e2b6 100644 --- a/dsp_interview_transcripts/pipeline/run_pipeline.py +++ b/dsp_interview_transcripts/pipeline/run_pipeline.py @@ -1,11 +1,23 @@ +""" +Before running these scripts, you will need to make sure that Ollama is available. + +You can do this by running the following from the terminal: +``` +ollama serve +``` +Alternatively check the bar at the top of your screen. A llama icon indicates that Ollama is running. +""" from pathlib import Path import subprocess import os script_parent = Path(__file__).parent +# All of these scripts cover topic modelling across *all* responses. 
The top down analysis is separate subprocess.run(f"python {script_parent / 'process_data.py'}", shell=True) -subprocess.run(f"python {script_parent / 'chunk_interviews.py'}", shell=True) -subprocess.run(f"python {script_parent / 'segment_interviews.py'}", shell=True) subprocess.run(f"python {script_parent / 'topic_modelling.py'}", shell=True) -subprocess.run(f"python {script_parent / 'tname_clusters.py'}", shell=True) +subprocess.run(f"python {script_parent / 'name_clusters.py'}", shell=True) +subprocess.run(f"python {script_parent / 'prep_output_tables.py'}", shell=True) + +# Top down approach +subprocess.run(f"python {script_parent / 'top_down_analysis.py'}", shell=True) \ No newline at end of file diff --git a/dsp_interview_transcripts/pipeline/top_down_analysis.py b/dsp_interview_transcripts/pipeline/top_down_analysis.py new file mode 100644 index 0000000..a550e27 --- /dev/null +++ b/dsp_interview_transcripts/pipeline/top_down_analysis.py @@ -0,0 +1,251 @@ +import altair as alt +from bertopic import BERTopic +import numpy as np +import pandas as pd +import random +from sentence_transformers import SentenceTransformer +from openpyxl import Workbook + +from hdbscan import HDBSCAN +from umap import UMAP +from sklearn.feature_extraction.text import CountVectorizer +from bertopic.representation import ( + KeyBERTInspired, + MaximalMarginalRelevance, + # OpenAI, + PartOfSpeech, +) + +from langchain_core.output_parsers import JsonOutputParser +from pydantic import BaseModel, Field +from langchain.prompts import PromptTemplate +from langchain_community.chat_models import ChatOllama + +from dsp_interview_transcripts import PROJECT_DIR, logger + +pd.set_option('display.max_colwidth', 500) + +# Set random seeds +RANDOM_SEED = 42 +np.random.seed(RANDOM_SEED) +random.seed(RANDOM_SEED) +# PyTorch seed (used by SentenceTransformer) +import torch + +torch.manual_seed(RANDOM_SEED) + +SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") + +umap_model = UMAP( + n_neighbors=15, + n_components=50, + min_dist=0.1, + metric="cosine", + random_state=RANDOM_SEED, + ) + +hdbscan_model = HDBSCAN( + min_cluster_size=5, # setting this really small because some questions don't have many responses + min_samples=1, + metric="euclidean", + cluster_selection_method="eom", + prediction_data=True, + ) + +vectorizer_model = CountVectorizer( + stop_words="english", + min_df=1, + max_df=0.85, + ngram_range=(1, 3), + ) + +# KeyBERT +keybert_model = KeyBERTInspired() + +# MMR +mmr_model = MaximalMarginalRelevance(diversity=0.3) + + +# All representation models +representation_model = { + "KeyBERT": keybert_model, + # "OpenAI": openai_model, # Uncomment if you will use OpenAI + "MMR": mmr_model, + # "POS": pos_model, + } + +topic_model = BERTopic( + # Pipeline models + embedding_model="sentence-transformers/all-MiniLM-L6-v2", + umap_model=umap_model, + hdbscan_model=hdbscan_model, + vectorizer_model=vectorizer_model, + representation_model=representation_model, + # Hyperparameters + top_n_words=10, + verbose=True, + calculate_probabilities=True, + ) + +class NameDescription(BaseModel): + name: str = Field(description="Informative name for this group of documents") + description: str = Field(description="Description of this group of documents") + +prompt = """ + I have data for some interviews where users were asked about their knowledge of + and opinions on different home heating options. 
In the interview, users were asked about their knowledge + of the Boiler Upgrade Scheme, a scheme that provides a subsidy to homeowners wishing to install a heatpump + instead of getting a new gas boiler for their home. + \n + I have clustered user responses to the question {question}. + \n + One of the clusters contains the following user responses: + {docs} + The cluster is described by the following keywords: {keywords} + \n + Based on the information above, please provide a name and summary for the cluster as a JSON object with two fields: + - name: A short, informative name for the cluster + - description: A summary of views of users within the cluster. You can include sentiments they express, reasons for their views, their knowledge levels, and any other relevant information. + \n + Provide nothing except for this JSON dict. + \n + Example: + {{ + "name": "Radiator upgrades", + "description": "This cluster contains users who mention changing their radiators when questioned about home upgrades they would consider. Some express reluctance to alter the appearance of their home with larger radiators." + }} + """ + +parser = JsonOutputParser(pydantic_object=NameDescription) + +final_prompt = PromptTemplate( + template=prompt, + input_variables=["question","docs", "keywords"], + partial_variables={"format_instructions": parser.get_format_instructions()} + ) + +model="llama3.2" + +ollama_model = ChatOllama( + model=model, temperature=0 + ) + +llm_chain = final_prompt | ollama_model | parser + +def name_topics(topic_info: pd.DataFrame, llm_chain, topics, text_col='text_clean', top_words_col = 'Top Words', + topic_label_col='Cluster', question='') -> dict: + results = {} + + for topic in topics: + logger.info(f"Processing topic {topic}") + temp_df = topic_info[topic_info[topic_label_col] == topic] + docs = temp_df[text_col].values[0] + logger.info(f"Docs: {docs}") + keywords = temp_df[top_words_col].values[0] + logger.info(f"Keywords: {keywords}") + + try: + output = llm_chain.invoke({ "question": question, "docs": docs, "keywords": keywords}) + print(output['name'], output['description']) + results[topic] = output + + except Exception as e: + logger.error(f"Error processing topic {topic}: {str(e)}") + # errors.append({"topic": topic, "error": str(e)}) + + return results + +if __name__ == "__main__": + data = pd.read_csv(PROJECT_DIR / "data/user_messages_min_len_9_w_sentiment.csv") + + data_dict = {} + + for q in data['q_number'].unique().tolist(): + df = data.copy() + data_dict[q] = df[df['q_number'] == q] + + full_dfs = {} + + summary_dfs = {} + + for key in [0, 1,4,6,7,9]: # these questions are the ones with >100 data points in each + temp_df = data_dict[key] + + question = temp_df['question'].unique()[0] + + temp_df['text_clean'] = temp_df['text_clean'].astype(str) + docs = temp_df['text_clean'].tolist() + embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) + + topics, probs = topic_model.fit_transform(docs, embeddings) + + rep_docs = topic_model.get_representative_docs() + + umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2) + embeddings_2d = umap_2d.fit_transform(embeddings) + + topic_lookup = topic_model.get_topic_info()[["Topic", "Name"]] + + temp_df['x'] = embeddings_2d[:,0] + temp_df['y'] = embeddings_2d[:,1] + temp_df['topic'] = topics + temp_df['doc'] = docs + df_vis = temp_df.merge(topic_lookup, left_on="topic", right_on="Topic", how="left") + + df_for_summarisation = pd.DataFrame(topic_model.get_topic_info()) + df_for_summarisation = 
df_for_summarisation[df_for_summarisation['Topic'] != -1] + + topic_list = df_for_summarisation['Topic'].to_list() + + results = name_topics(df_for_summarisation, llm_chain, topic_list, text_col='Representative_Docs', top_words_col = 'Representation', + topic_label_col='Topic', question=temp_df['question'].unique()[0]) + + df_for_summarisation['name'] = df_for_summarisation['Topic'].map(lambda x: results[x]['name'] if x in results and isinstance(results[x], dict) and 'name' in results[x] else None) + df_for_summarisation['description'] = df_for_summarisation['Topic'].map(lambda x: results[x]['description'] if x in results and isinstance(results[x], dict) and 'description' in results[x] else None) + df_for_summarisation['question'] = question + summary_dfs[key] = df_for_summarisation + + df_vis = df_vis.merge(df_for_summarisation[['Topic', 'name', 'description']], left_on='Topic', right_on='Topic', how='left') + full_dfs[key] = df_vis + + # save two bar charts per question + for key in full_dfs.keys(): + question = full_dfs[key]['question'].unique()[0] + + df = full_dfs[key] + + name_count_chart = alt.Chart(df).mark_bar().encode( + y=alt.Y('Name:N', title=None), + x=alt.X('count()', title='Count') + ).properties( + #title='Count of Values of Name' + ) + + sentiment_proportion_chart = alt.Chart(df).mark_bar().encode( + y=alt.Y('Name:N', title='Name'), + x=alt.X('count()', stack='normalize', title='Proportion'), + color=alt.Color('sentiment:N', title='Sentiment', scale=alt.Scale(domain=['Negative', 'Neutral', 'Positive'], range=['red', 'gray', 'green'])) + ).properties( + # title='Proportion of Sentiment within Name' + ) + + # Combine the charts and add a title + combined_chart = (name_count_chart | sentiment_proportion_chart).properties( + title=f"{question}" + ) + combined_chart.save(PROJECT_DIR / f"outputs/by_question/{question}_name_sentiment_chart.html") + + excel_file = PROJECT_DIR / 'outputs/by_question/question_topic_models.xlsx' + with pd.ExcelWriter(excel_file, engine='openpyxl') as writer: + + # Iterate over unique values of 'question' + for key in summary_dfs.keys(): + df = summary_dfs[key] + df = df.explode('Representative_Docs') + + question = df['question'].unique()[0] + + # Save each summary table as a separate sheet in the Excel workbook + sheet_name = question[:30] # Excel sheet names are limited to 31 characters + df[['question','Topic', 'Count', 'Name', 'Representation', 'name','description','Representative_Docs']].to_excel(writer, sheet_name=sheet_name, index=False) + \ No newline at end of file diff --git a/dsp_interview_transcripts/pipeline/topic_modelling.py b/dsp_interview_transcripts/pipeline/topic_modelling.py index b68ec71..16883b3 100644 --- a/dsp_interview_transcripts/pipeline/topic_modelling.py +++ b/dsp_interview_transcripts/pipeline/topic_modelling.py @@ -1,20 +1,15 @@ -from bertopic import BERTopic import numpy as np import pandas as pd import random from sentence_transformers import SentenceTransformer +from collections import defaultdict from hdbscan import HDBSCAN from umap import UMAP -from sklearn.feature_extraction.text import CountVectorizer -from bertopic.representation import ( - KeyBERTInspired, - MaximalMarginalRelevance, - # OpenAI, - PartOfSpeech, -) +from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer +from sklearn.preprocessing import StandardScaler -from dsp_interview_transcripts import PROJECT_DIR +from dsp_interview_transcripts import PROJECT_DIR, logger # Set random seeds RANDOM_SEED = 42 @@ -27,33 +22,11 @@ 
SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") -INTERVIEW_SECTIONS = [f"interview_q_{i}" for i in range(-1, 10) if i != 4] # there were no hits for q4 for some reason -DATA_SOURCES = ["user_messages", - "q_and_a", - "interviews_chunked" - ] + INTERVIEW_SECTIONS +DATA_PATH = PROJECT_DIR / "data/user_messages_min_len_9_w_sentiment.csv" -MIN_CLUSTER_SIZES = [5, 10, 20, 50, 100] +MIN_CLUSTER_SIZE = 20 -if __name__ == "__main__": - - for source in DATA_SOURCES: - for cluster_size in MIN_CLUSTER_SIZES: - if source in INTERVIEW_SECTIONS and cluster_size>10: - continue - if source=="interviews_chunked" and cluster_size>20: - continue - - df = pd.read_csv(PROJECT_DIR / f"data/{source}.csv") - - if source=="user_messages" or source in INTERVIEW_SECTIONS: - text_col = "text_clean" - elif source=="interviews_chunked": - text_col = "chunk_text" - else: - text_col = "q_and_a" - - umap_model = UMAP( +umap_model = UMAP( n_neighbors=15, n_components=50, min_dist=0.1, @@ -61,90 +34,97 @@ random_state=RANDOM_SEED, ) - hdbscan_model = HDBSCAN( - min_cluster_size=cluster_size, - min_samples=1, +hdbscan_model = HDBSCAN( + min_cluster_size=20, metric="euclidean", cluster_selection_method="eom", prediction_data=True, ) - vectorizer_model = CountVectorizer( +vectorizer_model = TfidfVectorizer( stop_words="english", min_df=1, max_df=0.85, ngram_range=(1, 3), ) - # KeyBERT - keybert_model = KeyBERTInspired() - - # MMR - mmr_model = MaximalMarginalRelevance(diversity=0.3) - - - # All representation models - representation_model = { - "KeyBERT": keybert_model, - "MMR": mmr_model, - } - - topic_model = BERTopic( - # Pipeline models - embedding_model="sentence-transformers/all-MiniLM-L6-v2", - umap_model=umap_model, - hdbscan_model=hdbscan_model, - vectorizer_model=vectorizer_model, - representation_model=representation_model, - # Hyperparameters - top_n_words=10, - verbose=True, - calculate_probabilities=True, - ) - - # Convert NaNs to empty strings - df[text_col] = df[text_col].astype(str) - docs = df[text_col].tolist() - embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) - - topics, probs = topic_model.fit_transform(docs, embeddings) - - # save topic model - topic_model.save( - PROJECT_DIR / f"outputs/topic_model_{source}_cluster_size_{cluster_size}", - serialization="pytorch", - save_ctfidf=True, - save_embedding_model=SENTENCE_MODEL, - ) +def stratified_sample(group, n=10): + # Calculate the sample size for each combination of 'question' and 'sentiment' + stratified_sample = group.groupby(['question', 'sentiment']).apply(lambda x: x.sample(frac=min(1, n / len(group)), random_state=42)) + + # Reset the index to tidy up the resulting DataFrame + return stratified_sample.reset_index(drop=True) + +if __name__ == "__main__": + user_messages = pd.read_csv(DATA_PATH) + + docs = user_messages['text_clean'].tolist() + embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) + + umap_vectors = umap_model.fit_transform(embeddings) + + umap_df = pd.DataFrame(umap_vectors) + umap_df.columns = umap_df.columns.map(str) + user_messages_w_umap = pd.concat([user_messages.reset_index(), umap_df], axis=1) + + umap_vars = [f'{i}' for i in range(50)] + model_vars = umap_vars - rep_docs = topic_model.get_representative_docs() - - # Save representative docs and keywords - # (These will be used to obtain summaries) - topic_info = pd.DataFrame(topic_model.get_topic_info()) - topic_info.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_topic_info.csv", index=False) - - umap_2d = 
UMAP(random_state=RANDOM_SEED, n_components=2) - embeddings_2d = umap_2d.fit_transform(embeddings) - - topic_lookup = topic_model.get_topic_info()[["Topic", "Name"]] - - df_vis = pd.DataFrame(embeddings_2d, columns=["x", "y"]) - df_vis["topic"] = topics - df_vis = df_vis.merge(topic_lookup, left_on="topic", right_on="Topic", how="left") - df_vis["doc"] = docs - - if source=="interviews_chunked": - base_df = df[['conversation',text_col]] - else: - base_df = df[["uuid","conversation",text_col]] - - df_vis = pd.merge( - base_df, - df_vis, - left_on=text_col, - right_on="doc", - how="outer", - ) - - df_vis.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_vis.csv", index=False) + # Normalise the umap vectors + scaler = StandardScaler() + df_normalized = scaler.fit_transform(user_messages_w_umap[model_vars]) + + clusters = hdbscan_model.fit_predict(df_normalized) + cluster_probabilities = hdbscan_model.probabilities_ + user_messages_w_umap['label'] = clusters + user_messages_w_umap['probs'] = cluster_probabilities + logger.info(user_messages_w_umap['label'].value_counts()) + + # 2d embeddings for visualisation + umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2) + embeddings_2d = umap_2d.fit_transform(embeddings) + + ### topic representations ### + cluster_groups = user_messages_w_umap.groupby('label').agg({'text_clean': ' '.join}).reset_index() + + tfidf_matrix = vectorizer_model.fit_transform(cluster_groups['text_clean'].to_list()) + + feature_names = vectorizer_model.get_feature_names_out() + + # Create a dictionary to hold top words for each cluster + top_words_per_cluster = defaultdict(list) + + # Number of top words you want to display per cluster + n_top_words = 10 + + # Iterate over each cluster and get top words + for cluster_idx, tfidf_scores in enumerate(tfidf_matrix): + # Get indices of top n words within the cluster + top_word_indices = tfidf_scores.toarray()[0].argsort()[:-n_top_words - 1:-1] + + # Get the top words corresponding to the top indices + top_words = [feature_names[i] for i in top_word_indices] + + # Append the words to the dictionary + top_words_per_cluster[cluster_groups.iloc[cluster_idx]['label']] = ', '.join(top_words) + + top_words_df = pd.DataFrame(list(top_words_per_cluster.items()), columns=['Cluster', 'Top Words']) + + user_messages_w_umap_topics = pd.merge(user_messages_w_umap, top_words_df, left_on='label', right_on='Cluster', how='left') + user_messages_w_umap_topics['x'] = embeddings_2d[:, 0] + user_messages_w_umap_topics['y'] = embeddings_2d[:, 1] + + user_messages_w_umap_topics.to_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics.csv", index=False) + + + ### save most representative documents ### + + filtered_df = user_messages_w_umap_topics[(user_messages_w_umap_topics['probs'] >= 0.5) & (user_messages_w_umap_topics['Cluster'] != -1)] + + # Group by 'Cluster' and apply the stratified sampling + sampled_texts = filtered_df.groupby('Cluster').apply(stratified_sample).reset_index(drop=True) + + # Keep only the first 10 samples per cluster + sampled_texts = sampled_texts.groupby('Cluster').head(10) + + sampled_texts[['Cluster', 'Top Words', 'text_clean', 'sentiment', 'question', 'context','conversation', 'uuid', 'probs']].to_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv", index=False) \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 425b32d..39561a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -932,6 +932,17 @@ files = [ [package.extras] dev 
= ["coverage", "pytest (>=7.4.4)"] +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + [[package]] name = "executing" version = "2.1.0" @@ -2736,6 +2747,20 @@ files = [ {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, ] +[[package]] +name = "openpyxl" +version = "3.1.5" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, +] + +[package.dependencies] +et-xmlfile = "*" + [[package]] name = "overrides" version = "7.7.0" @@ -4214,6 +4239,27 @@ dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodest doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"] test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +[[package]] +name = "seaborn" +version = "0.13.2" +description = "Statistical data visualization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987"}, + {file = "seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7"}, +] + +[package.dependencies] +matplotlib = ">=3.4,<3.6.1 || >3.6.1" +numpy = ">=1.20,<1.24.0 || >1.24.0" +pandas = ">=1.2" + +[package.extras] +dev = ["flake8", "flit", "mypy", "pandas-stubs", "pre-commit", "pytest", "pytest-cov", "pytest-xdist"] +docs = ["ipykernel", "nbconvert", "numpydoc", "pydata_sphinx_theme (==0.10.0rc2)", "pyyaml", "sphinx (<6.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-issues"] +stats = ["scipy (>=1.7)", "statsmodels (>=0.12)"] + [[package]] name = "send2trash" version = "1.8.3" @@ -4481,111 +4527,111 @@ test = ["pytest", "ruff"] [[package]] name = "tokenizers" -version = "0.19.1" +version = "0.20.1" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"}, - {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"}, - {file = 
"tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"}, - {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"}, - {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"}, - {file = "tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"}, - {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", 
hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"}, - {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"}, - {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"}, - {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"}, - {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"}, - {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"}, - {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"}, - {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = 
"sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"}, - {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"}, - {file = 
"tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"}, - {file = "tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"}, + {file = "tokenizers-0.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:439261da7c0a5c88bda97acb284d49fbdaf67e9d3b623c0bfd107512d22787a9"}, + {file = "tokenizers-0.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03dae629d99068b1ea5416d50de0fea13008f04129cc79af77a2a6392792d93c"}, + {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b61f561f329ffe4b28367798b89d60c4abf3f815d37413b6352bc6412a359867"}, + {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec870fce1ee5248a10be69f7a8408a234d6f2109f8ea827b4f7ecdbf08c9fd15"}, + {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d388d1ea8b7447da784e32e3b86a75cce55887e3b22b31c19d0b186b1c677800"}, + {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:299c85c1d21135bc01542237979bf25c32efa0d66595dd0069ae259b97fb2dbe"}, + {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e96f6c14c9752bb82145636b614d5a78e9cde95edfbe0a85dad0dd5ddd6ec95c"}, + {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9e95ad49c932b80abfbfeaf63b155761e695ad9f8a58c52a47d962d76e310f"}, + {file = "tokenizers-0.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f22dee205329a636148c325921c73cf3e412e87d31f4d9c3153b302a0200057b"}, + {file = "tokenizers-0.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2ffd9a8895575ac636d44500c66dffaef133823b6b25067604fa73bbc5ec09d"}, + {file = "tokenizers-0.20.1-cp310-none-win32.whl", hash = "sha256:2847843c53f445e0f19ea842a4e48b89dd0db4e62ba6e1e47a2749d6ec11f50d"}, + {file = "tokenizers-0.20.1-cp310-none-win_amd64.whl", hash = "sha256:f9aa93eacd865f2798b9e62f7ce4533cfff4f5fbd50c02926a78e81c74e432cd"}, + {file = "tokenizers-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4a717dcb08f2dabbf27ae4b6b20cbbb2ad7ed78ce05a829fae100ff4b3c7ff15"}, + {file = "tokenizers-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f84dad1ff1863c648d80628b1b55353d16303431283e4efbb6ab1af56a75832"}, + {file = 
"tokenizers-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:929c8f3afa16a5130a81ab5079c589226273ec618949cce79b46d96e59a84f61"}, + {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d10766473954397e2d370f215ebed1cc46dcf6fd3906a2a116aa1d6219bfedc3"}, + {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9300fac73ddc7e4b0330acbdda4efaabf74929a4a61e119a32a181f534a11b47"}, + {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ecaf7b0e39caeb1aa6dd6e0975c405716c82c1312b55ac4f716ef563a906969"}, + {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5170be9ec942f3d1d317817ced8d749b3e1202670865e4fd465e35d8c259de83"}, + {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f1ae08fa9aea5891cbd69df29913e11d3841798e0bfb1ff78b78e4e7ea0a4"}, + {file = "tokenizers-0.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ee86d4095d3542d73579e953c2e5e07d9321af2ffea6ecc097d16d538a2dea16"}, + {file = "tokenizers-0.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:86dcd08da163912e17b27bbaba5efdc71b4fbffb841530fdb74c5707f3c49216"}, + {file = "tokenizers-0.20.1-cp311-none-win32.whl", hash = "sha256:9af2dc4ee97d037bc6b05fa4429ddc87532c706316c5e11ce2f0596dfcfa77af"}, + {file = "tokenizers-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:899152a78b095559c287b4c6d0099469573bb2055347bb8154db106651296f39"}, + {file = "tokenizers-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:407ab666b38e02228fa785e81f7cf79ef929f104bcccf68a64525a54a93ceac9"}, + {file = "tokenizers-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f13a2d16032ebc8bd812eb8099b035ac65887d8f0c207261472803b9633cf3e"}, + {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e98eee4dca22849fbb56a80acaa899eec5b72055d79637dd6aa15d5e4b8628c9"}, + {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47c1bcdd61e61136087459cb9e0b069ff23b5568b008265e5cbc927eae3387ce"}, + {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128c1110e950534426e2274837fc06b118ab5f2fa61c3436e60e0aada0ccfd67"}, + {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2e2d47a819d2954f2c1cd0ad51bb58ffac6f53a872d5d82d65d79bf76b9896d"}, + {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bdd67a0e3503a9a7cf8bc5a4a49cdde5fa5bada09a51e4c7e1c73900297539bd"}, + {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b93d2e26d04da337ac407acec8b5d081d8d135e3e5066a88edd5bdb5aff89"}, + {file = "tokenizers-0.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0c6a796ddcd9a19ad13cf146997cd5895a421fe6aec8fd970d69f9117bddb45c"}, + {file = "tokenizers-0.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3ea919687aa7001a8ff1ba36ac64f165c4e89035f57998fa6cedcfd877be619d"}, + {file = "tokenizers-0.20.1-cp312-none-win32.whl", hash = "sha256:6d3ac5c1f48358ffe20086bf065e843c0d0a9fce0d7f0f45d5f2f9fba3609ca5"}, + {file = "tokenizers-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:b0874481aea54a178f2bccc45aa2d0c99cd3f79143a0948af6a9a21dcc49173b"}, + {file = 
"tokenizers-0.20.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:96af92e833bd44760fb17f23f402e07a66339c1dcbe17d79a9b55bb0cc4f038e"}, + {file = "tokenizers-0.20.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:65f34e5b731a262dfa562820818533c38ce32a45864437f3d9c82f26c139ca7f"}, + {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17f98fccb5c12ab1ce1f471731a9cd86df5d4bd2cf2880c5a66b229802d96145"}, + {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8c0fc3542cf9370bf92c932eb71bdeb33d2d4aeeb4126d9fd567b60bd04cb30"}, + {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b39356df4575d37f9b187bb623aab5abb7b62c8cb702867a1768002f814800c"}, + {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfdad27b0e50544f6b838895a373db6114b85112ba5c0cefadffa78d6daae563"}, + {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:094663dd0e85ee2e573126918747bdb40044a848fde388efb5b09d57bc74c680"}, + {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e4cf033a2aa207d7ac790e91adca598b679999710a632c4a494aab0fc3a1b2"}, + {file = "tokenizers-0.20.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9310951c92c9fb91660de0c19a923c432f110dbfad1a2d429fbc44fa956bf64f"}, + {file = "tokenizers-0.20.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05e41e302c315bd2ed86c02e917bf03a6cf7d2f652c9cee1a0eb0d0f1ca0d32c"}, + {file = "tokenizers-0.20.1-cp37-none-win32.whl", hash = "sha256:212231ab7dfcdc879baf4892ca87c726259fa7c887e1688e3f3cead384d8c305"}, + {file = "tokenizers-0.20.1-cp37-none-win_amd64.whl", hash = "sha256:896195eb9dfdc85c8c052e29947169c1fcbe75a254c4b5792cdbd451587bce85"}, + {file = "tokenizers-0.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:741fb22788482d09d68e73ece1495cfc6d9b29a06c37b3df90564a9cfa688e6d"}, + {file = "tokenizers-0.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10be14ebd8082086a342d969e17fc2d6edc856c59dbdbddd25f158fa40eaf043"}, + {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:514cf279b22fa1ae0bc08e143458c74ad3b56cd078b319464959685a35c53d5e"}, + {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a647c5b7cb896d6430cf3e01b4e9a2d77f719c84cefcef825d404830c2071da2"}, + {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cdf379219e1e1dd432091058dab325a2e6235ebb23e0aec8d0508567c90cd01"}, + {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ba72260449e16c4c2f6f3252823b059fbf2d31b32617e582003f2b18b415c39"}, + {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:910b96ed87316e4277b23c7bcaf667ce849c7cc379a453fa179e7e09290eeb25"}, + {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e53975a6694428a0586534cc1354b2408d4e010a3103117f617cbb550299797c"}, + {file = "tokenizers-0.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:07c4b7be58da142b0730cc4e5fd66bb7bf6f57f4986ddda73833cd39efef8a01"}, + {file = "tokenizers-0.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b605c540753e62199bf15cf69c333e934077ef2350262af2ccada46026f83d1c"}, + {file = 
"tokenizers-0.20.1-cp38-none-win32.whl", hash = "sha256:88b3bc76ab4db1ab95ead623d49c95205411e26302cf9f74203e762ac7e85685"}, + {file = "tokenizers-0.20.1-cp38-none-win_amd64.whl", hash = "sha256:d412a74cf5b3f68a90c615611a5aa4478bb303d1c65961d22db45001df68afcb"}, + {file = "tokenizers-0.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a25dcb2f41a0a6aac31999e6c96a75e9152fa0127af8ece46c2f784f23b8197a"}, + {file = "tokenizers-0.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a12c3cebb8c92e9c35a23ab10d3852aee522f385c28d0b4fe48c0b7527d59762"}, + {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02e18da58cf115b7c40de973609c35bde95856012ba42a41ee919c77935af251"}, + {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f326a1ac51ae909b9760e34671c26cd0dfe15662f447302a9d5bb2d872bab8ab"}, + {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b4872647ea6f25224e2833b044b0b19084e39400e8ead3cfe751238b0802140"}, + {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce6238a3311bb8e4c15b12600927d35c267b92a52c881ef5717a900ca14793f7"}, + {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57b7a8880b208866508b06ce365dc631e7a2472a3faa24daa430d046fb56c885"}, + {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a908c69c2897a68f412aa05ba38bfa87a02980df70f5a72fa8490479308b1f2d"}, + {file = "tokenizers-0.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:da1001aa46f4490099c82e2facc4fbc06a6a32bf7de3918ba798010954b775e0"}, + {file = "tokenizers-0.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c097390e2f0ed0a5c5d569e6669dd4e9fff7b31c6a5ce6e9c66a61687197de"}, + {file = "tokenizers-0.20.1-cp39-none-win32.whl", hash = "sha256:3d4d218573a3d8b121a1f8c801029d70444ffb6d8f129d4cca1c7b672ee4a24c"}, + {file = "tokenizers-0.20.1-cp39-none-win_amd64.whl", hash = "sha256:37d1e6f616c84fceefa7c6484a01df05caf1e207669121c66213cb5b2911d653"}, + {file = "tokenizers-0.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48689da7a395df41114f516208d6550e3e905e1239cc5ad386686d9358e9cef0"}, + {file = "tokenizers-0.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:712f90ea33f9bd2586b4a90d697c26d56d0a22fd3c91104c5858c4b5b6489a79"}, + {file = "tokenizers-0.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:359eceb6a620c965988fc559cebc0a98db26713758ec4df43fb76d41486a8ed5"}, + {file = "tokenizers-0.20.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d3caf244ce89d24c87545aafc3448be15870096e796c703a0d68547187192e1"}, + {file = "tokenizers-0.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b03cf8b9a32254b1bf8a305fb95c6daf1baae0c1f93b27f2b08c9759f41dee"}, + {file = "tokenizers-0.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:218e5a3561561ea0f0ef1559c6d95b825308dbec23fb55b70b92589e7ff2e1e8"}, + {file = "tokenizers-0.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f40df5e0294a95131cc5f0e0eb91fe86d88837abfbee46b9b3610b09860195a7"}, + {file = "tokenizers-0.20.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:08aaa0d72bb65058e8c4b0455f61b840b156c557e2aca57627056624c3a93976"}, + {file = 
"tokenizers-0.20.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:998700177b45f70afeb206ad22c08d9e5f3a80639dae1032bf41e8cbc4dada4b"}, + {file = "tokenizers-0.20.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62f7fbd3c2c38b179556d879edae442b45f68312019c3a6013e56c3947a4e648"}, + {file = "tokenizers-0.20.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31e87fca4f6bbf5cc67481b562147fe932f73d5602734de7dd18a8f2eee9c6dd"}, + {file = "tokenizers-0.20.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:956f21d359ae29dd51ca5726d2c9a44ffafa041c623f5aa33749da87cfa809b9"}, + {file = "tokenizers-0.20.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1fbbaf17a393c78d8aedb6a334097c91cb4119a9ced4764ab8cfdc8d254dc9f9"}, + {file = "tokenizers-0.20.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ebe63e31f9c1a970c53866d814e35ec2ec26fda03097c486f82f3891cee60830"}, + {file = "tokenizers-0.20.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:81970b80b8ac126910295f8aab2d7ef962009ea39e0d86d304769493f69aaa1e"}, + {file = "tokenizers-0.20.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130e35e76f9337ed6c31be386e75d4925ea807055acf18ca1a9b0eec03d8fe23"}, + {file = "tokenizers-0.20.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd28a8614f5c82a54ab2463554e84ad79526c5184cf4573bbac2efbbbcead457"}, + {file = "tokenizers-0.20.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9041ee665d0fa7f5c4ccf0f81f5e6b7087f797f85b143c094126fc2611fec9d0"}, + {file = "tokenizers-0.20.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:62eb9daea2a2c06bcd8113a5824af8ef8ee7405d3a71123ba4d52c79bb3d9f1a"}, + {file = "tokenizers-0.20.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f861889707b54a9ab1204030b65fd6c22bdd4a95205deec7994dc22a8baa2ea4"}, + {file = "tokenizers-0.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:89d5c337d74ea6e5e7dc8af124cf177be843bbb9ca6e58c01f75ea103c12c8a9"}, + {file = "tokenizers-0.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0b7f515c83397e73292accdbbbedc62264e070bae9682f06061e2ddce67cacaf"}, + {file = "tokenizers-0.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0305fc1ec6b1e5052d30d9c1d5c807081a7bd0cae46a33d03117082e91908c"}, + {file = "tokenizers-0.20.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc611e6ac0fa00a41de19c3bf6391a05ea201d2d22b757d63f5491ec0e67faa"}, + {file = "tokenizers-0.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5ffe0d7f7bfcfa3b2585776ecf11da2e01c317027c8573c78ebcb8985279e23"}, + {file = "tokenizers-0.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e7edb8ec12c100d5458d15b1e47c0eb30ad606a05641f19af7563bc3d1608c14"}, + {file = "tokenizers-0.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:de291633fb9303555793cc544d4a86e858da529b7d0b752bcaf721ae1d74b2c9"}, + {file = "tokenizers-0.20.1.tar.gz", hash = "sha256:84edcc7cdeeee45ceedb65d518fffb77aec69311c9c8e30f77ad84da3025f002"}, ] [package.dependencies] @@ -4718,13 +4764,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "transformers" -version = "4.44.2" +version = "4.45.2" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" 
optional = false python-versions = ">=3.8.0" files = [ - {file = "transformers-4.44.2-py3-none-any.whl", hash = "sha256:1c02c65e7bfa5e52a634aff3da52138b583fc6f263c1f28d547dc144ba3d412d"}, - {file = "transformers-4.44.2.tar.gz", hash = "sha256:36aa17cc92ee154058e426d951684a2dab48751b35b49437896f898931270826"}, + {file = "transformers-4.45.2-py3-none-any.whl", hash = "sha256:c551b33660cfc815bae1f9f097ecfd1e65be623f13c6ee0dda372bd881460210"}, + {file = "transformers-4.45.2.tar.gz", hash = "sha256:72bc390f6b203892561f05f86bbfaa0e234aab8e927a83e62b9d92ea7e3ae101"}, ] [package.dependencies] @@ -4736,21 +4782,21 @@ pyyaml = ">=5.1" regex = "!=2019.12.17" requests = "*" safetensors = ">=0.4.1" -tokenizers = ">=0.19,<0.20" +tokenizers = ">=0.20,<0.21" tqdm = ">=4.27" [package.extras] -accelerate = ["accelerate (>=0.21.0)"] -agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision"] +accelerate = ["accelerate (>=0.26.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=0.9.16)", "tokenizers (>=0.20,<0.21)", "torch", "torchaudio", "torchvision"] audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -benchmark = ["optimum-benchmark (>=0.2.0)"] +benchmark = ["optimum-benchmark (>=0.3.0)"] codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort 
(>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +deepspeed = ["accelerate (>=0.26.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk (<=3.8.1)", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", 
"cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "nltk (<=3.8.1)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.20,<0.21)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "nltk (<=3.8.1)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.20,<0.21)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "libcst", "librosa", "nltk (<=3.8.1)", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.20,<0.21)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] @@ -4761,7 +4807,7 @@ natten = ["natten (>=0.14.6,<0.15.0)"] onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] optuna = ["optuna"] -quality = 
["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "ruff (==0.5.1)", "urllib3 (<2.0.0)"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "libcst", "rich", "ruff (==0.5.1)", "urllib3 (<2.0.0)"] ray = ["ray[tune] (>=2.7.0)"] retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] ruff = ["ruff (==0.5.1)"] @@ -4771,16 +4817,17 @@ serving = ["fastapi", "pydantic", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk (<=3.8.1)", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +tiktoken = ["blobfile", "tiktoken"] timm = ["timm (<=0.9.16)"] -tokenizers = ["tokenizers (>=0.19,<0.20)"] -torch = ["accelerate (>=0.21.0)", "torch"] +tokenizers = ["tokenizers (>=0.20,<0.21)"] +torch = ["accelerate (>=0.26.0)", "torch"] torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.23.2,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.19,<0.20)", "torch", "tqdm (>=4.27)"] +torchhub = ["filelock", "huggingface-hub (>=0.23.2,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.20,<0.21)", "torch", "tqdm (>=4.27)"] video = ["av (==9.2.0)", "decord (==0.6.0)"] vision = ["Pillow (>=10.0.1,<=15.0)"] @@ -5034,6 +5081,84 @@ files = [ {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, ] +[[package]] +name = "wordcloud" +version = "1.9.3" +description = "A little word cloud generator" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wordcloud-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5fce423a24e6ca1b89b2770a7c6917d6e26f04bcfefa601cf61819b2fc0770c4"}, + {file = "wordcloud-1.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3b6adfc1465b9176b8bc602745dd3ed8ea782b006a81cb59eab3dde92ad9f94c"}, + {file = 
"wordcloud-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6db37a6f5abeba51a5d503228ea320d4f2fa774864103e7b24acd9dd86fd0e"}, + {file = "wordcloud-1.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e74ac99e9582873d7ee28bd03e125dcf73ae46666d55fb4c13e82e90c0e074a"}, + {file = "wordcloud-1.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4001317c0e3b5cb6fd106228ddcd27524d1caf9ae468b3c2c2fc571c6ce56b22"}, + {file = "wordcloud-1.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f86042e5ce12e2795798033a56f0246906b4d7d9027d554b6cd951ce2fd342a"}, + {file = "wordcloud-1.9.3-cp310-cp310-win32.whl", hash = "sha256:3b90f0390c0a05ba4b4580fb765a3d45d8d21519b50ca5006d6dbdc2a0b86507"}, + {file = "wordcloud-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:6f7977285df9254b8704d3f895c06814a6183c6c89e140d6281848c076635e91"}, + {file = "wordcloud-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ced0d5c946d82cfc778febafe3eedeb0bae07dd57ea4f21fe06b9ec8225ab31"}, + {file = "wordcloud-1.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f5499e6360219e61808dc0d2b00cd5104f78a82d2ae8f7986df04731713835f"}, + {file = "wordcloud-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb1e8bb7d60f7a90fa8439c7b56dd1df60766115fd57480ac0d83ca5204e0117"}, + {file = "wordcloud-1.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e33328044db5c01487f2a3a023b5476947942dacd6a5dc8c217fa039f6c5bd9"}, + {file = "wordcloud-1.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:998dc0dc8fcbff88f566f17cb5e0eb3bb21fcafd387b0670be6c14feacaf4cdc"}, + {file = "wordcloud-1.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e1a1c3cfa86b605a19711ec58920ccb694dca9d5c9d00b373f4d5952d63793e9"}, + {file = "wordcloud-1.9.3-cp311-cp311-win32.whl", hash = "sha256:f504e3291256c0b6fca044602f8f0e5cb56b7c33724cde9d279c4077fa5b6d27"}, + {file = "wordcloud-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:103c9b0465e1cf5b7a38b49ab1c3a0b0301762fa56602ac79287f9d22b46ade3"}, + {file = "wordcloud-1.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dfea303fc1dec4811e4a5671a8021a89724b6fa70639d059ad30c492932be447"}, + {file = "wordcloud-1.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:512f3c9a2e8579269a33ac9219d042fd0cc5a3a524ee68079238a3e4efe2b879"}, + {file = "wordcloud-1.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d00d916509a17b432032161d492ed7f30b2ebd921303090fe1d2b57011a49cc0"}, + {file = "wordcloud-1.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5e0e7bbd269a62baa63ea2175faea4d74435c0ad828f3d5999fa4c33ebe0629"}, + {file = "wordcloud-1.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:483aa4f8d17b9744a3b238269593d1794b962fc757a72a9e7e8468c2665cffb7"}, + {file = "wordcloud-1.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:64b342a79553970fa04083761d041067323219ad62b5550a496e42436d23cbb3"}, + {file = "wordcloud-1.9.3-cp312-cp312-win32.whl", hash = "sha256:419acfe0b1d1227b9e3e14ec1bb6c40fd7fa652df4adf81f0ba3e00daca500b5"}, + {file = "wordcloud-1.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:2061a9978a6243107ce1a8a9fa24f421b03a0f7e620769b6f5075857e75aa615"}, + {file = "wordcloud-1.9.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21f47fabe189f39532378759300a624ae166519dfafbd6a22cfe65b14a7d104d"}, + {file = 
"wordcloud-1.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524065f8a5a79e00748f45efbeacd25ac1d15850e0d0588753b17a8b2de2a6a7"}, + {file = "wordcloud-1.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b2bb53492bc8663ba90a300bbd2da7be5059f9ad192ed1150e9bbbda8016c9a"}, + {file = "wordcloud-1.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:643243474faee460e7d08944d3e529c58d0cbf8be11626fbb918ee8ccb913a23"}, + {file = "wordcloud-1.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d95f44739a6972abfb97c12656999952dd28ed03700ee8b6efe35d688d489b36"}, + {file = "wordcloud-1.9.3-cp37-cp37m-win32.whl", hash = "sha256:e56364c8829d399397a649501f834c12751ab106cba488ba8d86d532889b528c"}, + {file = "wordcloud-1.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:78f4a3fd3526884e4f526ae070bcb47401766c48c9cb6488933f608f810fadae"}, + {file = "wordcloud-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0058cf08573c99283fe189e93354d20ca8c9a8aac7207d96e74b93aedd02cdcc"}, + {file = "wordcloud-1.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47d6918381a8a816141bdd391376bff703ec5aa3a6bd88631097a5e2963ebd1a"}, + {file = "wordcloud-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05aa3269c5af573cfb11e269de0fe73c2c72aefdd90cdb41368744e7d8bc7507"}, + {file = "wordcloud-1.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d74e206f42af172db4d3c0054853523bf46070b12f0626493a56599957dd2196"}, + {file = "wordcloud-1.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1932726635c8ed12bb74201d2a6b07f18c2f732aecadb9ae915832485241991f"}, + {file = "wordcloud-1.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:038de1701e7853c41850644453f1c9e69f878e480d42efae154684a47fd59f1a"}, + {file = "wordcloud-1.9.3-cp38-cp38-win32.whl", hash = "sha256:19aa05f60d9261301e4942fd1b1c4b458d903f24c12d2bd1c6ecbb752697a2f3"}, + {file = "wordcloud-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:ab5bae12cf27d8de986e4d4518d4778f2b56c660b250b631ff805024038311a1"}, + {file = "wordcloud-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:888d088f54a897b8597da2fae3954d74b1f7251f7d311bbcc30ec3c6987d3605"}, + {file = "wordcloud-1.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:daa6cfa11ce24e7eb4e42dc896dae4f74ae2166cf90ec997996300566e6811d1"}, + {file = "wordcloud-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387dc2bd528ff6bb661451f2a9fd4ccf74b86072d7a2c868285d4c0cf26abeb4"}, + {file = "wordcloud-1.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40c32a324319db610b40f387a2a0b42d091817958a5272e0a4c4eb6a158588b5"}, + {file = "wordcloud-1.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8078c6c58db4ccb893f120354e7e08bc48a5a5aac3e764f9008bc96a769b208c"}, + {file = "wordcloud-1.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81f15eb60abc1676808bb85e2edfdbdc0a9011383f2a729c1c2a0cb941516768"}, + {file = "wordcloud-1.9.3-cp39-cp39-win32.whl", hash = "sha256:1d1680bf6c3d1b2f8e3bd02ccfa868fee2655fe13cf5b9e9905251050448fbbd"}, + {file = "wordcloud-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:c0f458681e4d49be36064f21bfb1dc8d8c3021fe30e474ee634666b4f84fd851"}, + {file = "wordcloud-1.9.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baea9ac88ec1ab317461c75834b64ad5dad12a02c4f2384dd546eac3c316dbbb"}, + {file = 
"wordcloud-1.9.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6956b9f0d0eb14a12f46d41aebb4e7ad2d4c2ec417cc7c586bebd2ddc9c8311"}, + {file = "wordcloud-1.9.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d221b4d0d1d2a1d79286c41d8a4c0ce70065488f153e5d81cc0be7fb494ff10f"}, + {file = "wordcloud-1.9.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:db39dbe91dd31ffb667edcd496f4eeb85ceea397fef4ad51d0766ab934088cc7"}, + {file = "wordcloud-1.9.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6ae5db43807ca10f5c77dd2d22c78f8f9399758cc5ac6afd7f3c19e58b75d66"}, + {file = "wordcloud-1.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a1c431f20ee28a8840f2552a89bd8332c455c318f4de7b6c2ca3159b76df4f0"}, + {file = "wordcloud-1.9.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1847ca4466e2b1588478dd8eb87fa7baa28515b37ab7926471595e8ac81e6578"}, + {file = "wordcloud-1.9.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7b0e14e4dfcff7dee331df7880a2031e352e95a7d30e74ff152f162488b04179"}, + {file = "wordcloud-1.9.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f1c0cff6037a3dc46437537a31925f3895d742fb6d67af71194149763de16a76"}, + {file = "wordcloud-1.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a36788c5c79604653327675023cbd97c68813640887b51ce651bb4f5c28c88b"}, + {file = "wordcloud-1.9.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e3907c6496e197a9c4be76770c5ff8a03eddbdfe5a151a55e4eedeaa45ab3ad"}, + {file = "wordcloud-1.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:65e6f6b68eecb85c326ae19729dd4151fcdebffc2142c9ee882dc2de955210d0"}, + {file = "wordcloud-1.9.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0c8e18c4afa025819332efffe8008267a83a9c54fe72ae1bc889ddce0eec470d"}, + {file = "wordcloud-1.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4df25cb5dd347e43d53e02a009418f5776e7651063aff991865da8f6336bf193"}, + {file = "wordcloud-1.9.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53489ad22d58be3896ec16ed47604832e393224c89f7d7eed040096b07141ac4"}, + {file = "wordcloud-1.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61de4a5f3bfd33e0cb013cce6143bcf71959f3cd8536650b90134d745a553c2c"}, + {file = "wordcloud-1.9.3.tar.gz", hash = "sha256:a9aa738d63ed674a40f0cc31adb83f4ca5fc195f03a6aff6e010d1f5807d1c58"}, +] + +[package.dependencies] +matplotlib = "*" +numpy = ">=1.6.1" +pillow = "*" + [[package]] name = "xmltodict" version = "0.13.0" @@ -5066,4 +5191,4 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "77e24b6adebb30355bedf655b816026488dce12e0edd220e4960d7a1f02e10f3" +content-hash = "af5bb6f93e0ca00a16513276b649e2a3d17c390945fe36029e59232fd3a49258" diff --git a/pyproject.toml b/pyproject.toml index a5ca991..a65eb01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,11 @@ emoji = "^2.13.2" ftfy = "^6.2.3" streamlit = "^1.39.0" nltk = "^3.9.1" +transformers = "^4.45.2" +torch = "^2.4.1" +wordcloud = "^1.9.3" +seaborn = "^0.13.2" +openpyxl = "^3.1.5" [tool.poetry.group.test] diff --git a/tests/test_concatenate_consecutive_roles.py 
b/tests/test_concatenate_consecutive_roles.py index 25b5f03..fb35311 100644 --- a/tests/test_concatenate_consecutive_roles.py +++ b/tests/test_concatenate_consecutive_roles.py @@ -1,7 +1,7 @@ import pandas as pd import pytest -from dsp_interview_transcripts.pipeline.process_data import concatenate_consecutive_roles +from dsp_interview_transcripts.pipeline.archive.process_data import concatenate_consecutive_roles def test_concatenate_consecutive_roles(): diff --git a/tests/test_create_q_and_a_column.py b/tests/test_create_q_and_a_column.py index 7a05399..825441b 100644 --- a/tests/test_create_q_and_a_column.py +++ b/tests/test_create_q_and_a_column.py @@ -1,7 +1,7 @@ import pandas as pd import pytest -from dsp_interview_transcripts.pipeline.process_data import create_q_and_a_column, concatenate_consecutive_roles +from dsp_interview_transcripts.pipeline.archive.process_data import create_q_and_a_column, concatenate_consecutive_roles @pytest.fixture From 8030d228ca6fb502dcda3b05b0059c19503d1645 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Wed, 30 Oct 2024 09:46:37 +0000 Subject: [PATCH 09/25] Small updates --- .../notebooks/0_data_exploration.ipynb | 2 +- .../notebooks/semantic_chunking.ipynb | 236 ++++++++++++++---- dsp_interview_transcripts/pipeline/app.py | 31 +-- 3 files changed, 198 insertions(+), 71 deletions(-) diff --git a/dsp_interview_transcripts/notebooks/0_data_exploration.ipynb b/dsp_interview_transcripts/notebooks/0_data_exploration.ipynb index 8e52770..c636742 100644 --- a/dsp_interview_transcripts/notebooks/0_data_exploration.ipynb +++ b/dsp_interview_transcripts/notebooks/0_data_exploration.ipynb @@ -242,7 +242,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.10" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb b/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb index 6a67a04..04a8cf9 100644 --- a/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb +++ b/dsp_interview_transcripts/notebooks/semantic_chunking.ipynb @@ -21,46 +21,7 @@ "import pandas as pd\n", "\n", "from dsp_interview_transcripts import PROJECT_DIR\n", - "from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from sentence_transformers import SentenceTransformer\n", - "from sklearn.metrics.pairwise import cosine_similarity\n", - "\n", - "# Load model\n", - "small_model = SentenceTransformer('paraphrase-MiniLM-L3-v2')\n", - "\n", - "# Embed the target sentence\n", - "target_sentence = \"Are these instructions clear or do you need any further clarification?\"\n", - "target_embedding = small_model.encode([target_sentence])\n", - "\n", - "# Function to process each conversation\n", - "def remove_preamble(df, target_embedding=target_embedding, model=small_model):\n", - " \"\"\"Get rid of everything up until the bot asks if the instructions are clear\n", - " \"\"\"\n", - " # Filter BOT messages\n", - " bot_messages = df[df['role'] == 'BOT']\n", - " \n", - " # Embed BOT messages\n", - " bot_embeddings = model.encode(bot_messages['text_clean'].tolist())\n", - " \n", - " # Calculate cosine similarity\n", - " similarities = cosine_similarity(target_embedding, bot_embeddings).flatten()\n", - " \n", - " # Find the index of the most similar BOT message\n", - " most_similar_idx = similarities.argmax()\n", - " \n", - " # Get the timestamp of 
that message\n", - " cutoff_timestamp = bot_messages.iloc[most_similar_idx]['timestamp_clean']\n", - " \n", - " # Filter out messages prior to the cutoff timestamp\n", - " return df[df['timestamp_clean'] > cutoff_timestamp]" + "from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, remove_preamble" ] }, { @@ -137,7 +98,7 @@ "outputs": [], "source": [ "# Turn every conversation into one big block of text (mimics the format of other interview/focus group transcripts we might see)\n", - "df_grouped = interviews_df.groupby('conversation')['text_clean'].apply(lambda x: '. '.join(x)).reset_index()" + "df_grouped = interviews_cleaned_df.groupby('conversation')['text_clean'].apply(lambda x: '. '.join(x)).reset_index()" ] }, { @@ -147,10 +108,9 @@ "outputs": [], "source": [ "model_name = \"sentence-transformers/all-MiniLM-L6-v2\"\n", - "buffer_sizes = [1, 2, 3]\n", + "buffer_sizes = [1, 5, 10]\n", "\n", "all_results = {}\n", - "actual_chunks = {}\n", "\n", "for buffer_size in buffer_sizes:\n", " chunker = SemanticChunker(HuggingFaceEmbeddings(model_name=model_name), \n", @@ -168,7 +128,6 @@ " \n", " \n", " all_results[buffer_size] = [len(chunk_list) for chunk_list in results.values()]\n", - " actual_chunks[buffer_size] = results\n", "\n", "fig, axes = plt.subplots(nrows=1, ncols=len(buffer_sizes), figsize=(15, 5), sharey=True)\n", "\n", @@ -200,6 +159,195 @@ "`buffer_size` = the number of sentences either side to include. So if `buffer_size` is 1, you will get 3 sentences in each group." ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "model_name = \"sentence-transformers/all-MiniLM-L6-v2\"\n", + "\n", + "chunker = SemanticChunker(HuggingFaceEmbeddings(model_name=model_name), \n", + " breakpoint_threshold_type=\"percentile\", \n", + " buffer_size=1)\n", + "results = {}\n", + "\n", + "for idx, row in df_grouped.iterrows():\n", + " text = row['text_clean']\n", + " conv_id = row['conversation']\n", + " # Turn it into a langchain document\n", + " doc = Document(page_content=text)\n", + " chunked_docs = chunker.split_documents([doc])\n", + " results[conv_id] = [x.model_dump() for x in chunked_docs]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "new_results = {}\n", + "for conv in results.keys():\n", + " new_results[conv] = [x['page_content'] for x in results[conv]]\n", + " # print()\n", + " \n", + "new_results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = pd.DataFrame({\n", + " 'conversation': new_results.keys(),\n", + " 'content': new_results.values()\n", + "})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df.explode('content')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_grouped_new = interviews_cleaned_df.groupby('conversation').apply(lambda x: list(zip(x['uuid'], x['text_clean']))).reset_index(name='uuid_text_pairs')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_grouped_new" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "def chunk_with_uuids(pairs):\n", + " # Extract the text for chunking\n", + " text = '. '.join([t[1] for t in pairs])\n", + " \n", + " # Apply the semantic chunker to the text\n", + " doc = Document(page_content=text)\n", + " chunks = chunker.split_documents([doc])\n", + " \n", + " # For each chunk, find the corresponding UUIDs\n", + " chunk_uuid_mapping = []\n", + " \n", + " for chunk in chunks:\n", + " # For each chunk, identify the corresponding UUID(s)\n", + " uuids_in_chunk = []\n", + " chunk_text = chunk.page_content\n", + " \n", + " # Iterate through the original pairs to find corresponding UUIDs\n", + " for uuid, text_segment in pairs:\n", + " if text_segment in chunk_text:\n", + " uuids_in_chunk.append(uuid)\n", + " \n", + " # Append to the mapping (chunk text -> corresponding UUIDs)\n", + " chunk_uuid_mapping.append({\n", + " 'chunk_text': chunk_text,\n", + " 'uuids': uuids_in_chunk\n", + " })\n", + " \n", + " return chunk_uuid_mapping" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df_grouped_new" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "output = {}\n", + "\n", + "for idx, row in df_grouped_new.iterrows():\n", + " conv = row['conversation']\n", + " output[conv] = {}\n", + " pairs = row['uuid_text_pairs']\n", + " chunk_mapping = chunk_with_uuids(pairs)\n", + " output[conv] = chunk_mapping" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "flattened_data = []\n", + "for conversation, chunks in output.items():\n", + " for chunk in chunks:\n", + " flattened_data.append({\n", + " 'conversation': conversation,\n", + " 'chunk_text': chunk['chunk_text'],\n", + " 'uuids': chunk['uuids']\n", + " })\n", + "\n", + "# Step 2: Convert the flattened list to a DataFrame\n", + "df = pd.DataFrame(flattened_data)\n", + "df" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": null, diff --git a/dsp_interview_transcripts/pipeline/app.py b/dsp_interview_transcripts/pipeline/app.py index 277122c..d5c34ac 100644 --- a/dsp_interview_transcripts/pipeline/app.py +++ b/dsp_interview_transcripts/pipeline/app.py @@ -5,6 +5,7 @@ streamlit run dsp_interview_transcripts/pipeline/app.py ``` """ +from io import BytesIO import streamlit as st import pandas as pd import seaborn as sns @@ -26,32 +27,6 @@ # Set up the Streamlit app layout st.title("Prevalence and Sentiment Analysis") -# # Sidebar for user input -# st.sidebar.header("Select Options") - -# First section: Displaying the side-by-side bar plots -# st.subheader("Side-by-side Bar Plots") - -# # Creating the bar plot for the prevalence of each 'Name' using Seaborn -# fig, ax = plt.subplots(1, 2, figsize=(12, 6)) - -# # Prevalence of each 'Name' -# sns.countplot(x='Name', data=data_viz, ax=ax[0]) -# ax[0].set_title('Prevalence of Each Name') -# ax[0].set_xlabel('Name') -# ax[0].set_ylabel('Count') - -# # Percentage of 'sentiment' under each value of 'Name' using Seaborn -# sentiment_percentage = pd.crosstab(data_viz['Name'], data_viz['sentiment'], normalize='index') * 100 -# sentiment_percentage = 
sentiment_percentage.reset_index().melt(id_vars='Name', var_name='sentiment', value_name='percentage') -# sns.barplot(x='Name', y='percentage', hue='sentiment', data=sentiment_percentage, ax=ax[1], estimator=sum) -# ax[1].set_title('Percentage of Sentiment under Each Name') -# ax[1].set_xlabel('Name') -# ax[1].set_ylabel('Percentage') - -# # Displaying the plots side by side -# st.pyplot(fig) - # Creating the bar plot for the prevalence of each 'Name' using Seaborn fig, ax = plt.subplots(1, 2, figsize=(12, 6)) @@ -76,6 +51,10 @@ # Displaying the plots side by side plt.tight_layout() st.pyplot(fig) +fig_buffer = BytesIO() +fig.savefig(fig_buffer, format='png') +fig_buffer.seek(0) +st.download_button(label="Download Plot as PNG", data=fig_buffer, file_name="topic_and_sentiment_plot.png", mime="image/png") # Second section: User selects a 'Name' and displays a table st.subheader("Select a Topic and Display Corresponding Texts") From 94e28e4bb0b7b1b2b3508bc0067a5a46c4f5045e Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Thu, 31 Oct 2024 14:09:12 +0000 Subject: [PATCH 10/25] Rename app --- .../pipeline/{app.py => compare_models_app.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename dsp_interview_transcripts/pipeline/{app.py => compare_models_app.py} (98%) diff --git a/dsp_interview_transcripts/pipeline/app.py b/dsp_interview_transcripts/pipeline/compare_models_app.py similarity index 98% rename from dsp_interview_transcripts/pipeline/app.py rename to dsp_interview_transcripts/pipeline/compare_models_app.py index d5c34ac..c75f42b 100644 --- a/dsp_interview_transcripts/pipeline/app.py +++ b/dsp_interview_transcripts/pipeline/compare_models_app.py @@ -2,7 +2,7 @@ """ Usage: ``` -streamlit run dsp_interview_transcripts/pipeline/app.py +streamlit run dsp_interview_transcripts/pipeline/compare_models_app.py ``` """ from io import BytesIO From 05b9b4bfaa49a4f39458e3df1ce18ea4bf03c999 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Thu, 31 Oct 2024 14:10:16 +0000 Subject: [PATCH 11/25] Rename again because I was on the wrong branch --- .../pipeline/{compare_models_app.py => app.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename dsp_interview_transcripts/pipeline/{compare_models_app.py => app.py} (100%) diff --git a/dsp_interview_transcripts/pipeline/compare_models_app.py b/dsp_interview_transcripts/pipeline/app.py similarity index 100% rename from dsp_interview_transcripts/pipeline/compare_models_app.py rename to dsp_interview_transcripts/pipeline/app.py From 8a6315a2887551c54a4f72286e87513d790376e4 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Tue, 5 Nov 2024 16:05:19 +0000 Subject: [PATCH 12/25] Add csvs to gitignore --- .gitignore | 3 +++ dsp_interview_transcripts/pipeline/topic_modelling.py | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/.gitignore b/.gitignore index ea55ce7..0533173 100644 --- a/.gitignore +++ b/.gitignore @@ -171,3 +171,6 @@ outputs/* !/outputs/*.md # DS store in any folder **/.DS_Store + +# data files +*.csv \ No newline at end of file diff --git a/dsp_interview_transcripts/pipeline/topic_modelling.py b/dsp_interview_transcripts/pipeline/topic_modelling.py index 16883b3..64f223d 100644 --- a/dsp_interview_transcripts/pipeline/topic_modelling.py +++ b/dsp_interview_transcripts/pipeline/topic_modelling.py @@ -49,6 +49,10 @@ ) def stratified_sample(group, n=10): + #TODO: filter based on hdbscan probability + #TODO: find docs from centre of the cluster + + # Calculate the sample size for each combination of 'question' and 'sentiment' stratified_sample = 
group.groupby(['question', 'sentiment']).apply(lambda x: x.sample(frac=min(1, n / len(group)), random_state=42)) From b4a73537a43f1f4499461991ceee5065684654da Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Wed, 6 Nov 2024 09:54:57 +0000 Subject: [PATCH 13/25] Add poetry.lock to gitignore --- .gitignore | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 0533173..e9d277e 100644 --- a/.gitignore +++ b/.gitignore @@ -99,7 +99,7 @@ ipython_config.py # This is especially recommended for binary packages to ensure reproducibility, and is more # commonly ignored for libraries. # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock +poetry.lock # pdm # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. @@ -173,4 +173,4 @@ outputs/* **/.DS_Store # data files -*.csv \ No newline at end of file +*.csv From 6e469ff6efe1dd247db2120d0cb3199ec6aeb2ce Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Wed, 6 Nov 2024 14:55:32 +0000 Subject: [PATCH 14/25] Add types and docstrings --- .gitignore | 3 + .pre-commit-config.yaml | 48 +-- dsp_interview_transcripts/__init__.py | 12 +- dsp_interview_transcripts/config/base.yaml | 22 +- .../pipeline/archive/chunk_interviews.py | 86 ----- .../pipeline/archive/compare_models_app.py | 86 ----- .../compare_models_for_topic_naming.py | 80 ----- .../pipeline/archive/process_data.py | 99 ------ .../pipeline/archive/segment_interviews.py | 119 ------- .../pipeline/archive/topic_modelling.py | 150 -------- .../pipeline/name_clusters.py | 159 ++++++--- .../pipeline/prep_output_tables.py | 197 +++++++---- .../pipeline/process_data.py | 333 ++++++++++-------- .../pipeline/run_pipeline.py | 7 +- .../pipeline/top_down_analysis.py | 327 ++++++++++------- .../pipeline/topic_modelling.py | 163 +++++---- pyproject.toml | 38 -- 17 files changed, 776 insertions(+), 1153 deletions(-) delete mode 100644 dsp_interview_transcripts/pipeline/archive/chunk_interviews.py delete mode 100644 dsp_interview_transcripts/pipeline/archive/compare_models_app.py delete mode 100644 dsp_interview_transcripts/pipeline/archive/compare_models_for_topic_naming.py delete mode 100644 dsp_interview_transcripts/pipeline/archive/process_data.py delete mode 100644 dsp_interview_transcripts/pipeline/archive/segment_interviews.py delete mode 100644 dsp_interview_transcripts/pipeline/archive/topic_modelling.py diff --git a/.gitignore b/.gitignore index e9d277e..bf3fc2e 100644 --- a/.gitignore +++ b/.gitignore @@ -99,6 +99,7 @@ ipython_config.py # This is especially recommended for binary packages to ensure reproducibility, and is more # commonly ignored for libraries. 
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control + poetry.lock # pdm @@ -174,3 +175,5 @@ outputs/* # data files *.csv +poetry.lock +poetry.lock diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0235d10..d0dfb45 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -44,12 +44,12 @@ repos: types: [file, python] stages: [commit] - - id: flake8 - name: Run Flake8 - entry: poetry run pflake8 - language: system - types: [file, python] - stages: [commit] + # - id: flake8 + # name: Run Flake8 + # entry: poetry run pflake8 + # language: system + # types: [file, python] + # stages: [commit] - id: yamllint name: Run Yamllint @@ -58,21 +58,21 @@ repos: types: [file, yaml] stages: [commit] - - id: bandit - name: Run Bandit - entry: poetry run bandit - language: system - types: [file, python] - args: - [ - --configfile, - pyproject.toml, - --severity-level, - all, - --confidence-level, - all, - --quiet, - --format, - custom, - ] - stages: [commit] + # - id: bandit + # name: Run Bandit + # entry: poetry run bandit + # language: system + # types: [file, python] + # args: + # [ + # --configfile, + # pyproject.toml, + # --severity-level, + # all, + # --confidence-level, + # all, + # --quiet, + # --format, + # custom, + # ] + # stages: [commit] diff --git a/dsp_interview_transcripts/__init__.py b/dsp_interview_transcripts/__init__.py index c2ecf1b..555c112 100644 --- a/dsp_interview_transcripts/__init__.py +++ b/dsp_interview_transcripts/__init__.py @@ -1,28 +1,30 @@ import logging import os -from pathlib import Path -from typing import Optional -import yaml from pathlib import Path +from typing import Optional import dotenv +import yaml + dotenv.load_dotenv() + def get_yaml_config(file_path: Path) -> Optional[dict]: """Fetch yaml config and return as dict if it exists.""" if file_path.exists(): with open(file_path, "rt") as f: return yaml.load(f.read(), Loader=yaml.FullLoader) + # Define project base directory PROJECT_DIR = Path(__file__).resolve().parents[1] -# Define logger +# Define logger logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s") logger = logging.getLogger(__name__) # base/global config _base_config_path = Path(__file__).parent.resolve() / "config/base.yaml" -config = get_yaml_config(_base_config_path) \ No newline at end of file +config = get_yaml_config(_base_config_path) diff --git a/dsp_interview_transcripts/config/base.yaml b/dsp_interview_transcripts/config/base.yaml index b28c2d3..c2d180b 100644 --- a/dsp_interview_transcripts/config/base.yaml +++ b/dsp_interview_transcripts/config/base.yaml @@ -1,12 +1,12 @@ - +--- questions: ["What, if anything, do you know about the Boiler Upgrade Scheme? 
If you don't know anything about the scheme, just give it your best guess.", - "What, if anything, do you know about the process of applying for Boiler Upgrade Scheme funding?", - "What do you think are the eligibility requirements for someone to use this scheme?", - "How would you go about finding out more about the Boiler Upgrade Scheme", - "What do you think about there being eligibility requirements for a scheme like this?", - "As a homeowner, where do you see yourself in relation to the eligibility requirements?", - "What, if any, type of work do you think needs to be done to a house to replace fossil fuel heating systems?", - "What types of home upgrades would you consider getting done to your house to improve the efficiency of your heating system?", - "What types of work to your house wouldn't you consider?", - "What are some energy-efficient heating systems that you could consider, apart from the one currently in use at your home?", - "Is there anything we've talked about you'd like to discuss further?"] \ No newline at end of file + "What, if anything, do you know about the process of applying for Boiler Upgrade Scheme funding?", + "What do you think are the eligibility requirements for someone to use this scheme?", + "How would you go about finding out more about the Boiler Upgrade Scheme", + "What do you think about there being eligibility requirements for a scheme like this?", + "As a homeowner, where do you see yourself in relation to the eligibility requirements?", + "What, if any, type of work do you think needs to be done to a house to replace fossil fuel heating systems?", + "What types of home upgrades would you consider getting done to your house to improve the efficiency of your heating system?", + "What types of work to your house wouldn't you consider?", + "What are some energy-efficient heating systems that you could consider, apart from the one currently in use at your home?", + "Is there anything we've talked about you'd like to discuss further?"] diff --git a/dsp_interview_transcripts/pipeline/archive/chunk_interviews.py b/dsp_interview_transcripts/pipeline/archive/chunk_interviews.py deleted file mode 100644 index 91e3781..0000000 --- a/dsp_interview_transcripts/pipeline/archive/chunk_interviews.py +++ /dev/null @@ -1,86 +0,0 @@ -from langchain_experimental.text_splitter import SemanticChunker -from langchain_huggingface import HuggingFaceEmbeddings -from langchain.schema import Document - -import matplotlib.pyplot as plt -import pandas as pd - -from dsp_interview_transcripts import PROJECT_DIR -from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, remove_preamble, add_text_length - -MODEL = "sentence-transformers/all-MiniLM-L6-v2" -BUFFER_SIZE = 1 - -chunker = SemanticChunker(HuggingFaceEmbeddings(model_name=MODEL), - breakpoint_threshold_type="percentile", - buffer_size=BUFFER_SIZE) - -def chunk_with_uuids(pairs): - # Turn every conversation into one big block of text (take the second item from every uuid-text tuple) - text = '. 
'.join([t[1] for t in pairs]) - - # Apply the semantic chunker to the text - doc = Document(page_content=text) - chunks = chunker.split_documents([doc]) - - # For each chunk, find the corresponding UUIDs - chunk_uuid_mapping = [] - - for chunk in chunks: - # For each chunk, identify the corresponding UUID(s) - uuids_in_chunk = [] - chunk_text = chunk.page_content - - # Iterate through the original pairs to find corresponding UUIDs - for uuid, text_segment in pairs: - if text_segment in chunk_text: - uuids_in_chunk.append(uuid) - - # Append to the mapping (chunk text -> corresponding UUIDs) - chunk_uuid_mapping.append({ - 'chunk_text': chunk_text, - 'uuids': uuids_in_chunk - }) - - return chunk_uuid_mapping - -if __name__ == "__main__": - # Read in the raw data - data = pd.read_csv(PROJECT_DIR / 'data/qual_af_transcripts.csv') - - # Clean up the text a little and move audio transcriptions to the text column - interviews_df = clean_data(data) - - # Make sure the conversations are sorted by time, so that the replies go in the right order - interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp) - interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean')) - - # Remove everything up to when bot asks if the instructions are clear - everything before is just noise - interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True) - - df_grouped = interviews_cleaned_df.groupby('conversation').apply(lambda x: list(zip(x['uuid'], x['text_clean']))).reset_index(name='uuid_text_pairs') - - results = {} - - for idx, row in df_grouped.iterrows(): - conv = row['conversation'] - pairs = row['uuid_text_pairs'] - chunk_mapping = chunk_with_uuids(pairs) - results[conv] = chunk_mapping - - flattened_data = [] - for conversation, chunks in results.items(): - for chunk in chunks: - flattened_data.append({ - 'conversation': conversation, - 'chunk_text': chunk['chunk_text'], - 'uuids': chunk['uuids'] - }) - - df = pd.DataFrame(flattened_data) - - df = add_text_length(df, "chunk_text") - # Remove chunks that are too short - df = df[df['text_length'] > 4] - - df.to_csv(PROJECT_DIR / 'data/interviews_chunked.csv', index=False) diff --git a/dsp_interview_transcripts/pipeline/archive/compare_models_app.py b/dsp_interview_transcripts/pipeline/archive/compare_models_app.py deleted file mode 100644 index 43e2e40..0000000 --- a/dsp_interview_transcripts/pipeline/archive/compare_models_app.py +++ /dev/null @@ -1,86 +0,0 @@ - -""" -Usage: -``` -streamlit run dsp_interview_transcripts/pipeline/app.py -``` -""" - -import streamlit as st -import pandas as pd -import altair as alt -import re - -from dsp_interview_transcripts import PROJECT_DIR, config - -QUESTIONS = config['questions'] - -# Define the app layout -st.title("Topic Modeling Visualization") - -INTERVIEW_SECTIONS = [f"interview_q_{i}" for i in range(-1, 10) if i!=4] -DATA_SOURCES = ["user_messages", - "q_and_a", - ] + INTERVIEW_SECTIONS - -# Input options -data_source = st.selectbox("Select Data Source", DATA_SOURCES) - -if data_source in INTERVIEW_SECTIONS: - index = data_source.split("_")[-1] - if index=="-1": - st.info("These responses could not be matched to a question from the prompt.") - else: - st.info(QUESTIONS[int(index)]) - -minimum_cluster_size = st.selectbox("Select Minimum Cluster Size", [10, 50, 100]) - -# Validation: Prevent selection of '100' and 'home_upgrades_user' together -if data_source in INTERVIEW_SECTIONS 
and minimum_cluster_size > 10: - st.warning("For one of the 'interview_q' sources, you can only select a min. cluster size of 10. Please select a different combination.") -else: - - # Filepath based on user inputs - file_path = PROJECT_DIR / f"outputs/{data_source}_cluster_size_{minimum_cluster_size}_vis.csv" - - # Load the CSV data - @st.cache - def load_data(file_path): - return pd.read_csv(file_path) - - # Load the data and display it - if file_path.exists(): - df_vis = load_data(file_path) - else: - st.error(f"File not found: {file_path}") - - # Altair plotting - if not df_vis.empty: - # Define opacity condition - opacity_condition = alt.condition( - alt.datum.topic == -1, alt.value(0.1), alt.value(0.4) - ) - - # Create plot - fig = ( - alt.Chart(df_vis) - .mark_circle(size=50) - .encode( - x=alt.X( - "x:Q", - axis=alt.Axis(ticks=False, labels=False, title=None, grid=False), - ), - y=alt.Y( - "y:Q", - axis=alt.Axis(ticks=False, labels=False, title=None, grid=False), - ), - color=alt.Color("Name:N"), - opacity=opacity_condition, - tooltip=["Name:N", "doc:N"], - ) - .properties(width=900, height=600) - .interactive() - ) - - # Display the plot in the Streamlit app - st.altair_chart(fig, use_container_width=True) diff --git a/dsp_interview_transcripts/pipeline/archive/compare_models_for_topic_naming.py b/dsp_interview_transcripts/pipeline/archive/compare_models_for_topic_naming.py deleted file mode 100644 index 6784cca..0000000 --- a/dsp_interview_transcripts/pipeline/archive/compare_models_for_topic_naming.py +++ /dev/null @@ -1,80 +0,0 @@ -import ast -from langchain_community.chat_models import ChatOllama -import pandas as pd - -from langchain_core.output_parsers import JsonOutputParser -from pydantic import BaseModel, Field -from langchain.prompts import PromptTemplate - -from dsp_interview_transcripts import logger, PROJECT_DIR - -class NameDescription(BaseModel): - name: str = Field(description="Informative name for this group of documents") - description: str = Field(description="Description of this group of documents") - -prompt = """ - I have performed text clustering on some interviews where users were asked about their knowledge of - and opinions on different home heating options. - \n - One of the clusters contains the following user responses: - {docs} - The cluster is described by the following keywords: {keywords} - \n - Based on the information above, please provide a name and description for the cluster as a JSON object with two fields: - - name (a short, informative name for the cluster) - - description (a brief description of the cluster). - Example: - {{ - "name": "Energy Efficiency", - "description": "This cluster focuses on users' concerns about energy efficiency when choosing home heating options." 
- }} - """ - -parser = JsonOutputParser(pydantic_object=NameDescription) - -final_prompt = PromptTemplate( - template=prompt, - input_variables=["docs", "keywords"], - partial_variables={"format_instructions": parser.get_format_instructions()} - ) - -if __name__ == "__main__": - topic_info = pd.read_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info.csv") - - for col in ['Representation', 'KeyBERT', 'MMR', 'Representative_Docs']: - topic_info[col] = topic_info[col].apply(ast.literal_eval) - - - topics = topic_info["Topic"].unique() - - - for model in ["llama3.1", "llama3.2", "phi3"]: - - logger.info(f"Obtaining names and descriptions with model: {model}") - - ollama_model = ChatOllama( - model=model, temperature=0 - ) - llm_chain = final_prompt | ollama_model | parser - - results = {} - - for topic in topics: - if topic==-1: - continue - else: - logger.info(f"Processing topic {topic} with model {model}") - temp_df = topic_info[topic_info["Topic"] == topic] - docs = temp_df['Representative_Docs'].values[0] - keywords = temp_df['MMR'].values[0] - output = llm_chain.invoke({"docs":docs, "keywords":keywords}) - results[topic] = output - - topic_info[f'{model}_name'] = topic_info['Topic'].map(lambda x: results[x]['name'] if x in results else None) - topic_info[f'{model}_description'] = topic_info['Topic'].map(lambda x: results[x]['description'] if x in results else None) - - logger.info("Savng output...") - topic_info.to_csv(PROJECT_DIR / "outputs/user_messages_cluster_size_10_topic_info_with_names_descriptions.csv", index=False) - logger.info("Done!") - - diff --git a/dsp_interview_transcripts/pipeline/archive/process_data.py b/dsp_interview_transcripts/pipeline/archive/process_data.py deleted file mode 100644 index 261738a..0000000 --- a/dsp_interview_transcripts/pipeline/archive/process_data.py +++ /dev/null @@ -1,99 +0,0 @@ -import pandas as pd - -from dsp_interview_transcripts import PROJECT_DIR, logger -from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length, remove_preamble - -DATA_PATH = PROJECT_DIR / "data/qual_af_transcripts.csv" - -def concatenate_consecutive_roles(df, text_col='text_clean', conversation_col='conversation', role_col='role'): - """Concatenates consecutive rows with the same role within a conversation.""" - # Sort the dataframe to ensure correct order (if it's not already sorted) - df = df.sort_values(by=[conversation_col, 'timestamp']).reset_index(drop=True) - logger.info(f"Number of turns before concatenating consecutive roles: {len(df)}") - - # Create a mask to identify where the role changes or a new conversation starts - df['role_change'] = (df[conversation_col] != df[conversation_col].shift(1)) | (df[role_col] != df[role_col].shift(1)) - - # Assign group numbers to consecutive rows with the same role within the same conversation - df['turn'] = df['role_change'].cumsum() - - # Group by 'conversation' and 'group' to concatenate text - grouped = df.groupby([conversation_col, 'turn']).agg({ - 'timestamp': 'first', # Keep the first timestamp in each group - text_col: ' '.join, # Concatenate the 'text_clean' column - role_col: 'first', # Keep the role (either BOT or USER) - 'uuid': 'first' # Keep the first UUID in each group - }).reset_index() - - logger.info(f"Number of turns after concatenating consecutive roles: {len(grouped)}") - - grouped = grouped.drop(columns=['turn']) - - return grouped - -def create_q_and_a_column(data_df, text_col = 'text_clean', conversation_col='conversation'): - 
"""Concatenates each user message with the immediately preceding bot message. - In cases where the user has given a really short response, this should provide some - helpful context. - """ - # Initialize a list to store the Q&A - q_and_a_list = [] - - # Variable to keep track of the most recent BOT text - last_bot_text = "" - - df = data_df.copy() - - # Track the current conversation ID to know when it changes - current_conversation = None - - # Iterate over the rows of the DataFrame - for _, row in df.iterrows(): - - # Check if the conversation has changed - if current_conversation != row[conversation_col]: - current_conversation = row[conversation_col] - last_bot_text = "" # Reset the last bot text for a new conversation - - if row['role'] == 'BOT': - # Update the last bot text - last_bot_text = row[text_col] - q_and_a_list.append('') # No Q&A for BOT rows - elif row['role'] == 'USER': - # Combine the last bot text and current user text if there is a preceding bot text, - # otherwise just use the user text - q_and_a_list.append(f"{last_bot_text}\n{row[text_col]}" if last_bot_text else row[text_col]) - else: - # In case there's any other role, we leave it empty - q_and_a_list.append('') - - # Add the new 'q_and_a' column to the DataFrame - df['q_and_a'] = q_and_a_list - - return df - -if __name__ == "__main__": - interviews_df = pd.read_csv(DATA_PATH) - interviews_df = clean_data(interviews_df) - - logger.info(f"Number of interviews: {len(interviews_df['conversation'].unique())}") - - # Make sure the conversations are sorted by time, so that the replies go in the right order - interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp) - interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean')) - - # Remove everything up to when bot asks if the instructions are clear - everything before is just noise - interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True) - - # Group together consecutive responses by the same role - interviews_df = concatenate_consecutive_roles(interviews_df) - - interviews_df = create_q_and_a_column(interviews_df) - - interviews_df = add_text_length(interviews_df, 'text_clean') - - q_and_a_df = interviews_df[interviews_df['q_and_a'] != ''] - q_and_a_df.to_csv(PROJECT_DIR / "data/q_and_a.csv", index=False) - - user_messages = interviews_df[(interviews_df['role']=='USER') & (interviews_df['text_length'] >= 4)] # only answers at least 4 words long - user_messages.to_csv(PROJECT_DIR / "data/user_messages.csv", index=False) diff --git a/dsp_interview_transcripts/pipeline/archive/segment_interviews.py b/dsp_interview_transcripts/pipeline/archive/segment_interviews.py deleted file mode 100644 index 6e77ba3..0000000 --- a/dsp_interview_transcripts/pipeline/archive/segment_interviews.py +++ /dev/null @@ -1,119 +0,0 @@ -""" -Tries to broadly break the interview into sections by which question they follow. 
-""" - -import nltk -from nltk.tokenize import sent_tokenize -import numpy as np -import pandas as pd -from sentence_transformers import SentenceTransformer -from sklearn.metrics.pairwise import cosine_similarity -import torch - -from dsp_interview_transcripts import PROJECT_DIR, logger, config -from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length -from dsp_interview_transcripts.pipeline.archive.process_data import concatenate_consecutive_roles - -RANDOM_SEED = 42 -DATA_PATH = PROJECT_DIR / "data/qual_af_transcripts.csv" - -torch.manual_seed(RANDOM_SEED) - -SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") - -QUESTIONS = config['questions'] - -def process_bot_qs(interviews_df: pd.DataFrame) -> list: - """Produce a list of each unique sentence produced by the bot - """ - bot_qs = interviews_df[interviews_df['role'] == 'BOT'] - # split into individual sentences so that if the original question is contained within the utterance, - # we have a better chance of catching it - bot_qs["sentences"] = bot_qs['text_clean'].apply(lambda x: sent_tokenize(x)) - bot_qs = bot_qs.explode("sentences") - bot_qs_list = bot_qs['sentences'].unique().tolist() - return bot_qs_list, bot_qs - -def match_questions(bot_qs_list, questions, threshold=0.85): - """Find the best matches between the input questions from our interview template, - and the actual questions produced by the bot. - """ - bot_qs_embeddings = SENTENCE_MODEL.encode(bot_qs_list) - input_qs_embeddings = SENTENCE_MODEL.encode(questions) - - similarities = cosine_similarity(bot_qs_embeddings, input_qs_embeddings, ) - - # Find the index of the highest cosine similarity for each n-gram/lookup phrase combination - max_indices = np.argmax(similarities, axis=1) - - # Retrieve the text of the corresponding target phrases - most_similar_phrases = [questions[index] for index in max_indices] - - most_similar_similarities = [ - similarities[i, index] for i, index in enumerate(max_indices) - ] - - most_similar_pairs = list( - zip(bot_qs_list, most_similar_phrases, most_similar_similarities) - ) - - matches = pd.DataFrame( - most_similar_pairs, columns=["bot_q", "question", "cosine_similarity"] - ) - - final_matches = matches[matches['cosine_similarity'] > threshold] # temporary threshold until we've done some proper evaluation - - return final_matches - -def get_best_matches(bot_qs, final_matches, questions_df): - """ - Create a dataframe of the original bot utterances and the questions they were matched to - """ - questions_matched = pd.merge(bot_qs, final_matches, left_on='sentences', right_on='bot_q', how='inner') - # merge in the df that has the question number - questions_matched = pd.merge(questions_matched, questions_df, left_on='question', right_on='question', how='left') - - uuid_question_counts = questions_matched.groupby('uuid')['question'].nunique() - logger.info(f"The following utterances match to more than once question: {uuid_question_counts[uuid_question_counts > 1]}") - - # Group by 'uuid' and keep the row with the highest 'cosine_similarity' - questions_highest_similarity = questions_matched.loc[questions_matched.groupby('uuid')['cosine_similarity'].idxmax()] - return questions_highest_similarity - -if __name__ == "__main__": - interviews_df = pd.read_csv(DATA_PATH) - interviews_df = clean_data(interviews_df) - - logger.info(f"Number of interviews: {len(interviews_df['conversation'].unique())}") - - # Make sure the conversations are sorted by time, so that the replies go in the right 
order - interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp) - interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean')) - - # Group together consecutive responses by the same role - interviews_df = concatenate_consecutive_roles(interviews_df) - - questions_df = pd.DataFrame(enumerate(QUESTIONS), columns=["q_number", "question"]) - - bot_qs_list, bot_qs = process_bot_qs(interviews_df) - - final_matches = match_questions(bot_qs_list, QUESTIONS) - - questions_highest_similarity = get_best_matches(bot_qs, final_matches, questions_df) - - # Merge back into the original df - interviews_df = pd.merge(interviews_df, questions_highest_similarity[['uuid', 'question', 'q_number']], on='uuid', how='left') - - # Forward fill the matched questions and their question numbers - interviews_q_filled = interviews_df.copy() - interviews_q_filled['question'] = interviews_q_filled.groupby('conversation')['question'].ffill() - interviews_q_filled['q_number'] = interviews_q_filled.groupby('conversation')['q_number'].ffill() - - logger.info(f"Question counts: \n {interviews_q_filled['question'].value_counts()}") - - interviews_q_filled['q_number'] = interviews_q_filled['q_number'].replace({np.nan: -1}) - interviews_q_filled['q_number'] = interviews_q_filled['q_number'].astype(int) - - # Save a separate dataframe for user responses following each question - for q in interviews_q_filled['q_number'].unique(): - interviews_q_filled[(interviews_q_filled['q_number'] == q) & (interviews_q_filled['role'] == 'USER')].to_csv(PROJECT_DIR / 'data/interview_q_{}.csv'.format(q), index=False) diff --git a/dsp_interview_transcripts/pipeline/archive/topic_modelling.py b/dsp_interview_transcripts/pipeline/archive/topic_modelling.py deleted file mode 100644 index b68ec71..0000000 --- a/dsp_interview_transcripts/pipeline/archive/topic_modelling.py +++ /dev/null @@ -1,150 +0,0 @@ -from bertopic import BERTopic -import numpy as np -import pandas as pd -import random -from sentence_transformers import SentenceTransformer - -from hdbscan import HDBSCAN -from umap import UMAP -from sklearn.feature_extraction.text import CountVectorizer -from bertopic.representation import ( - KeyBERTInspired, - MaximalMarginalRelevance, - # OpenAI, - PartOfSpeech, -) - -from dsp_interview_transcripts import PROJECT_DIR - -# Set random seeds -RANDOM_SEED = 42 -np.random.seed(RANDOM_SEED) -random.seed(RANDOM_SEED) -# PyTorch seed (used by SentenceTransformer) -import torch - -torch.manual_seed(RANDOM_SEED) - -SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") - -INTERVIEW_SECTIONS = [f"interview_q_{i}" for i in range(-1, 10) if i != 4] # there were no hits for q4 for some reason -DATA_SOURCES = ["user_messages", - "q_and_a", - "interviews_chunked" - ] + INTERVIEW_SECTIONS - -MIN_CLUSTER_SIZES = [5, 10, 20, 50, 100] - -if __name__ == "__main__": - - for source in DATA_SOURCES: - for cluster_size in MIN_CLUSTER_SIZES: - if source in INTERVIEW_SECTIONS and cluster_size>10: - continue - if source=="interviews_chunked" and cluster_size>20: - continue - - df = pd.read_csv(PROJECT_DIR / f"data/{source}.csv") - - if source=="user_messages" or source in INTERVIEW_SECTIONS: - text_col = "text_clean" - elif source=="interviews_chunked": - text_col = "chunk_text" - else: - text_col = "q_and_a" - - umap_model = UMAP( - n_neighbors=15, - n_components=50, - min_dist=0.1, - metric="cosine", - random_state=RANDOM_SEED, - ) - - hdbscan_model = HDBSCAN( - 
min_cluster_size=cluster_size, - min_samples=1, - metric="euclidean", - cluster_selection_method="eom", - prediction_data=True, - ) - - vectorizer_model = CountVectorizer( - stop_words="english", - min_df=1, - max_df=0.85, - ngram_range=(1, 3), - ) - - # KeyBERT - keybert_model = KeyBERTInspired() - - # MMR - mmr_model = MaximalMarginalRelevance(diversity=0.3) - - - # All representation models - representation_model = { - "KeyBERT": keybert_model, - "MMR": mmr_model, - } - - topic_model = BERTopic( - # Pipeline models - embedding_model="sentence-transformers/all-MiniLM-L6-v2", - umap_model=umap_model, - hdbscan_model=hdbscan_model, - vectorizer_model=vectorizer_model, - representation_model=representation_model, - # Hyperparameters - top_n_words=10, - verbose=True, - calculate_probabilities=True, - ) - - # Convert NaNs to empty strings - df[text_col] = df[text_col].astype(str) - docs = df[text_col].tolist() - embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) - - topics, probs = topic_model.fit_transform(docs, embeddings) - - # save topic model - topic_model.save( - PROJECT_DIR / f"outputs/topic_model_{source}_cluster_size_{cluster_size}", - serialization="pytorch", - save_ctfidf=True, - save_embedding_model=SENTENCE_MODEL, - ) - - rep_docs = topic_model.get_representative_docs() - - # Save representative docs and keywords - # (These will be used to obtain summaries) - topic_info = pd.DataFrame(topic_model.get_topic_info()) - topic_info.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_topic_info.csv", index=False) - - umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2) - embeddings_2d = umap_2d.fit_transform(embeddings) - - topic_lookup = topic_model.get_topic_info()[["Topic", "Name"]] - - df_vis = pd.DataFrame(embeddings_2d, columns=["x", "y"]) - df_vis["topic"] = topics - df_vis = df_vis.merge(topic_lookup, left_on="topic", right_on="Topic", how="left") - df_vis["doc"] = docs - - if source=="interviews_chunked": - base_df = df[['conversation',text_col]] - else: - base_df = df[["uuid","conversation",text_col]] - - df_vis = pd.merge( - base_df, - df_vis, - left_on=text_col, - right_on="doc", - how="outer", - ) - - df_vis.to_csv(PROJECT_DIR / f"outputs/{source}_cluster_size_{cluster_size}_vis.csv", index=False) diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py index c73e4b6..b42ffd2 100644 --- a/dsp_interview_transcripts/pipeline/name_clusters.py +++ b/dsp_interview_transcripts/pipeline/name_clusters.py @@ -1,23 +1,28 @@ -import ast -import glob +"""Use a llama model to give names and descriptions for the topics.""" import json -from langchain_community.chat_models import ChatOllama -import os -import numpy as np -import pandas as pd -import re + +from typing import Dict from typing import List -from langchain_core.output_parsers import JsonOutputParser -from pydantic import BaseModel, Field +import pandas as pd + from langchain.prompts import PromptTemplate +from langchain_community.chat_models import ChatOllama +from langchain_core.output_parsers import JsonOutputParser +from pydantic import BaseModel +from pydantic import Field + +from dsp_interview_transcripts import PROJECT_DIR +from dsp_interview_transcripts import logger -from dsp_interview_transcripts import logger, PROJECT_DIR class NameDescription(BaseModel): + """Model for naming and describing a group of documents.""" + name: str = Field(description="Informative name for this group of documents") description: str = 
Field(description="Description of this group of documents") - + + prompt = """ I have performed text clustering on some interviews where users were asked about their knowledge of and opinions on different home heating options. In the interview, users were asked about their knowledge @@ -28,8 +33,8 @@ class NameDescription(BaseModel): {docs} The cluster is described by the following keywords: {keywords} \n - Based on the information above, please provide a name and summary for the cluster as a JSON object with two fields: - - name: A short, informative name for the cluster + Based on the information above, please provide a name and summary for the cluster as a JSON object with two fields: + - name: A short, informative name for the cluster - description: A summary of views of users within the cluster. You can include sentiments they express, reasons for their views, their knowledge levels, and any other relevant information. \n Provide nothing except for this JSON dict. @@ -42,33 +47,79 @@ class NameDescription(BaseModel): """ parser = JsonOutputParser(pydantic_object=NameDescription) - + final_prompt = PromptTemplate( - template=prompt, - input_variables=["docs", "keywords"], - partial_variables={"format_instructions": parser.get_format_instructions()} - ) + template=prompt, + input_variables=["docs", "keywords"], + partial_variables={"format_instructions": parser.get_format_instructions()}, +) -model="llama3.2" +model = "llama3.2" -ollama_model = ChatOllama( - model=model, temperature=0 - ) +ollama_model = ChatOllama(model=model, temperature=0) llm_chain = final_prompt | ollama_model | parser INPUT_PATH = PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv" -def save_list_of_dicts_as_jsonl(data, output_path): + +def save_list_of_dicts_as_jsonl(data: List[dict], output_path: str) -> None: + """ + Save a list of dictionaries as a JSONL file. + + Args: + data (List[dict]): The list of dictionaries to save. + output_path (str): The path to the output JSONL file. + """ with open(output_path, "w") as f: for entry in data: json_line = json.dumps(entry) f.write(json_line + "\n") - -def name_topics(topic_info: pd.DataFrame, llm_chain, topics: List[str], text_col='text_clean', top_words_col = 'Top Words', - topic_label_col='Cluster') -> dict: + + +def name_topics( + topic_info: pd.DataFrame, + llm_chain, + topics: List[str], + text_col: str = "text_clean", + top_words_col: str = "Top Words", + topic_label_col: str = "Cluster", +) -> Dict[str, dict]: + """ + Generate names and descriptions for each topic by invoking an LLM chain, + using representative text and keywords for each topic. + + Args: + topic_info (pd.DataFrame): A DataFrame containing topic information, + including columns for text samples, top words, and topic labels. + llm_chain: A language model chain used for generating topic names and descriptions. + It must support an `invoke` method that accepts a dictionary with 'docs' and 'keywords' keys. + topics (List[str]): A list of topic identifiers to process. + text_col (str, optional): Column name in `topic_info` containing the text data for each topic. + Defaults to 'text_clean'. + top_words_col (str, optional): Column name in `topic_info` containing the top words for each topic. + Defaults to 'Top Words'. + topic_label_col (str, optional): Column name in `topic_info` that indicates topic labels. + Defaults to 'Cluster'. 
+ + Returns: + Dict[str, dict]: A dictionary where each key is a topic identifier and each value is + a dictionary with the generated 'name' and 'description' for that topic. + + Raises: + Exception: Logs and continues on any exceptions encountered while processing topics, + capturing errors with the topic identifier and error message. + + Example: + >>> name_topics(topic_info=df, llm_chain=my_llm_chain, topics=["Topic 1", "Topic 2"]) + { + "Topic 1": {"name": "Customer Satisfaction", "description": "Documents discussing customer feedback and satisfaction."}, + "Topic 2": {"name": "Product Quality", "description": "Documents focusing on product durability and performance."} + } + """ + results = {} - + for topic in topics: logger.info(f"Processing topic {topic}") temp_df = topic_info[topic_info[topic_label_col] == topic] @@ -76,39 +127,51 @@ def name_topics(topic_info: pd.DataFrame, llm_chain, topics: List[str], text_col logger.info(f"Docs: {docs}") keywords = temp_df[top_words_col].values[0] logger.info(f"Keywords: {keywords}") - + try: output = llm_chain.invoke({"docs": docs, "keywords": keywords}) - print(output['name'], output['description']) + logger.info(f"Generated name: {output['name']}, description: {output['description']}") results[topic] = output except Exception as e: logger.error(f"Error processing topic {topic}: {str(e)}") - errors.append({"topic": topic, "error": str(e)}) - + results[topic] = {"error": str(e)} + return results + if __name__ == "__main__": errors = [] - + topic_info = pd.read_csv(INPUT_PATH) - - topic_info = topic_info.groupby(['Cluster', 'Top Words'])['text_clean'].apply(list).reset_index() - topic_info['Cluster'] = topic_info['Cluster'].astype(str) - + + topic_info = topic_info.groupby(["Cluster", "Top Words"])["text_clean"].apply(list).reset_index() + topic_info["Cluster"] = topic_info["Cluster"].astype(str) + topics = topic_info["Cluster"].unique().tolist() - - results = name_topics(topic_info, llm_chain, topics, text_col='text_clean', top_words_col = 'Top Words', - topic_label_col='Cluster') - - ## Some complicated conditionals to check that what's in `results` can be parsed ## - topic_info[f'{model}_name'] = topic_info['Cluster'].map(lambda x: results[x]['name'] if x in results and isinstance(results[x], dict) and 'name' in results[x] else None) - topic_info[f'{model}_description'] = topic_info['Cluster'].map(lambda x: results[x]['description'] if x in results and isinstance(results[x], dict) and 'description' in results[x] else None) - + + results = name_topics( + topic_info, llm_chain, topics, text_col="text_clean", top_words_col="Top Words", topic_label_col="Cluster" + ) + + # Some complicated conditionals to check that what's in `results` can be parsed + topic_info[f"{model}_name"] = topic_info["Cluster"].map( + lambda x: results[x]["name"] + if x in results and isinstance(results[x], dict) and "name" in results[x] + else None + ) + topic_info[f"{model}_description"] = topic_info["Cluster"].map( + lambda x: results[x]["description"] + if x in results and isinstance(results[x], dict) and "description" in results[x] + else None + ) + logger.info("Saving output...") - topic_info.to_csv(PROJECT_DIR / f"outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv", index=False) + topic_info.to_csv( + PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv", index=False + ) logger.info("Done!") - - ## Save errors so that they can be loaded in and manually reprocessed if necessary ## - 
save_list_of_dicts_as_jsonl(errors, PROJECT_DIR/"outputs/interim/errors.jsonl")
+
+    # Save errors so that they can be loaded in and manually reprocessed if necessary.
+    # Failed topics are now recorded inside `results`, so collect them before saving
+    # (otherwise the `errors` list initialised above would always be written out empty).
+    errors = [{"topic": t, **v} for t, v in results.items() if isinstance(v, dict) and "error" in v]
+    save_list_of_dicts_as_jsonl(errors, PROJECT_DIR / "outputs/interim/errors.jsonl")
diff --git a/dsp_interview_transcripts/pipeline/prep_output_tables.py b/dsp_interview_transcripts/pipeline/prep_output_tables.py
index 7b461f5..046d95b 100644
--- a/dsp_interview_transcripts/pipeline/prep_output_tables.py
+++ b/dsp_interview_transcripts/pipeline/prep_output_tables.py
@@ -1,95 +1,150 @@
+from typing import List
+from typing import Optional
+from typing import Union
+
 import altair as alt
-import matplotlib.pyplot as plt
 import pandas as pd
-import seaborn as sns
 
 from dsp_interview_transcripts import PROJECT_DIR
 
+
 OUTPUT_PATH_FULL_DATA = PROJECT_DIR / "outputs/final/final_df.csv"
 OUTPUT_PATH_SUMMARY = PROJECT_DIR / "outputs/final/summary_info.csv"
 
-opacity_condition = alt.condition(
-    alt.datum.Name == "None", alt.value(0.1), alt.value(0.6)
-    )
+opacity_condition = alt.condition(alt.datum.Name == "None", alt.value(0.1), alt.value(0.6))
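+# Fade rule for the scatterplots below: rows whose Name is "None" (the fill value
+# used later for responses without a named topic) are drawn at 10% opacity,
+# everything else at 60%.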
+
+
+def create_scatterplot(
+    data_viz: pd.DataFrame,
+    color: str = "Name:N",
+    tooltip: List[str] = ["Name:N", "Description:N", "question:N", "text_clean:N"],
+    domain: Optional[List[Union[str, int]]] = None,
+    range_: Optional[List[str]] = None,
+) -> alt.Chart:
+    """
+    Display texts as a scatterplot.
+
+    domain and range_ are used for specifying a 3-way colour scale when the
+    texts should be coloured by sentiment.
+
+    Args:
+        data_viz (pd.DataFrame): The DataFrame containing data to visualize. Must include columns for x and y
+            coordinates, along with the fields specified in `color` and `tooltip`.
+        color (str, optional): Encoding specification for the color channel. Defaults to "Name:N".
+        tooltip (List[str], optional): List of fields to display as tooltips. Defaults to
+            ["Name:N", "Description:N", "question:N", "text_clean:N"].
+        domain (Optional[List[Union[str, int]]], optional): Custom domain values for the color scale, defining
+            specific categories. Defaults to None.
+        range_ (Optional[List[str]], optional): Custom color range for the color scale, corresponding to the
+            domain values. Defaults to None.
+
+    Returns:
+        alt.Chart: An Altair chart object representing the scatterplot, with specified color, tooltips, and
+            interactivity.
+    """
-
-def create_scatterplot(data_viz, color="Name:N", tooltip=["Name:N","Description:N", "question:N","text_clean:N"],
-                       domain=None, range_=None):
-    
     if domain is not None and range_ is not None:
         color = alt.Color(color, scale=alt.Scale(domain=domain, range=range_))
     else:
         color = alt.Color(color)
-    
+
     fig = (
-        alt.Chart(data_viz)
-        .mark_circle(size=50)
-        .encode(
-            x=alt.X(
-                "x:Q",
-                axis=alt.Axis(ticks=False, labels=False, title=None, grid=False),
-            ),
-            y=alt.Y(
-                "y:Q",
-                axis=alt.Axis(ticks=False, labels=False, title=None, grid=False),
-            ),
-            color=color,
-            opacity=opacity_condition,
-            tooltip=tooltip,
-        )
-        .properties(width=900, height=600)
-        .interactive()
+        alt.Chart(data_viz)
+        .mark_circle(size=50)
+        .encode(
+            x=alt.X(
+                "x:Q",
+                axis=alt.Axis(ticks=False, labels=False, title=None, grid=False),
+            ),
+            y=alt.Y(
+                "y:Q",
+                axis=alt.Axis(ticks=False, labels=False, title=None, grid=False),
+            ),
+            color=color,
+            opacity=opacity_condition,  # module-level fade rule defined above
+            tooltip=tooltip,
+        )
+        .properties(width=900, height=600)
+        .interactive()
     )
-    
+
     return fig
 
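+
+# A minimal usage sketch (hypothetical inline data; the entry point below builds
+# data_viz from the real pipeline outputs):
+#
+#     points = pd.DataFrame(
+#         {
+#             "x": [0.1, 0.9],
+#             "y": [0.2, 0.8],
+#             "Name": ["Topic A", "None"],
+#             "Description": ["...", "None"],
+#             "question": ["Q1", "Q2"],
+#             "text_clean": ["first response", "second response"],
+#         }
+#     )
+#     create_scatterplot(points).save("example_scatter.html")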
 if __name__ == "__main__":
     rep_docs = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv")
     data = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics.csv")
-    data_w_names = pd.read_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv")
-    
-    topic_counts = pd.DataFrame(data['Cluster'].value_counts()).reset_index()
-    topic_counts = topic_counts.rename(columns={'count': 'N responses in topic'})
-    
-    data_w_names = data_w_names.rename(columns={"llama3.2_name": "Name",
-                                                "llama3.2_description": "Description"})
-    data_w_names = pd.merge(data_w_names, topic_counts, left_on='Cluster', right_on='Cluster', how='left')
-    
-    data_w_names[['Name', 'Description', 'Top Words', 'N responses in topic']].to_csv(OUTPUT_PATH_SUMMARY, index=False)
-    
-    rep_docs = pd.merge(rep_docs, data_w_names[['Cluster', 'Name', 'Description', 'N responses in topic']], on="Cluster", how="left")
-    
-    data_viz = pd.merge(data, data_w_names[['Cluster', 'Name', 'Description']], on="Cluster", how="left")
-    
-    data_viz['Name'] = data_viz['Name'].fillna("None")
-    data_viz['Description'] = data_viz['Description'].fillna("None")
-    
+    data_w_names = pd.read_csv(
+        PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv"
+    )
+
+    topic_counts = pd.DataFrame(data["Cluster"].value_counts()).reset_index()
+    topic_counts = topic_counts.rename(columns={"count": "N responses in topic"})
+
+    data_w_names = data_w_names.rename(columns={"llama3.2_name": "Name", "llama3.2_description": "Description"})
+    data_w_names = pd.merge(data_w_names, topic_counts, left_on="Cluster", right_on="Cluster", how="left")
+
+    data_w_names[["Name", "Description", "Top Words", "N responses in topic"]].to_csv(OUTPUT_PATH_SUMMARY, index=False)
+
+    rep_docs = pd.merge(
+        rep_docs, data_w_names[["Cluster", "Name", "Description", "N responses in topic"]], on="Cluster", how="left"
+    )
+
+    data_viz = pd.merge(data, data_w_names[["Cluster", "Name", "Description"]], on="Cluster", how="left")
+
+    data_viz["Name"] = data_viz["Name"].fillna("None")
+    data_viz["Description"] = data_viz["Description"].fillna("None")
+
     # Create binary column to indicate whether the user response is representative of the topic
-    merged_df = data_viz.merge(rep_docs[['conversation', 'uuid', 'Name']],
-                           on=['conversation', 'uuid', 'Name'],
-                           how='left',
-                           indicator=True)
-    
-    merged_df['Representative of topic'] = (merged_df['_merge'] == 'both').astype(int)
-    
-    merged_df = merged_df.drop(columns=['_merge'])
-    
-    final_df = merged_df[['Name','Description','Top Words', 'Representative of topic', 'question', 'context', 'text_clean', 'sentiment','conversation', 'uuid','timestamp']]
-    final_df = final_df.rename(columns={'text_clean': 'user_response',
-                                        'question': 'probable question',
-                                        'sentiment': 'predicted_sentiment',
-                                        'Name': 'Topic Name',
-                                        'Description': 'Topic Description',
-                                        'Top Words': 'Topic Top Words',})
-    
-    final_df.sort_values(['conversation', 'timestamp']).to_csv(OUTPUT_PATH_FULL_DATA, index=False)
-    
-    ### Visualise clusters ###
-    
+    merged_df = data_viz.merge(
+        rep_docs[["conversation", "uuid", "Name"]], on=["conversation", "uuid", "Name"], how="left", indicator=True
+    )
+
+    merged_df["Representative of topic"] = (merged_df["_merge"] == "both").astype(int)
+
+    merged_df = merged_df.drop(columns=["_merge"])
+
+    final_df = merged_df[
+        [
+            "Name",
+            "Description",
+            "Top Words",
+            "Representative of topic",
+            "question",
+            "context",
+            "text_clean",
+            "sentiment",
+            "conversation",
+            "uuid",
+            "timestamp",
+        ]
+    ]
+    final_df = final_df.rename(
+        columns={
+            "text_clean": "user_response",
+            "question": "probable question",
+            "sentiment": "predicted_sentiment",
+            "Name": "Topic Name",
+            "Description": "Topic Description",
+            "Top Words": "Topic Top Words",
+        }
+    )
+
+    final_df.sort_values(["conversation", "timestamp"]).to_csv(OUTPUT_PATH_FULL_DATA, index=False)
+
+    # Visualise clusters
+
-    fig = create_scatterplot()
+    fig = create_scatterplot(data_viz=data_viz)
     fig.save(PROJECT_DIR / "outputs/scatter_coloured_by_topic.html")
-    
-    fig_questions = create_scatterplot(data_viz=data_viz, color="question:N",)
+
+    fig_questions = create_scatterplot(
+        data_viz=data_viz,
+        color="question:N",
+    )
     fig_questions.save(PROJECT_DIR / "outputs/scatter_coloured_by_question.html")
-    
-    fig_sentiment = create_scatterplot(data_viz=data_viz, color="sentiment:N", domain=['Negative', 'Neutral', 'Positive'], range_=['red', 'gray', 'green'])
-    fig_sentiment.save(PROJECT_DIR / "outputs/scatter_coloured_by_sentiment.html")
\ No newline at end of file
+
+    fig_sentiment = create_scatterplot(
+        data_viz=data_viz,
+        color="sentiment:N",
+        domain=["Negative", "Neutral", "Positive"],
+        range_=["red", "gray", "green"],
+    )
+    fig_sentiment.save(PROJECT_DIR / "outputs/scatter_coloured_by_sentiment.html")
diff --git a/dsp_interview_transcripts/pipeline/process_data.py b/dsp_interview_transcripts/pipeline/process_data.py
index e1efefb..7dbac77 100644
--- a/dsp_interview_transcripts/pipeline/process_data.py
+++ b/dsp_interview_transcripts/pipeline/process_data.py
@@ -1,176 +1,200 @@
+import random
+
+from typing import List
+from typing import Optional
+from typing import Tuple
+
+import nltk
+import numpy as np
 import pandas as pd
-import random
-from transformers import AutoTokenizer, AutoModelForSequenceClassification
+import torch
+
+from nltk.corpus import stopwords
+from nltk.tokenize import sent_tokenize
 from scipy.special import softmax
+from sentence_transformers import SentenceTransformer
 from sklearn.metrics.pairwise import cosine_similarity
+from transformers import AutoModelForSequenceClassification
+from transformers import AutoTokenizer
 
-import nltk
+from dsp_interview_transcripts import PROJECT_DIR
+from dsp_interview_transcripts import config
+from dsp_interview_transcripts import logger
+from dsp_interview_transcripts.utils.data_cleaning import add_text_length
+from dsp_interview_transcripts.utils.data_cleaning import clean_data
+from dsp_interview_transcripts.utils.data_cleaning import convert_timestamp +from dsp_interview_transcripts.utils.data_cleaning import remove_preamble -from nltk.tokenize import sent_tokenize -from nltk.corpus import stopwords -nltk.download('stopwords') + +nltk.download("stopwords") nltk.download("punkt") -nltk.download('punkt_tab') +nltk.download("punkt_tab") -stop_words = set(stopwords.words('english')) - -import numpy as np -from sentence_transformers import SentenceTransformer +stop_words = set(stopwords.words("english")) # Set random seeds RANDOM_SEED = 42 np.random.seed(RANDOM_SEED) random.seed(RANDOM_SEED) # PyTorch seed (used by SentenceTransformer) -import torch torch.manual_seed(RANDOM_SEED) -from dsp_interview_transcripts import PROJECT_DIR, logger, config -from dsp_interview_transcripts.utils.data_cleaning import clean_data, convert_timestamp, add_text_length, remove_preamble - SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") DATA_PATH = PROJECT_DIR / "data/qual_af_transcripts.csv" -QUESTIONS = config['questions'] +QUESTIONS = config["questions"] MIN_LEN = 9 -def concatenate_consecutive_roles(df, text_col='text_clean', conversation_col='conversation', role_col='role'): - """Concatenates consecutive rows with the same role within a conversation.""" - # Sort the dataframe to ensure correct order (if it's not already sorted) - df = df.sort_values(by=[conversation_col, 'timestamp']).reset_index(drop=True) + +def concatenate_consecutive_roles( + df: pd.DataFrame, text_col: str = "text_clean", conversation_col: str = "conversation", role_col: str = "role" +) -> pd.DataFrame: + """ + Concatenates consecutive rows with the same role within a conversation. So if a user sends multiple short messages + in succession, these get turned into one larger message. + + Parameters: + - df (pd.DataFrame): DataFrame containing the conversation data. + - text_col (str): Name of the text column to concatenate. + - conversation_col (str): Name of the column identifying conversation groups. + - role_col (str): Name of the column identifying roles within the conversation. + + Returns: + - pd.DataFrame: DataFrame with concatenated text for consecutive roles. 
+ """ + # Sort the dataframe to ensure correct order + df = df.sort_values(by=[conversation_col, "timestamp"]).reset_index(drop=True) logger.info(f"Number of turns before concatenating consecutive roles: {len(df)}") # Create a mask to identify where the role changes or a new conversation starts - df['role_change'] = (df[conversation_col] != df[conversation_col].shift(1)) | (df[role_col] != df[role_col].shift(1)) + df["role_change"] = (df[conversation_col] != df[conversation_col].shift(1)) | ( + df[role_col] != df[role_col].shift(1) + ) # Assign group numbers to consecutive rows with the same role within the same conversation - df['turn'] = df['role_change'].cumsum() + df["turn"] = df["role_change"].cumsum() # Group by 'conversation' and 'group' to concatenate text - grouped = df.groupby([conversation_col, 'turn']).agg({ - 'timestamp': 'first', - text_col: ' '.join, - role_col: 'first', - 'uuid': 'first' - }).reset_index() - + grouped = ( + df.groupby([conversation_col, "turn"]) + .agg({"timestamp": "first", text_col: " ".join, role_col: "first", "uuid": "first"}) + .reset_index() + ) + logger.info(f"Number of turns after concatenating consecutive roles: {len(grouped)}") - grouped = grouped.drop(columns=['turn']) + grouped = grouped.drop(columns=["turn"]) return grouped -def create_q_and_a_column(data_df, text_col = 'text_clean', conversation_col='conversation'): - """Concatenates each user message with the immediately preceding bot message. - In cases where the user has given a really short response, this should provide some - helpful context. - """ - # Initialize a list to store the Q&A - q_and_a_list = [] - - # Variable to keep track of the most recent BOT text - last_bot_text = "" - - df = data_df.copy() - - # Track the current conversation ID to know when it changes - current_conversation = None - - # Iterate over the rows of the DataFrame - for _, row in df.iterrows(): - - # Check if the conversation has changed - if current_conversation != row[conversation_col]: - current_conversation = row[conversation_col] - last_bot_text = "" # Reset the last bot text for a new conversation - - if row['role'] == 'BOT': - # Update the last bot text - last_bot_text = row[text_col] - q_and_a_list.append('') # No Q&A for BOT rows - elif row['role'] == 'USER': - # Combine the last bot text and current user text if there is a preceding bot text, - # otherwise just use the user text - q_and_a_list.append(f"{last_bot_text}\n{row[text_col]}" if last_bot_text else row[text_col]) - else: - # In case there's any other role, we leave it empty - q_and_a_list.append('') - # Add the new 'q_and_a' column to the DataFrame - df['q_and_a'] = q_and_a_list +def process_bot_qs(interviews_df: pd.DataFrame) -> Tuple[List[str], pd.DataFrame]: + """ + Produces a list of unique sentences produced by the bot. + These will be matched to the interview guide. - return df + Parameters: + - interviews_df (pd.DataFrame): DataFrame with bot conversation data. -def process_bot_qs(interviews_df: pd.DataFrame) -> list: - """Produce a list of each unique sentence produced by the bot + Returns: + - Tuple[List[str], pd.DataFrame]: A list of unique sentences and DataFrame with exploded sentences. 
""" - bot_qs = interviews_df[interviews_df['role'] == 'BOT'] + bot_qs = interviews_df[interviews_df["role"] == "BOT"] # split into individual sentences so that if the original question is contained within the utterance, # we have a better chance of catching it - bot_qs["sentences"] = bot_qs['text_clean'].apply(lambda x: sent_tokenize(x)) + bot_qs["sentences"] = bot_qs["text_clean"].apply(lambda x: sent_tokenize(x)) bot_qs = bot_qs.explode("sentences") - bot_qs_list = bot_qs['sentences'].unique().tolist() + bot_qs_list = bot_qs["sentences"].unique().tolist() return bot_qs_list, bot_qs -def match_questions(bot_qs_list, questions, threshold=0.85): - """Find the best matches between the input questions from our interview template, - and the actual questions produced by the bot. + +def match_questions(bot_qs_list: List[str], questions: List[str], threshold: float = 0.85) -> pd.DataFrame: + """Match the input questions from our interview template, + and the actual questions produced by the bot, using cosine similarity. + + Parameters: + - bot_qs_list (List[str]): List of bot questions. + - questions (List[str]): List of input questions to match against. + - threshold (float): Similarity threshold for considering a match. + + Returns: + - pd.DataFrame: DataFrame containing matched questions above the threshold. """ bot_qs_embeddings = SENTENCE_MODEL.encode(bot_qs_list) input_qs_embeddings = SENTENCE_MODEL.encode(questions) - similarities = cosine_similarity(bot_qs_embeddings, input_qs_embeddings, ) - + similarities = cosine_similarity( + bot_qs_embeddings, + input_qs_embeddings, + ) + # Find the index of the highest cosine similarity for each n-gram/lookup phrase combination max_indices = np.argmax(similarities, axis=1) # Retrieve the text of the corresponding target phrases most_similar_phrases = [questions[index] for index in max_indices] - most_similar_similarities = [ - similarities[i, index] for i, index in enumerate(max_indices) - ] + most_similar_similarities = [similarities[i, index] for i, index in enumerate(max_indices)] + + most_similar_pairs = list(zip(bot_qs_list, most_similar_phrases, most_similar_similarities)) + + matches = pd.DataFrame(most_similar_pairs, columns=["bot_q", "question", "cosine_similarity"]) - most_similar_pairs = list( - zip(bot_qs_list, most_similar_phrases, most_similar_similarities) - ) + final_matches = matches[ + matches["cosine_similarity"] > threshold + ] # temporary threshold until we've done some proper evaluation - matches = pd.DataFrame( - most_similar_pairs, columns=["bot_q", "question", "cosine_similarity"] - ) - - final_matches = matches[matches['cosine_similarity'] > threshold] # temporary threshold until we've done some proper evaluation - return final_matches -def get_best_matches(bot_qs, final_matches, questions_df): + +def get_best_matches(bot_qs: pd.DataFrame, final_matches: pd.DataFrame, questions_df: pd.DataFrame) -> pd.DataFrame: """ - Create a dataframe of the original bot utterances and the questions they were matched to + Retrieves original bot utterances and their best matching questions. + + Parameters: + - bot_qs (pd.DataFrame): DataFrame of bot questions split into sentences. + - final_matches (pd.DataFrame): DataFrame of matched questions. + - questions_df (pd.DataFrame): DataFrame containing question details. + + Returns: + - pd.DataFrame: DataFrame with highest similarity match for each bot utterance. 
""" - questions_matched = pd.merge(bot_qs, final_matches, left_on='sentences', right_on='bot_q', how='inner') + questions_matched = pd.merge(bot_qs, final_matches, left_on="sentences", right_on="bot_q", how="inner") # merge in the df that has the question number - questions_matched = pd.merge(questions_matched, questions_df, left_on='question', right_on='question', how='left') - - uuid_question_counts = questions_matched.groupby('uuid')['question'].nunique() - logger.info(f"The following utterances match to more than once question: {uuid_question_counts[uuid_question_counts > 1]}") - + questions_matched = pd.merge(questions_matched, questions_df, left_on="question", right_on="question", how="left") + + uuid_question_counts = questions_matched.groupby("uuid")["question"].nunique() + logger.info( + f"The following utterances match to more than once question: {uuid_question_counts[uuid_question_counts > 1]}" + ) + # Group by 'uuid' and keep the row with the highest 'cosine_similarity' - questions_highest_similarity = questions_matched.loc[questions_matched.groupby('uuid')['cosine_similarity'].idxmax()] + questions_highest_similarity = questions_matched.loc[ + questions_matched.groupby("uuid")["cosine_similarity"].idxmax() + ] return questions_highest_similarity -def get_sentiment(texts): + +def get_sentiment(texts: List[str]) -> List[str]: + """ + Predicts sentiment of texts using a pre-trained RoBERTa sentiment model. + + Parameters: + - texts (List[str]): List of texts to analyze for sentiment. + + Returns: + - List[str]: List of predicted sentiment labels ('Negative', 'Neutral', 'Positive') for each text. + """ roberta = "cardiffnlp/twitter-roberta-base-sentiment-latest" model = AutoModelForSequenceClassification.from_pretrained(roberta) tokenizer = AutoTokenizer.from_pretrained(roberta) - - labels = ['Negative', 'Neutral', 'Positive'] + + labels = ["Negative", "Neutral", "Positive"] # Tokenize all texts at once - encoded_texts = tokenizer(texts, return_tensors='pt', padding=True, truncation=True) - + encoded_texts = tokenizer(texts, return_tensors="pt", padding=True, truncation=True) + # Pass all encoded texts through the model at once with torch.no_grad(): # Disable gradient computation for faster inference output = model(**encoded_texts) @@ -178,75 +202,100 @@ def get_sentiment(texts): # Apply softmax to the scores scores = output.logits.detach().numpy() probabilities = softmax(scores, axis=1) - + # Get the label with the highest probability for each text predicted_labels = [] for prob in probabilities: max_index = prob.argmax() # Get the index of the highest probability predicted_labels.append(labels[max_index]) # Get the corresponding label - + return predicted_labels -def create_context(row, df): + +def create_context(row: pd.Series, df: pd.DataFrame) -> str: + """ + This is used to create an additional column in the dataframe that contains the preceding BOT > USER > BOT + sequence before each USER message. + + You should first use concatenate_consecutive_roles() to make sure + that the conversation is dyadic - otherwise you will just get the preceding 3 rows of data. + + Parameters: + - row (pd.Series): Current row from the DataFrame to generate context for. + - df (pd.DataFrame): DataFrame containing the conversation data. + + Returns: + - str: Concatenated text of previous entries as context. 
+ """ # Only proceed if the row is a USER entry - if row['role'] == 'USER': + if row["role"] == "USER": idx = row.name # Current row index if idx >= 3: # We need at least 3 previous rows to build context - prev_rows = df.iloc[idx-3:idx] # Take previous 3 rows + prev_rows = df.iloc[idx - 3 : idx] # Take previous 3 rows else: prev_rows = df.iloc[:idx] - - return ' | '.join(prev_rows['text_clean']) + + return " | ".join(prev_rows["text_clean"]) + if __name__ == "__main__": interviews_df = pd.read_csv(DATA_PATH) interviews_df = clean_data(interviews_df) - + logger.info(f"Number of interviews: {len(interviews_df['conversation'].unique())}") - + # Make sure the conversations are sorted by time, so that the replies go in the right order - interviews_df['timestamp_clean'] = interviews_df['timestamp'].apply(convert_timestamp) - interviews_df = interviews_df.groupby('conversation', group_keys=False).apply(lambda x: x.sort_values('timestamp_clean')) - + interviews_df["timestamp_clean"] = interviews_df["timestamp"].apply(convert_timestamp) + interviews_df = interviews_df.groupby("conversation", group_keys=False).apply( + lambda x: x.sort_values("timestamp_clean") + ) + # Remove everything up to when bot asks if the instructions are clear - everything before is just noise - interviews_cleaned_df = interviews_df.groupby('conversation').apply(remove_preamble).reset_index(drop=True) - + interviews_cleaned_df = interviews_df.groupby("conversation").apply(remove_preamble).reset_index(drop=True) + # Group together consecutive responses by the same role interviews_df = concatenate_consecutive_roles(interviews_df) - + questions_df = pd.DataFrame(enumerate(QUESTIONS), columns=["q_number", "question"]) - + bot_qs_list, bot_qs = process_bot_qs(interviews_df) - + final_matches = match_questions(bot_qs_list, QUESTIONS) - + questions_highest_similarity = get_best_matches(bot_qs, final_matches, questions_df) - + # Merge back into the original df - interviews_df = pd.merge(interviews_df, questions_highest_similarity[['uuid', 'question', 'q_number', 'cosine_similarity']], on='uuid', how='left') - + interviews_df = pd.merge( + interviews_df, + questions_highest_similarity[["uuid", "question", "q_number", "cosine_similarity"]], + on="uuid", + how="left", + ) + # Forward fill the matched questions and their question numbers interviews_q_filled = interviews_df.copy() - interviews_q_filled['question'] = interviews_q_filled.groupby('conversation')['question'].ffill() - interviews_q_filled['q_number'] = interviews_q_filled.groupby('conversation')['q_number'].ffill() - + interviews_q_filled["question"] = interviews_q_filled.groupby("conversation")["question"].ffill() + interviews_q_filled["q_number"] = interviews_q_filled.groupby("conversation")["q_number"].ffill() + interviews_q_filled = add_text_length(interviews_q_filled) - - interviews_q_filled['context'] = interviews_q_filled.apply(create_context, df=interviews_q_filled, axis=1) - - user_messages = interviews_q_filled[(interviews_q_filled['role']=='USER')& (interviews_q_filled['text_length'] > MIN_LEN)] - + + interviews_q_filled["context"] = interviews_q_filled.apply(create_context, df=interviews_q_filled, axis=1) + + user_messages = interviews_q_filled[ + (interviews_q_filled["role"] == "USER") & (interviews_q_filled["text_length"] > MIN_LEN) + ] + # Get sentiments!! 
- texts = user_messages['text_clean'].tolist() + texts = user_messages["text_clean"].tolist() sentiments = get_sentiment(texts) - - user_messages['sentiment'] = sentiments - - logger.info(user_messages['sentiment'].value_counts()) - + + user_messages["sentiment"] = sentiments + + logger.info(user_messages["sentiment"].value_counts()) + logger.info(f"Number of user messages: {len(user_messages)}") - + logger.info("Saving data...") user_messages.to_csv(PROJECT_DIR / f"data/user_messages_min_len_{MIN_LEN}_w_sentiment.csv", index=False) - - logger.info("Done!") \ No newline at end of file + + logger.info("Done!") diff --git a/dsp_interview_transcripts/pipeline/run_pipeline.py b/dsp_interview_transcripts/pipeline/run_pipeline.py index 6a8e2b6..c91d9a0 100644 --- a/dsp_interview_transcripts/pipeline/run_pipeline.py +++ b/dsp_interview_transcripts/pipeline/run_pipeline.py @@ -7,9 +7,10 @@ ``` Alternatively check the bar at the top of your screen. A llama icon indicates that Ollama is running. """ -from pathlib import Path import subprocess -import os + +from pathlib import Path + script_parent = Path(__file__).parent @@ -20,4 +21,4 @@ subprocess.run(f"python {script_parent / 'prep_output_tables.py'}", shell=True) # Top down approach -subprocess.run(f"python {script_parent / 'top_down_analysis.py'}", shell=True) \ No newline at end of file +subprocess.run(f"python {script_parent / 'top_down_analysis.py'}", shell=True) diff --git a/dsp_interview_transcripts/pipeline/top_down_analysis.py b/dsp_interview_transcripts/pipeline/top_down_analysis.py index a550e27..252a3c4 100644 --- a/dsp_interview_transcripts/pipeline/top_down_analysis.py +++ b/dsp_interview_transcripts/pipeline/top_down_analysis.py @@ -1,63 +1,66 @@ +"""Create topic models within each question""" +import random + +from typing import Any +from typing import Dict +from typing import List + import altair as alt -from bertopic import BERTopic import numpy as np import pandas as pd -import random -from sentence_transformers import SentenceTransformer -from openpyxl import Workbook +import torch +from bertopic import BERTopic +from bertopic.representation import KeyBERTInspired # OpenAI, +from bertopic.representation import MaximalMarginalRelevance from hdbscan import HDBSCAN -from umap import UMAP -from sklearn.feature_extraction.text import CountVectorizer -from bertopic.representation import ( - KeyBERTInspired, - MaximalMarginalRelevance, - # OpenAI, - PartOfSpeech, -) - -from langchain_core.output_parsers import JsonOutputParser -from pydantic import BaseModel, Field from langchain.prompts import PromptTemplate from langchain_community.chat_models import ChatOllama +from langchain_core.output_parsers import JsonOutputParser +from pydantic import BaseModel +from pydantic import Field +from sentence_transformers import SentenceTransformer +from sklearn.feature_extraction.text import CountVectorizer +from umap import UMAP + +from dsp_interview_transcripts import PROJECT_DIR +from dsp_interview_transcripts import logger -from dsp_interview_transcripts import PROJECT_DIR, logger -pd.set_option('display.max_colwidth', 500) +pd.set_option("display.max_colwidth", 500) # Set random seeds RANDOM_SEED = 42 np.random.seed(RANDOM_SEED) random.seed(RANDOM_SEED) # PyTorch seed (used by SentenceTransformer) -import torch torch.manual_seed(RANDOM_SEED) SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") umap_model = UMAP( - n_neighbors=15, - n_components=50, - min_dist=0.1, - metric="cosine", - random_state=RANDOM_SEED, - ) + n_neighbors=15, + 
n_components=50, + min_dist=0.1, + metric="cosine", + random_state=RANDOM_SEED, +) hdbscan_model = HDBSCAN( - min_cluster_size=5, # setting this really small because some questions don't have many responses - min_samples=1, - metric="euclidean", - cluster_selection_method="eom", - prediction_data=True, - ) + min_cluster_size=5, # setting this really small because some questions don't have many responses + min_samples=1, + metric="euclidean", + cluster_selection_method="eom", + prediction_data=True, +) vectorizer_model = CountVectorizer( - stop_words="english", - min_df=1, - max_df=0.85, - ngram_range=(1, 3), - ) + stop_words="english", + min_df=1, + max_df=0.85, + ngram_range=(1, 3), +) # KeyBERT keybert_model = KeyBERTInspired() @@ -68,29 +71,39 @@ # All representation models representation_model = { - "KeyBERT": keybert_model, - # "OpenAI": openai_model, # Uncomment if you will use OpenAI - "MMR": mmr_model, - # "POS": pos_model, - } + "KeyBERT": keybert_model, + # "OpenAI": openai_model, # Uncomment if you will use OpenAI + "MMR": mmr_model, + # "POS": pos_model, +} topic_model = BERTopic( - # Pipeline models - embedding_model="sentence-transformers/all-MiniLM-L6-v2", - umap_model=umap_model, - hdbscan_model=hdbscan_model, - vectorizer_model=vectorizer_model, - representation_model=representation_model, - # Hyperparameters - top_n_words=10, - verbose=True, - calculate_probabilities=True, - ) + # Pipeline models + embedding_model="sentence-transformers/all-MiniLM-L6-v2", + umap_model=umap_model, + hdbscan_model=hdbscan_model, + vectorizer_model=vectorizer_model, + representation_model=representation_model, + # Hyperparameters + top_n_words=10, + verbose=True, + calculate_probabilities=True, +) + class NameDescription(BaseModel): + """ + A model representing a named group of documents, including a brief description. + + Attributes: + name (str): A concise, informative name for this group of documents. + description (str): A detailed description providing context for the group of documents. + """ + name: str = Field(description="Informative name for this group of documents") description: str = Field(description="Description of this group of documents") - + + prompt = """ I have data for some interviews where users were asked about their knowledge of and opinions on different home heating options. In the interview, users were asked about their knowledge @@ -103,8 +116,8 @@ class NameDescription(BaseModel): {docs} The cluster is described by the following keywords: {keywords} \n - Based on the information above, please provide a name and summary for the cluster as a JSON object with two fields: - - name: A short, informative name for the cluster + Based on the information above, please provide a name and summary for the cluster as a JSON object with two fields: + - name: A short, informative name for the cluster - description: A summary of views of users within the cluster. You can include sentiments they express, reasons for their views, their knowledge levels, and any other relevant information. \n Provide nothing except for this JSON dict. 
@@ -117,25 +130,61 @@ class NameDescription(BaseModel): """ parser = JsonOutputParser(pydantic_object=NameDescription) - + final_prompt = PromptTemplate( - template=prompt, - input_variables=["question","docs", "keywords"], - partial_variables={"format_instructions": parser.get_format_instructions()} - ) + template=prompt, + input_variables=["question", "docs", "keywords"], + partial_variables={"format_instructions": parser.get_format_instructions()}, +) -model="llama3.2" +model = "llama3.2" -ollama_model = ChatOllama( - model=model, temperature=0 - ) +ollama_model = ChatOllama(model=model, temperature=0) llm_chain = final_prompt | ollama_model | parser -def name_topics(topic_info: pd.DataFrame, llm_chain, topics, text_col='text_clean', top_words_col = 'Top Words', - topic_label_col='Cluster', question='') -> dict: + +def name_topics( + topic_info: pd.DataFrame, + llm_chain: Any, + topics: List[str], + text_col: str = "text_clean", + top_words_col: str = "Top Words", + topic_label_col: str = "Cluster", + question: str = "", +) -> Dict[str, Dict[str, str]]: + """ + Generate names and descriptions for each topic by invoking an LLM chain, + using representative text and keywords for each topic. + + Args: + topic_info (pd.DataFrame): A DataFrame containing information about topics, + with columns for text samples, top words, and topic labels. + llm_chain (Any): A language model chain with an `invoke` method that accepts + a dictionary containing 'question', 'docs', and 'keywords'. + topics (List[str]): A list of topic identifiers to process. + text_col (str, optional): The name of the column in `topic_info` containing + text data for each topic. Defaults to "text_clean". + top_words_col (str, optional): The name of the column in `topic_info` containing + the top words for each topic. Defaults to "Top Words". + topic_label_col (str, optional): The name of the column in `topic_info` that + indicates topic labels. Defaults to "Cluster". + question (str, optional): A question to provide context to the language model + when generating names and descriptions. Defaults to an empty string. + + Returns: + Dict[str, Dict[str, str]]: A dictionary where each key is a topic identifier + and each value is a dictionary containing 'name' and 'description' for that topic. 
+ + Example: + >>> name_topics(topic_info=df, llm_chain=my_llm_chain, topics=["Topic 1", "Topic 2"], question="What is this topic about?") + { + "Topic 1": {"name": "Customer Satisfaction", "description": "Documents discussing customer feedback and satisfaction."}, + "Topic 2": {"name": "Product Quality", "description": "Documents focusing on product durability and performance."} + } + """ results = {} - + for topic in topics: logger.info(f"Processing topic {topic}") temp_df = topic_info[topic_info[topic_label_col] == topic] @@ -143,42 +192,43 @@ def name_topics(topic_info: pd.DataFrame, llm_chain, topics, text_col='text_clea logger.info(f"Docs: {docs}") keywords = temp_df[top_words_col].values[0] logger.info(f"Keywords: {keywords}") - + try: - output = llm_chain.invoke({ "question": question, "docs": docs, "keywords": keywords}) - print(output['name'], output['description']) + output = llm_chain.invoke({"question": question, "docs": docs, "keywords": keywords}) + print(output["name"], output["description"]) results[topic] = output except Exception as e: logger.error(f"Error processing topic {topic}: {str(e)}") # errors.append({"topic": topic, "error": str(e)}) - + return results + if __name__ == "__main__": data = pd.read_csv(PROJECT_DIR / "data/user_messages_min_len_9_w_sentiment.csv") - + data_dict = {} - for q in data['q_number'].unique().tolist(): + for q in data["q_number"].unique().tolist(): df = data.copy() - data_dict[q] = df[df['q_number'] == q] - + data_dict[q] = df[df["q_number"] == q] + full_dfs = {} summary_dfs = {} - for key in [0, 1,4,6,7,9]: # these questions are the ones with >100 data points in each + for key in [0, 1, 4, 6, 7, 9]: # these questions are the ones with >100 data points in each temp_df = data_dict[key] - - question = temp_df['question'].unique()[0] - - temp_df['text_clean'] = temp_df['text_clean'].astype(str) - docs = temp_df['text_clean'].tolist() + + question = temp_df["question"].unique()[0] + + temp_df["text_clean"] = temp_df["text_clean"].astype(str) + docs = temp_df["text_clean"].tolist() embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) topics, probs = topic_model.fit_transform(docs, embeddings) - + rep_docs = topic_model.get_representative_docs() umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2) @@ -186,66 +236,93 @@ def name_topics(topic_info: pd.DataFrame, llm_chain, topics, text_col='text_clea topic_lookup = topic_model.get_topic_info()[["Topic", "Name"]] - temp_df['x'] = embeddings_2d[:,0] - temp_df['y'] = embeddings_2d[:,1] - temp_df['topic'] = topics - temp_df['doc'] = docs + temp_df["x"] = embeddings_2d[:, 0] + temp_df["y"] = embeddings_2d[:, 1] + temp_df["topic"] = topics + temp_df["doc"] = docs df_vis = temp_df.merge(topic_lookup, left_on="topic", right_on="Topic", how="left") - + df_for_summarisation = pd.DataFrame(topic_model.get_topic_info()) - df_for_summarisation = df_for_summarisation[df_for_summarisation['Topic'] != -1] - - topic_list = df_for_summarisation['Topic'].to_list() - - results = name_topics(df_for_summarisation, llm_chain, topic_list, text_col='Representative_Docs', top_words_col = 'Representation', - topic_label_col='Topic', question=temp_df['question'].unique()[0]) - - df_for_summarisation['name'] = df_for_summarisation['Topic'].map(lambda x: results[x]['name'] if x in results and isinstance(results[x], dict) and 'name' in results[x] else None) - df_for_summarisation['description'] = df_for_summarisation['Topic'].map(lambda x: results[x]['description'] if x in results and isinstance(results[x], 
dict) and 'description' in results[x] else None) - df_for_summarisation['question'] = question + df_for_summarisation = df_for_summarisation[df_for_summarisation["Topic"] != -1] + + topic_list = df_for_summarisation["Topic"].to_list() + + results = name_topics( + df_for_summarisation, + llm_chain, + topic_list, + text_col="Representative_Docs", + top_words_col="Representation", + topic_label_col="Topic", + question=temp_df["question"].unique()[0], + ) + + df_for_summarisation["name"] = df_for_summarisation["Topic"].map( + lambda x: results[x]["name"] + if x in results and isinstance(results[x], dict) and "name" in results[x] + else None + ) + df_for_summarisation["description"] = df_for_summarisation["Topic"].map( + lambda x: results[x]["description"] + if x in results and isinstance(results[x], dict) and "description" in results[x] + else None + ) + df_for_summarisation["question"] = question summary_dfs[key] = df_for_summarisation - - df_vis = df_vis.merge(df_for_summarisation[['Topic', 'name', 'description']], left_on='Topic', right_on='Topic', how='left') + + df_vis = df_vis.merge( + df_for_summarisation[["Topic", "name", "description"]], left_on="Topic", right_on="Topic", how="left" + ) full_dfs[key] = df_vis - + # save two bar charts per question for key in full_dfs.keys(): - question = full_dfs[key]['question'].unique()[0] - + question = full_dfs[key]["question"].unique()[0] + df = full_dfs[key] - - name_count_chart = alt.Chart(df).mark_bar().encode( - y=alt.Y('Name:N', title=None), - x=alt.X('count()', title='Count') - ).properties( - #title='Count of Values of Name' + + name_count_chart = ( + alt.Chart(df) + .mark_bar() + .encode(y=alt.Y("Name:N", title=None), x=alt.X("count()", title="Count")) + .properties( + # title='Count of Values of Name' ) + ) - sentiment_proportion_chart = alt.Chart(df).mark_bar().encode( - y=alt.Y('Name:N', title='Name'), - x=alt.X('count()', stack='normalize', title='Proportion'), - color=alt.Color('sentiment:N', title='Sentiment', scale=alt.Scale(domain=['Negative', 'Neutral', 'Positive'], range=['red', 'gray', 'green'])) - ).properties( + sentiment_proportion_chart = ( + alt.Chart(df) + .mark_bar() + .encode( + y=alt.Y("Name:N", title="Name"), + x=alt.X("count()", stack="normalize", title="Proportion"), + color=alt.Color( + "sentiment:N", + title="Sentiment", + scale=alt.Scale(domain=["Negative", "Neutral", "Positive"], range=["red", "gray", "green"]), + ), + ) + .properties( # title='Proportion of Sentiment within Name' ) + ) # Combine the charts and add a title - combined_chart = (name_count_chart | sentiment_proportion_chart).properties( - title=f"{question}" - ) + combined_chart = (name_count_chart | sentiment_proportion_chart).properties(title=f"{question}") combined_chart.save(PROJECT_DIR / f"outputs/by_question/{question}_name_sentiment_chart.html") - - excel_file = PROJECT_DIR / 'outputs/by_question/question_topic_models.xlsx' - with pd.ExcelWriter(excel_file, engine='openpyxl') as writer: - + + excel_file = PROJECT_DIR / "outputs/by_question/question_topic_models.xlsx" + with pd.ExcelWriter(excel_file, engine="openpyxl") as writer: + # Iterate over unique values of 'question' for key in summary_dfs.keys(): df = summary_dfs[key] - df = df.explode('Representative_Docs') - - question = df['question'].unique()[0] - + df = df.explode("Representative_Docs") + + question = df["question"].unique()[0] + # Save each summary table as a separate sheet in the Excel workbook sheet_name = question[:30] # Excel sheet names are limited to 31 characters - 
df[['question','Topic', 'Count', 'Name', 'Representation', 'name','description','Representative_Docs']].to_excel(writer, sheet_name=sheet_name, index=False)
-
\ No newline at end of file
+            df[
+                ["question", "Topic", "Count", "Name", "Representation", "name", "description", "Representative_Docs"]
+            ].to_excel(writer, sheet_name=sheet_name, index=False)
diff --git a/dsp_interview_transcripts/pipeline/topic_modelling.py b/dsp_interview_transcripts/pipeline/topic_modelling.py
index 64f223d..41a5918 100644
--- a/dsp_interview_transcripts/pipeline/topic_modelling.py
+++ b/dsp_interview_transcripts/pipeline/topic_modelling.py
@@ -1,22 +1,27 @@
-import numpy as np
-import pandas as pd
 import random
-from sentence_transformers import SentenceTransformer
+
 from collections import defaultdict

+import numpy as np
+import pandas as pd
+import torch
+
 from hdbscan import HDBSCAN
-from umap import UMAP
-from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
+from sentence_transformers import SentenceTransformer
+from sklearn.feature_extraction.text import TfidfVectorizer
 from sklearn.preprocessing import StandardScaler
+from umap import UMAP
+
+from dsp_interview_transcripts import PROJECT_DIR
+from dsp_interview_transcripts import logger

-from dsp_interview_transcripts import PROJECT_DIR, logger

 # Set random seeds
 RANDOM_SEED = 42
 np.random.seed(RANDOM_SEED)
 random.seed(RANDOM_SEED)
 # PyTorch seed (used by SentenceTransformer)
-import torch
+
 torch.manual_seed(RANDOM_SEED)

@@ -27,42 +32,61 @@
 MIN_CLUSTER_SIZE = 20

 umap_model = UMAP(
-    n_neighbors=15,
-    n_components=50,
-    min_dist=0.1,
-    metric="cosine",
-    random_state=RANDOM_SEED,
-    )
+    n_neighbors=15,
+    n_components=50,
+    min_dist=0.1,
+    metric="cosine",
+    random_state=RANDOM_SEED,
+)

 hdbscan_model = HDBSCAN(
-    min_cluster_size=20,
-    metric="euclidean",
-    cluster_selection_method="eom",
-    prediction_data=True,
-    )
+    min_cluster_size=20,
+    metric="euclidean",
+    cluster_selection_method="eom",
+    prediction_data=True,
+)

 vectorizer_model = TfidfVectorizer(
-    stop_words="english",
-    min_df=1,
-    max_df=0.85,
-    ngram_range=(1, 3),
-    )
-
-def stratified_sample(group, n=10):
-    #TODO: filter based on hdbscan probability
-    #TODO: find docs from centre of the cluster
-
-
-    # Calculate the sample size for each combination of 'question' and 'sentiment'
-    stratified_sample = group.groupby(['question', 'sentiment']).apply(lambda x: x.sample(frac=min(1, n / len(group)), random_state=42))
-
+    stop_words="english",
+    min_df=1,
+    max_df=0.85,
+    ngram_range=(1, 3),
+)
+
+
+def stratified_sample(group: pd.DataFrame, n: int = 10) -> pd.DataFrame:
+    """
+    Generate a stratified sample: get N responses stratified by
+    'question' and 'sentiment' values. The dataframe should already contain
+    just one topic, i.e. one group.
+
+    Args:
+        group (pd.DataFrame): The DataFrame containing data for one topic to sample from.
+            Must contain 'question' and 'sentiment' columns.
+        n (int, optional): The number of samples to draw per 'question' and
+            'sentiment' combination. If there are fewer than `n` records in a
+            group, all available records are returned. Defaults to 10.
+
+    Returns:
+        pd.DataFrame: A new DataFrame containing the stratified sample.
+ """ + # TODO: filter based on hdbscan probability + # TODO: find docs from centre of the cluster + + # Calculate the sample size for each combination of 'question' + # and 'sentiment' + stratified_sample = group.groupby(["question", "sentiment"]).apply( + lambda x: x.sample(frac=min(1, n / len(group)), random_state=42) + ) + # Reset the index to tidy up the resulting DataFrame return stratified_sample.reset_index(drop=True) + if __name__ == "__main__": user_messages = pd.read_csv(DATA_PATH) - - docs = user_messages['text_clean'].tolist() + + docs = user_messages["text_clean"].tolist() embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) umap_vectors = umap_model.fit_transform(embeddings) @@ -71,27 +95,27 @@ def stratified_sample(group, n=10): umap_df.columns = umap_df.columns.map(str) user_messages_w_umap = pd.concat([user_messages.reset_index(), umap_df], axis=1) - umap_vars = [f'{i}' for i in range(50)] + umap_vars = [f"{i}" for i in range(50)] model_vars = umap_vars # Normalise the umap vectors scaler = StandardScaler() df_normalized = scaler.fit_transform(user_messages_w_umap[model_vars]) - + clusters = hdbscan_model.fit_predict(df_normalized) cluster_probabilities = hdbscan_model.probabilities_ - user_messages_w_umap['label'] = clusters - user_messages_w_umap['probs'] = cluster_probabilities - logger.info(user_messages_w_umap['label'].value_counts()) - + user_messages_w_umap["label"] = clusters + user_messages_w_umap["probs"] = cluster_probabilities + logger.info(user_messages_w_umap["label"].value_counts()) + # 2d embeddings for visualisation umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2) embeddings_2d = umap_2d.fit_transform(embeddings) - - ### topic representations ### - cluster_groups = user_messages_w_umap.groupby('label').agg({'text_clean': ' '.join}).reset_index() - tfidf_matrix = vectorizer_model.fit_transform(cluster_groups['text_clean'].to_list()) + # topic representations + cluster_groups = user_messages_w_umap.groupby("label").agg({"text_clean": " ".join}).reset_index() + + tfidf_matrix = vectorizer_model.fit_transform(cluster_groups["text_clean"].to_list()) feature_names = vectorizer_model.get_feature_names_out() @@ -104,31 +128,38 @@ def stratified_sample(group, n=10): # Iterate over each cluster and get top words for cluster_idx, tfidf_scores in enumerate(tfidf_matrix): # Get indices of top n words within the cluster - top_word_indices = tfidf_scores.toarray()[0].argsort()[:-n_top_words - 1:-1] - + top_word_indices = tfidf_scores.toarray()[0].argsort()[: -n_top_words - 1 : -1] + # Get the top words corresponding to the top indices top_words = [feature_names[i] for i in top_word_indices] - + # Append the words to the dictionary - top_words_per_cluster[cluster_groups.iloc[cluster_idx]['label']] = ', '.join(top_words) - - top_words_df = pd.DataFrame(list(top_words_per_cluster.items()), columns=['Cluster', 'Top Words']) - - user_messages_w_umap_topics = pd.merge(user_messages_w_umap, top_words_df, left_on='label', right_on='Cluster', how='left') - user_messages_w_umap_topics['x'] = embeddings_2d[:, 0] - user_messages_w_umap_topics['y'] = embeddings_2d[:, 1] - - user_messages_w_umap_topics.to_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics.csv", index=False) - - - ### save most representative documents ### - - filtered_df = user_messages_w_umap_topics[(user_messages_w_umap_topics['probs'] >= 0.5) & (user_messages_w_umap_topics['Cluster'] != -1)] - + top_words_per_cluster[cluster_groups.iloc[cluster_idx]["label"]] = ", 
".join(top_words) + + top_words_df = pd.DataFrame(list(top_words_per_cluster.items()), columns=["Cluster", "Top Words"]) + + user_messages_w_umap_topics = pd.merge( + user_messages_w_umap, top_words_df, left_on="label", right_on="Cluster", how="left" + ) + user_messages_w_umap_topics["x"] = embeddings_2d[:, 0] + user_messages_w_umap_topics["y"] = embeddings_2d[:, 1] + + user_messages_w_umap_topics.to_csv( + PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics.csv", index=False + ) + + # save most representative documents + + filtered_df = user_messages_w_umap_topics[ + (user_messages_w_umap_topics["probs"] >= 0.5) & (user_messages_w_umap_topics["Cluster"] != -1) + ] + # Group by 'Cluster' and apply the stratified sampling - sampled_texts = filtered_df.groupby('Cluster').apply(stratified_sample).reset_index(drop=True) + sampled_texts = filtered_df.groupby("Cluster").apply(stratified_sample).reset_index(drop=True) # Keep only the first 10 samples per cluster - sampled_texts = sampled_texts.groupby('Cluster').head(10) - - sampled_texts[['Cluster', 'Top Words', 'text_clean', 'sentiment', 'question', 'context','conversation', 'uuid', 'probs']].to_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv", index=False) \ No newline at end of file + sampled_texts = sampled_texts.groupby("Cluster").head(10) + + sampled_texts[ + ["Cluster", "Top Words", "text_clean", "sentiment", "question", "context", "conversation", "uuid", "probs"] + ].to_csv(PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv", index=False) diff --git a/pyproject.toml b/pyproject.toml index a65eb01..1342dab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -91,44 +91,6 @@ lines_after_imports = 2 src_paths = ["dsp_interview_transcripts", "tests"] line_length = 119 -[tool.flake8] -max-line-length = 119 -select = [ # Choose the flake8 errors to select globally here - "C", # McCabe complexity - "E", # pep8 errors - "W", # pep8 warnings - "F", # pyflakes errors - "N", # naming conventions - "B", # bugbear errors (needs plugin installed) - "ANN", # flake8 annotations errors (needs plugin installed) - "T", # flake8 print errors (needs plugin installed) - "D", # flake8 doscstrings errors (needs plugin installed) - "B950", # Line too long. It considers "max-line-length" but only triggers when exceeded by more than 10%. -] -ignore = [ # Choose the flake8 errors to ignore globally here - "E501", # Line too long (using B950 instead, which has 10% tolerance) - "D107", # Missing docstring in __init__ - "D202", # No blank lines allowed after function docstring - "D400", # First line should end with a period - "D100", # Missing docstring in public module - "D104", # Missing docstring in public package - "ANN003", # Missing type annotation for **kwargs - "ANN002", # Missing type annotation for **args - "ANN1", # Missing type annotation for self in methot or cls method - "W503", # Line break occurred before a binary operator - "E203", # Whitespace before ':' -] -per-file-ignores = [ # Choose the flake8 errors to ignore per file here - "*/__init__.py:F401", # Ignore imported but unused in __init__.py files - "tests/*:ANN,D", # Ignore Docstring and annotations on tests -] -exclude = [ - ".venv/*", - ".vscode/*", -] -# See other flake8 extensions here: https://github.com/DmytroLitvinov/awesome-flake8-extensions - - [tool.bandit] skips = [ # Choose the bandit errors to ignore globally "B101", # Use of assert detected. 
The enclosed code will be removed when compiling to optimised byte code.

From fc44cfd055c91962dbe46f527759056d7b04ef70 Mon Sep 17 00:00:00 2001
From: RFOxbury
Date: Wed, 6 Nov 2024 15:07:12 +0000
Subject: [PATCH 15/25] Update readme

---
 dsp_interview_transcripts/pipeline/README.md  | 14 +++++++-------
 .../pipeline/topic_modelling.py               |  6 +-----
 2 files changed, 8 insertions(+), 12 deletions(-)

diff --git a/dsp_interview_transcripts/pipeline/README.md b/dsp_interview_transcripts/pipeline/README.md
index a146516..223250d 100644
--- a/dsp_interview_transcripts/pipeline/README.md
+++ b/dsp_interview_transcripts/pipeline/README.md
@@ -1,13 +1,13 @@
 # Basic pipeline

-The pipeline is very simple at this stage.
-
 1. Make sure you have a directory `data/` with the file `qual_af_transcripts.csv` in it.

-2. Run `python dsp_interview_transcripts/pipeline/run_pipeline.py`. This runs the following scripts:
-   - `process_data.py`. This will do some cleaning of the data (text cleaning, making sure the conversations are in order, concatenating consecutive messages by the same person within a conversation) and save two outputs: a file with just user messages over a certain length; one with the user messages, plus the immediately preceding bot message (in order to see if this extra context helps the text clustering and topic representations).
-   - `python dsp_interview_transcripts/pipeline/segment_interviews.py`. This uses some basic comparison between the topic guide and the BOT messages to figure out which parts of the interview might correspond to which question from the topic guide. A separate dataset is saved for each of the questions from the topic guide.
-   - `python dsp_interview_transcripts/pipeline/topic_modelling.py`. This runs BERTopic on the datasets provided above, and also iterates over a few different minimum cluster sizes.
+2. Create a directory `outputs/` in the root of the repository.

-3. Run `streamlit run dsp_interview_transcripts/pipeline/app.py`. This launches a streamlit app that allows you to inspect the outputs of BERTopic that we created in the step above.
+3. Run `python dsp_interview_transcripts/pipeline/run_pipeline.py`. This runs the following scripts:
+   - `process_data.py`. This will do some cleaning of the data (text cleaning, making sure the conversations are in order, concatenating consecutive messages by the same person within a conversation). It also applies a sentiment analysis model and estimates which question from the interview guide is being discussed at each point.
+   - `python dsp_interview_transcripts/pipeline/topic_modelling.py`. This runs a BERTopic-style model, but the actual steps (dimensionality reduction, HDBSCAN, TF-IDF representation) are run separately because I wanted to normalise the reduced embedding vectors before clustering.
+   - `python dsp_interview_transcripts/pipeline/name_clusters.py`. This uses llama3.2 to generate a name and a description for each topic.
+   - `python dsp_interview_transcripts/pipeline/prep_output_tables.py`. This formats the output data and also saves scatterplots locally.
+   - `python dsp_interview_transcripts/pipeline/top_down_analysis.py`. This mimics the top-down/framework approach. It subsets the data according to which question is being discussed and then runs BERTopic on each subset.
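For reference, the decomposed pipeline that `topic_modelling.py` implements boils down to the sketch below. It is a rough illustration using the parameters and input path hard-coded in that script, not a drop-in replacement for it:

```python
# Embed -> reduce with UMAP -> standardise the reduced vectors -> cluster.
# Standardising between UMAP and HDBSCAN is the step a single stock
# BERTopic(...) call would not perform, hence the decomposition.
import pandas as pd
from hdbscan import HDBSCAN
from sentence_transformers import SentenceTransformer
from sklearn.preprocessing import StandardScaler
from umap import UMAP

docs = pd.read_csv("data/user_messages_min_len_9_w_sentiment.csv")["text_clean"].tolist()

embeddings = SentenceTransformer("all-MiniLM-L6-v2").encode(docs)
reduced = UMAP(
    n_neighbors=15, n_components=50, min_dist=0.1, metric="cosine", random_state=42
).fit_transform(embeddings)
normalised = StandardScaler().fit_transform(reduced)
labels = HDBSCAN(
    min_cluster_size=20, metric="euclidean", cluster_selection_method="eom", prediction_data=True
).fit_predict(normalised)
```

The per-cluster TF-IDF topic representations are then computed over the resulting labels, as in the script.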
diff --git a/dsp_interview_transcripts/pipeline/topic_modelling.py b/dsp_interview_transcripts/pipeline/topic_modelling.py index 41a5918..82c36c4 100644 --- a/dsp_interview_transcripts/pipeline/topic_modelling.py +++ b/dsp_interview_transcripts/pipeline/topic_modelling.py @@ -21,14 +21,10 @@ np.random.seed(RANDOM_SEED) random.seed(RANDOM_SEED) # PyTorch seed (used by SentenceTransformer) - - torch.manual_seed(RANDOM_SEED) SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") - DATA_PATH = PROJECT_DIR / "data/user_messages_min_len_9_w_sentiment.csv" - MIN_CLUSTER_SIZE = 20 umap_model = UMAP( @@ -40,7 +36,7 @@ ) hdbscan_model = HDBSCAN( - min_cluster_size=20, + min_cluster_size=MIN_CLUSTER_SIZE, metric="euclidean", cluster_selection_method="eom", prediction_data=True, From 37bd67429fd64ce6a355aa012383a57192f6708f Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Wed, 6 Nov 2024 15:10:50 +0000 Subject: [PATCH 16/25] Untrack poetry lock --- poetry.lock | 5194 --------------------------------------------------- 1 file changed, 5194 deletions(-) delete mode 100644 poetry.lock diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 39561a1..0000000 --- a/poetry.lock +++ /dev/null @@ -1,5194 +0,0 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. - -[[package]] -name = "altair" -version = "5.4.1" -description = "Vega-Altair: A declarative statistical visualization library for Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "altair-5.4.1-py3-none-any.whl", hash = "sha256:0fb130b8297a569d08991fb6fe763582e7569f8a04643bbd9212436e3be04aef"}, - {file = "altair-5.4.1.tar.gz", hash = "sha256:0ce8c2e66546cb327e5f2d7572ec0e7c6feece816203215613962f0ec1d76a82"}, -] - -[package.dependencies] -jinja2 = "*" -jsonschema = ">=3.0" -narwhals = ">=1.5.2" -packaging = "*" -typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""} - -[package.extras] -all = ["altair-tiles (>=0.3.0)", "anywidget (>=0.9.0)", "numpy", "pandas (>=0.25.3)", "pyarrow (>=11)", "vega-datasets (>=0.9.0)", "vegafusion[embed] (>=1.6.6)", "vl-convert-python (>=1.6.0)"] -dev = ["geopandas", "hatch", "ibis-framework[polars]", "ipython[kernel]", "mistune", "mypy", "pandas (>=0.25.3)", "pandas-stubs", "polars (>=0.20.3)", "pytest", "pytest-cov", "pytest-xdist[psutil] (>=3.5,<4.0)", "ruff (>=0.6.0)", "types-jsonschema", "types-setuptools"] -doc = ["docutils", "jinja2", "myst-parser", "numpydoc", "pillow (>=9,<10)", "pydata-sphinx-theme (>=0.14.1)", "scipy", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinxext-altair"] - -[[package]] -name = "anyio" -version = "4.6.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.9" -files = [ - {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, - {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, -] - -[package.dependencies] -idna = ">=2.8" -sniffio = ">=1.1" - -[package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] -trio = ["trio (>=0.26.1)"] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap 
on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "argon2-cffi" -version = "23.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = 
"argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-lru" -version = "2.0.4" -description = "Simple LRU cache for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, -] - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = 
"sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "babel" -version = "2.16.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "bandit" -version = "1.7.10" -description = "Security oriented static analyser for python code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bandit-1.7.10-py3-none-any.whl", hash = "sha256:665721d7bebbb4485a339c55161ac0eedde27d51e638000d91c8c2d68343ad02"}, - {file = "bandit-1.7.10.tar.gz", hash = "sha256:59ed5caf5d92b6ada4bf65bc6437feea4a9da1093384445fed4d472acc6cff7b"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -PyYAML = ">=5.3.1" -rich = "*" -stevedore = ">=1.20.0" - -[package.extras] -baseline = ["GitPython (>=3.1.30)"] -sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] -toml = ["tomli (>=1.1.0)"] -yaml = ["PyYAML"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "bertopic" -version = "0.16.3" -description = "BERTopic performs topic Modeling with state-of-the-art transformer models." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "bertopic-0.16.3-py3-none-any.whl", hash = "sha256:d9b8b530d58a00034b4e1ac54aec4a677a2d0131429575fa45782d43a1185f90"}, - {file = "bertopic-0.16.3.tar.gz", hash = "sha256:5a309dd2598649ea8bb04ae1fed2786ad64abd29870d9f2ebf1dedfc6a5ad622"}, -] - -[package.dependencies] -hdbscan = ">=0.8.29" -numpy = ">=1.20.0" -pandas = ">=1.1.5" -plotly = ">=4.7.0" -scikit-learn = ">=0.22.2.post1" -sentence-transformers = ">=0.4.1" -tqdm = ">=4.41.1" -umap-learn = ">=0.5.0" - -[package.extras] -datamap = ["datamapplot (>=0.1)", "matplotlib (>=3.8)"] -dev = ["bertopic[docs,test]"] -docs = ["mkdocs (==1.5.3)", "mkdocs-material (==9.5.18)", "mkdocstrings (==0.24.3)", "mkdocstrings-python (==1.10.0)"] -flair = ["flair (>=0.7)", "torch (>=1.4.0)", "transformers (>=3.5.1)"] -gensim = ["gensim (>=4.0.0)"] -spacy = ["spacy (>=3.0.1)"] -test = ["pytest (>=5.4.3)", "pytest-cov (>=2.6.1)", "ruff (>=0.4.7,<0.5.0)"] -use = ["tensorflow", "tensorflow-hub", "tensorflow-text"] -vision = ["Pillow (>=9.2.0)", "accelerate (>=0.19.0)"] - -[[package]] -name = "black" -version = "22.12.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.7" -files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing tool." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - -[[package]] -name = "blinker" -version = "1.8.2" -description = "Fast, simple object-to-object and broadcast signaling" -optional = false -python-versions = ">=3.8" -files = [ - {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, - {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, -] - -[[package]] -name = "boto3" -version = "1.35.25" -description = "The AWS SDK for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "boto3-1.35.25-py3-none-any.whl", hash = "sha256:b1cfad301184cdd44dfd4805187ccab12de8dd28dd12a11a5cfdace17918c6de"}, - {file = "boto3-1.35.25.tar.gz", hash = "sha256:5df4e2cbe3409db07d3a0d8d63d5220ce3202a78206ad87afdbb41519b26ce45"}, -] - -[package.dependencies] -botocore = ">=1.35.25,<1.36.0" -jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.10.0,<0.11.0" - -[package.extras] -crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] - -[[package]] -name = "botocore" -version = "1.35.25" -description = "Low-level, data-driven core of boto 3." -optional = false -python-versions = ">=3.8" -files = [ - {file = "botocore-1.35.25-py3-none-any.whl", hash = "sha256:e58d60260abf10ccc4417967923117c9902a6a0cff9fddb6ea7ff42dc1bd4630"}, - {file = "botocore-1.35.25.tar.gz", hash = "sha256:76c5706b2c6533000603ae8683a297c887abbbaf6ee31e1b2e2863b74b2989bc"}, -] - -[package.dependencies] -jmespath = ">=0.7.1,<2.0.0" -python-dateutil = ">=2.1,<3.0.0" -urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} - -[package.extras] -crt = ["awscrt (==0.21.5)"] - -[[package]] -name = "cachetools" -version = "5.5.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, -] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "cffi" -version = "1.17.1" -description = "Foreign Function Interface for Python calling C code." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash 
= "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", 
hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "contourpy" -version = "1.3.0" -description = "Python library for calculating contours of 2D quadrilateral grids" -optional = false -python-versions = ">=3.9" -files = [ - {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, - {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, - {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, - {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, - {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, - {file = 
"contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, - {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, - {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, - {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, - {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, - {file = 
"contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, - {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, - {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, - {file = "contourpy-1.3.0.tar.gz", hash = 
"sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, -] - -[package.dependencies] -numpy = ">=1.23" - -[package.extras] -bokeh = ["bokeh", "selenium"] -docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] -test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] - -[[package]] -name = "coverage" -version = "7.6.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = 
"coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - 
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, -] - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cryptography" -version = "43.0.1" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = 
"cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "cycler" -version = "0.12.1" -description = "Composable style cycles" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, - {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, -] - -[package.extras] -docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] -tests = ["pytest", "pytest-cov", "pytest-xdist"] - -[[package]] -name = "debugpy" -version = "1.8.5" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, - {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, - {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, - {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, - {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, - {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, - {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, - {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, - {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, - {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, - {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, - {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, - {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, - {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, - {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, - {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, - {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, - {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, - {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, - {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, - {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, - {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = 
"sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "emoji" -version = "2.13.2" -description = "Emoji for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "emoji-2.13.2-py3-none-any.whl", hash = "sha256:ef6f2ee63b245e934c763b1a9a0637713955aa3d9e322432e036bb60559de4d6"}, - {file = "emoji-2.13.2.tar.gz", hash = "sha256:f95d10d96c5f21299ed2c4b32511611ba890b8c07f5f2bf5b04d5d3eee91fd19"}, -] - -[package.extras] -dev = ["coverage", "pytest (>=7.4.4)"] - -[[package]] -name = "et-xmlfile" -version = "1.1.0" -description = "An implementation of lxml.xmlfile for the standard library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, -] - -[[package]] -name = "executing" -version = "2.1.0" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.8" -files = [ - {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, - {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastjsonschema" -version = "2.20.0" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "filelock" -version = "3.16.1" -description = "A platform independent file lock." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
-    {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
-]
-
-[package.extras]
-docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
-typing = ["typing-extensions (>=4.12.2)"]
-
-[[package]]
-name = "flake8"
-version = "4.0.1"
-description = "the modular source code checker: pep8 pyflakes and co"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
-    {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
-]
-
-[package.dependencies]
-mccabe = ">=0.6.0,<0.7.0"
-pycodestyle = ">=2.8.0,<2.9.0"
-pyflakes = ">=2.4.0,<2.5.0"
-
-[[package]]
-name = "flake8-annotations"
-version = "2.9.1"
-description = "Flake8 Type Annotation Checks"
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
-    {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"},
-    {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"},
-]
-
-[package.dependencies]
-attrs = ">=21.4"
-flake8 = ">=3.7"
-
-[[package]]
-name = "flake8-bugbear"
-version = "22.12.6"
-description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"},
-    {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"},
-]
-
-[package.dependencies]
-attrs = ">=19.2.0"
-flake8 = ">=3.0.0"
-
-[package.extras]
-dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"]
-
-[[package]]
-name = "flake8-docstrings"
-version = "1.7.0"
-description = "Extension for flake8 which uses pydocstyle to check docstrings"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"},
-    {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"},
-]
-
-[package.dependencies]
-flake8 = ">=3"
-pydocstyle = ">=2.1"
-
-[[package]]
-name = "flake8-print"
-version = "4.0.1"
-description = "print statement checker plugin for flake8"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "flake8-print-4.0.1.tar.gz", hash = "sha256:12b3c3bf65329d8ca9acde949fb3b932ec113e9e5ffa6cb7cd55a7dbcd67dae1"},
-    {file = "flake8_print-4.0.1-py3-none-any.whl", hash = "sha256:e246bcd5b07d5259af460b7eff148052c49114640380d7f22340f30920fabf02"},
-]
-
-[package.dependencies]
-flake8 = ">=3.0"
-pycodestyle = "*"
-six = "*"
-
-[[package]]
-name = "fonttools"
-version = "4.53.1"
-description = "Tools to manipulate font files"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"},
-    {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"},
-    {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"},
-    {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"},
-    {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"},
-    {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"},
-    {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"},
-    {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"},
-    {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"},
-    {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"},
-    {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"},
-    {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"},
-    {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"},
-    {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"},
-    {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"},
-    {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"},
-    {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"},
-    {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"},
-    {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"},
-    {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"},
-    {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"},
-    {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"},
-    {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"},
-    {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"},
-    {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"},
-    {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"},
-    {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"},
-    {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"},
-    {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"},
-    {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"},
-    {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"},
-    {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"},
-    {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"},
-    {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"},
-    {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"},
-    {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"},
-    {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"},
-    {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"},
-    {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"},
-    {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"},
-    {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"},
-    {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"},
-]
-
-[package.extras]
-all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"]
-graphite = ["lz4 (>=1.7.4.2)"]
-interpolatable = ["munkres", "pycairo", "scipy"]
-lxml = ["lxml (>=4.0)"]
-pathops = ["skia-pathops (>=0.5.0)"]
-plot = ["matplotlib"]
-repacker = ["uharfbuzz (>=0.23.0)"]
-symfont = ["sympy"]
-type1 = ["xattr"]
-ufo = ["fs (>=2.2.0,<3)"]
-unicode = ["unicodedata2 (>=15.1.0)"]
-woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
-
-[[package]]
-name = "fqdn"
-version = "1.5.1"
-description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers"
-optional = false
-python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4"
-files = [
-    {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"},
-    {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"},
-]
-
-[[package]]
-name = "fsspec"
-version = "2024.9.0"
-description = "File-system specification"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"},
-    {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"},
-]
-
-[package.extras]
-abfs = ["adlfs"]
-adl = ["adlfs"]
-arrow = ["pyarrow (>=1)"]
-dask = ["dask", "distributed"]
-dev = ["pre-commit", "ruff"]
-doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
-dropbox = ["dropbox", "dropboxdrivefs", "requests"]
-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
-fuse = ["fusepy"]
-gcs = ["gcsfs"]
-git = ["pygit2"]
-github = ["requests"]
-gs = ["gcsfs"]
-gui = ["panel"]
-hdfs = ["pyarrow (>=1)"]
-http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"]
-libarchive = ["libarchive-c"]
-oci = ["ocifs"]
-s3 = ["s3fs"]
-sftp = ["paramiko"]
-smb = ["smbprotocol"]
-ssh = ["paramiko"]
-test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"]
-test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"]
-test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"]
-tqdm = ["tqdm"]
-
-[[package]]
-name = "ftfy"
-version = "6.2.3"
-description = "Fixes mojibake and other problems with Unicode, after the fact"
-optional = false
-python-versions = "<4,>=3.8.1"
-files = [
-    {file = "ftfy-6.2.3-py3-none-any.whl", hash = "sha256:f15761b023f3061a66207d33f0c0149ad40a8319fd16da91796363e2c049fdf8"},
-    {file = "ftfy-6.2.3.tar.gz", hash = "sha256:79b505988f29d577a58a9069afe75553a02a46e42de6091c0660cdc67812badc"},
-]
-
-[package.dependencies]
-wcwidth = ">=0.2.12,<0.3.0"
-
-[[package]]
-name = "gitdb"
-version = "4.0.11"
-description = "Git Object Database"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"},
-    {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"},
-]
-
-[package.dependencies]
-smmap = ">=3.0.1,<6"
-
-[[package]]
-name = "gitpython"
-version = "3.1.43"
-description = "GitPython is a Python library used to interact with Git repositories"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"},
-    {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"},
-]
-
-[package.dependencies]
-gitdb = ">=4.0.1,<5"
-
-[package.extras]
-doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"]
-test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"]
-
-[[package]]
-name = "h11"
-version = "0.14.0"
-description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
-    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
-]
-
-[[package]]
-name = "hdbscan"
-version = "0.8.38.post1"
-description = "Clustering based on density with variable density clusters"
-optional = false
-python-versions = "*"
-files = [
-    {file = "hdbscan-0.8.38.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf515c5e3e22b61c5200271a8fc9219c1851abdc2dcf89231cd5142f9abb1c1"},
-    {file = "hdbscan-0.8.38.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf1a532d1a6395b3ac90cca694676b41306bbcee78ccba77b75700d29ca6292"},
-    {file = "hdbscan-0.8.38.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cb0d7c453ba96fb3b1f1595385a6485e51c4e7c16a73337b47f2f7075c7f16"},
-    {file = "hdbscan-0.8.38.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9da363d479d33e61e743affa53675e17e571ab261c3674b3764b478b06532c4"},
-    {file = "hdbscan-0.8.38.post1.tar.gz", hash = "sha256:5fbdba2ffb5a99a8b52fa2915658ced6bed59f4d0d5f40b1c673646c928aac0b"},
-]
-
-[package.dependencies]
-joblib = ">=1.0"
-numpy = ">=1.20,<3"
-scikit-learn = ">=0.20"
-scipy = ">=1.0"
-
-[[package]]
-name = "httpcore"
-version = "1.0.5"
-description = "A minimal low-level HTTP client."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
-    {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
-]
-
-[package.dependencies]
-certifi = "*"
-h11 = ">=0.13,<0.15"
-
-[package.extras]
-asyncio = ["anyio (>=4.0,<5.0)"]
-http2 = ["h2 (>=3,<5)"]
-socks = ["socksio (==1.*)"]
-trio = ["trio (>=0.22.0,<0.26.0)"]
-
-[[package]]
-name = "httpx"
-version = "0.27.2"
-description = "The next generation HTTP client."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
-    {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
-]
-
-[package.dependencies]
-anyio = "*"
-certifi = "*"
-httpcore = "==1.*"
-idna = "*"
-sniffio = "*"
-
-[package.extras]
-brotli = ["brotli", "brotlicffi"]
-cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
-http2 = ["h2 (>=3,<5)"]
-socks = ["socksio (==1.*)"]
-zstd = ["zstandard (>=0.18.0)"]
-
-[[package]]
-name = "huggingface-hub"
-version = "0.25.1"
-description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "huggingface_hub-0.25.1-py3-none-any.whl", hash = "sha256:a5158ded931b3188f54ea9028097312cb0acd50bffaaa2612014c3c526b44972"},
-    {file = "huggingface_hub-0.25.1.tar.gz", hash = "sha256:9ff7cb327343211fbd06e2b149b8f362fd1e389454f3f14c6db75a4999ee20ff"},
-]
-
-[package.dependencies]
-filelock = "*"
-fsspec = ">=2023.5.0"
-packaging = ">=20.9"
-pyyaml = ">=5.1"
-requests = "*"
-tqdm = ">=4.42.1"
-typing-extensions = ">=3.7.4.3"
-
-[package.extras]
-all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
-cli = ["InquirerPy (==0.3.4)"]
-dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
-fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
-hf-transfer = ["hf-transfer (>=0.1.4)"]
-inference = ["aiohttp", "minijinja (>=1.0)"]
-quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"]
-tensorflow = ["graphviz", "pydot", "tensorflow"]
-tensorflow-testing = ["keras (<3.0)", "tensorflow"]
-testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
-torch = ["safetensors[torch]", "torch"]
-typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"]
-
-[[package]]
-name = "identify"
-version = "2.6.1"
-description = "File identification library for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"},
-    {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"},
-]
-
-[package.extras]
-license = ["ukkonen"]
-
-[[package]]
-name = "idna"
-version = "3.10"
-description = "Internationalized Domain Names in Applications (IDNA)"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
-    {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
-]
-
-[package.extras]
-all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
-
-[[package]]
-name = "iniconfig"
-version = "2.0.0"
-description = "brain-dead simple config-ini parsing"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
-    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
-]
-
-[[package]]
-name = "ipykernel"
-version = "6.29.5"
-description = "IPython Kernel for Jupyter"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"},
-    {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"},
-]
-
-[package.dependencies]
-appnope = {version = "*", markers = "platform_system == \"Darwin\""}
-comm = ">=0.1.1"
-debugpy = ">=1.6.5"
-ipython = ">=7.23.1"
-jupyter-client = ">=6.1.12"
-jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
-matplotlib-inline = ">=0.1"
-nest-asyncio = "*"
-packaging = "*"
-psutil = "*"
-pyzmq = ">=24"
-tornado = ">=6.1"
-traitlets = ">=5.4.0"
-
-[package.extras]
-cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"]
-docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"]
-pyqt5 = ["pyqt5"]
-pyside6 = ["pyside6"]
-test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"]
-
-[[package]]
-name = "ipython"
-version = "8.27.0"
-description = "IPython: Productive Interactive Computing"
-optional = false
-python-versions = ">=3.10"
-files = [
-    {file = "ipython-8.27.0-py3-none-any.whl", hash = "sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c"},
-    {file = "ipython-8.27.0.tar.gz", hash = "sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-decorator = "*"
-jedi = ">=0.16"
-matplotlib-inline = "*"
-pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""}
-prompt-toolkit = ">=3.0.41,<3.1.0"
-pygments = ">=2.4.0"
-stack-data = "*"
-traitlets = ">=5.13.0"
-typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""}
-
-[package.extras]
-all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"]
-black = ["black"]
-doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"]
-kernel = ["ipykernel"]
-matplotlib = ["matplotlib"]
-nbconvert = ["nbconvert"]
-nbformat = ["nbformat"]
-notebook = ["ipywidgets", "notebook"]
-parallel = ["ipyparallel"]
-qtconsole = ["qtconsole"]
-test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"]
-test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"]
-
-[[package]]
-name = "ipywidgets"
-version = "8.1.5"
-description = "Jupyter interactive widgets"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"},
-    {file = "ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"},
-]
-
-[package.dependencies]
-comm = ">=0.1.3"
-ipython = ">=6.1.0"
-jupyterlab-widgets = ">=3.0.12,<3.1.0"
-traitlets = ">=4.3.1"
-widgetsnbextension = ">=4.0.12,<4.1.0"
-
-[package.extras]
-test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"]
-
-[[package]]
-name = "isoduration"
-version = "20.11.0"
-description = "Operations with ISO 8601 durations"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"},
-    {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"},
-]
-
-[package.dependencies]
-arrow = ">=0.15.0"
-
-[[package]]
-name = "isort"
-version = "5.13.2"
-description = "A Python utility / library to sort Python imports."
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
-    {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
-]
-
-[package.extras]
-colors = ["colorama (>=0.4.6)"]
-
-[[package]]
-name = "jedi"
-version = "0.19.1"
-description = "An autocompletion tool for Python that can be used for text editors."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
-    {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
-]
-
-[package.dependencies]
-parso = ">=0.8.3,<0.9.0"
-
-[package.extras]
-docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
-qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
-testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
-
-[[package]]
-name = "jinja2"
-version = "3.1.4"
-description = "A very fast and expressive template engine."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
-    {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
-]
-
-[package.dependencies]
-MarkupSafe = ">=2.0"
-
-[package.extras]
-i18n = ["Babel (>=2.7)"]
-
-[[package]]
-name = "jmespath"
-version = "1.0.1"
-description = "JSON Matching Expressions"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
-    {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
-]
-
-[[package]]
-name = "joblib"
-version = "1.4.2"
-description = "Lightweight pipelining with Python functions"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"},
-    {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
-]
-
-[[package]]
-name = "json5"
-version = "0.9.25"
-description = "A Python implementation of the JSON5 data format."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"},
-    {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"},
-]
-
-[[package]]
-name = "jsonpointer"
-version = "3.0.0"
-description = "Identify specific nodes in a JSON document (RFC 6901)"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
-    {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
-]
-
-[[package]]
-name = "jsonschema"
-version = "4.23.0"
-description = "An implementation of JSON Schema validation for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"},
-    {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"},
-]
-
-[package.dependencies]
-attrs = ">=22.2.0"
-fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""}
-jsonschema-specifications = ">=2023.03.6"
-referencing = ">=0.28.4"
-rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""}
-rpds-py = ">=0.7.1"
-uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""}
-
-[package.extras]
-format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
-format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
-
-[[package]]
-name = "jsonschema-specifications"
-version = "2023.12.1"
-description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
-    {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
-]
-
-[package.dependencies]
-referencing = ">=0.31.0"
-
-[[package]]
-name = "jupyter"
-version = "1.1.1"
-description = "Jupyter metapackage. Install all the Jupyter components in one go."
-optional = false
-python-versions = "*"
-files = [
-    {file = "jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83"},
-    {file = "jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a"},
-]
-
-[package.dependencies]
-ipykernel = "*"
-ipywidgets = "*"
-jupyter-console = "*"
-jupyterlab = "*"
-nbconvert = "*"
-notebook = "*"
-
-[[package]]
-name = "jupyter-client"
-version = "8.6.3"
-description = "Jupyter protocol implementation and client libraries"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"},
-    {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"},
-]
-
-[package.dependencies]
-jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
-python-dateutil = ">=2.8.2"
-pyzmq = ">=23.0"
-tornado = ">=6.2"
-traitlets = ">=5.3"
-
-[package.extras]
-docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
-test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
-
-[[package]]
-name = "jupyter-console"
-version = "6.6.3"
-description = "Jupyter terminal console"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"},
-    {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"},
-]
-
-[package.dependencies]
-ipykernel = ">=6.14"
-ipython = "*"
-jupyter-client = ">=7.0.0"
-jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
-prompt-toolkit = ">=3.0.30"
-pygments = "*"
-pyzmq = ">=17"
-traitlets = ">=5.4"
-
-[package.extras]
-test = ["flaky", "pexpect", "pytest"]
-
-[[package]]
-name = "jupyter-core"
-version = "5.7.2"
-description = "Jupyter core package. A base package on which Jupyter projects rely."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"},
-    {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"},
-]
-
-[package.dependencies]
-platformdirs = ">=2.5"
-pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""}
-traitlets = ">=5.3"
-
-[package.extras]
-docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"]
-test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"]
-
-[[package]]
-name = "jupyter-events"
-version = "0.10.0"
-description = "Jupyter Event System library"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"},
-    {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"},
-]
-
-[package.dependencies]
-jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]}
-python-json-logger = ">=2.0.4"
-pyyaml = ">=5.3"
-referencing = "*"
-rfc3339-validator = "*"
-rfc3986-validator = ">=0.1.1"
-traitlets = ">=5.3"
-
-[package.extras]
-cli = ["click", "rich"]
-docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"]
-test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"]
-
-[[package]]
-name = "jupyter-lsp"
-version = "2.2.5"
-description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"},
-    {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"},
-]
-
-[package.dependencies]
-jupyter-server = ">=1.1.2"
-
-[[package]]
-name = "jupyter-server"
-version = "2.14.2"
-description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"},
-    {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"},
-]
-
-[package.dependencies]
-anyio = ">=3.1.0"
-argon2-cffi = ">=21.1"
-jinja2 = ">=3.0.3"
-jupyter-client = ">=7.4.4"
-jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
-jupyter-events = ">=0.9.0"
-jupyter-server-terminals = ">=0.4.4"
-nbconvert = ">=6.4.4"
-nbformat = ">=5.3.0"
-overrides = ">=5.0"
-packaging = ">=22.0"
-prometheus-client = ">=0.9"
-pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""}
-pyzmq = ">=24"
-send2trash = ">=1.8.2"
-terminado = ">=0.8.3"
-tornado = ">=6.2.0"
-traitlets = ">=5.6.0"
-websocket-client = ">=1.7"
-
-[package.extras]
-docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"]
-test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"]
-
-[[package]]
-name = "jupyter-server-terminals"
-version = "0.5.3"
-description = "A Jupyter Server Extension Providing Terminals."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"},
-    {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"},
-]
-
-[package.dependencies]
-pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""}
-terminado = ">=0.8.3"
-
-[package.extras]
-docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"]
-test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"]
-
-[[package]]
-name = "jupyterlab"
-version = "4.2.5"
-description = "JupyterLab computational environment"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"},
-    {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"},
-]
-
-[package.dependencies]
-async-lru = ">=1.0.0"
-httpx = ">=0.25.0"
-ipykernel = ">=6.5.0"
-jinja2 = ">=3.0.3"
-jupyter-core = "*"
-jupyter-lsp = ">=2.0.0"
-jupyter-server = ">=2.4.0,<3"
-jupyterlab-server = ">=2.27.1,<3"
-notebook-shim = ">=0.2"
-packaging = "*"
-setuptools = ">=40.1.0"
-tornado = ">=6.2.0"
-traitlets = "*"
-
-[package.extras]
-dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"]
-docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"]
-docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"]
-test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"]
-upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"]
-
-[[package]]
-name = "jupyterlab-pygments"
-version = "0.3.0"
-description = "Pygments theme using JupyterLab CSS variables"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"},
-    {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"},
-]
-
-[[package]]
-name = "jupyterlab-server"
-version = "2.27.3"
-description = "A set of server components for JupyterLab and JupyterLab like applications."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"},
-    {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"},
-]
-
-[package.dependencies]
-babel = ">=2.10"
-jinja2 = ">=3.0.3"
-json5 = ">=0.9.0"
-jsonschema = ">=4.18.0"
-jupyter-server = ">=1.21,<3"
-packaging = ">=21.3"
-requests = ">=2.31"
-
-[package.extras]
-docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"]
-openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"]
-test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"]
-
-[[package]]
-name = "jupyterlab-widgets"
-version = "3.0.13"
-description = "Jupyter interactive widgets for JupyterLab"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"},
-    {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"},
-]
-
-[[package]]
-name = "kiwisolver"
-version = "1.4.7"
-description = "A fast implementation of the Cassowary constraint solver"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"},
-    {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"},
-    {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"},
-    {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"},
-    {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"},
-    {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"},
-    {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"},
-    {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"},
-    {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"},
-    {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"},
-    {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"},
-    {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"},
-    {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"},
-    {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"},
-    {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"},
-    {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"},
-    {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"},
-    {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"},
-    {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"},
-    {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"},
-    {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"},
-    {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"},
-    {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"},
-    {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"},
-    {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"},
-    {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"},
-]
-
-[[package]]
-name = "llvmlite"
-version = "0.43.0"
-description = "lightweight wrapper around basic LLVM functionality"
-optional = false
-python-versions = ">=3.9"
-files = [
-    {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"},
-    {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"},
-    {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"},
-    {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"},
-    {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"},
-    {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"},
-    {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"},
-    {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"},
-    {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"},
-    {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"},
-    {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"},
-    {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"},
-    {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"},
-    {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"},
-    {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"},
-    {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"},
-    {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"},
-    {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"},
-    {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"},
-    {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"},
-    {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"},
-]
-
-[[package]]
-name = "markdown-it-py"
-version = "3.0.0"
-description = "Python port of markdown-it. Markdown parsing, done right!"
-optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = 
"sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "matplotlib" -version = "3.9.2" -description = "Python plotting package" -optional = false 
-python-versions = ">=3.9" -files = [ - {file = "matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"}, - {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"}, - {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"}, - {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"}, - {file = "matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"}, - {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"}, - {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"}, - {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"}, - {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"}, - {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"}, - {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"}, - {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"}, - {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"}, - {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"}, - {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"}, - {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"}, - {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"}, - {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"}, - {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"}, - {file = "matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"}, - {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"}, - {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"}, - {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"}, - {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"}, - {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"}, - {file = "matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"}, - {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"}, - {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"}, - {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"}, - {file = "matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"}, - {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"}, - {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"}, - {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"}, - {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"}, - {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"}, - {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"}, -] - -[package.dependencies] -contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -kiwisolver = ">=1.3.1" -numpy = ">=1.23" -packaging = ">=20.0" -pillow = ">=8" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - -[package.extras] -dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = 
"mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = "*" -files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mistune" -version = "3.0.2" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, -] - -[[package]] -name = "moto" -version = "3.1.19" -description = "A library that allows your python tests to easily mock out the boto library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "moto-3.1.19-py3-none-any.whl", hash = "sha256:de3cd86cba6c78c61d51d16f04807584a15a7577f656788cbf68a43ebf1a8927"}, - {file = "moto-3.1.19.tar.gz", hash = "sha256:b16b95a9fb434d6f360b8cd20a8eee2e8b129b6715d15c283af1b97ee5a7c210"}, -] - -[package.dependencies] -boto3 = ">=1.9.201" -botocore = ">=1.12.201" -cryptography = ">=3.3.1" -Jinja2 = ">=2.10.1" -MarkupSafe = "!=2.0.0a1" -python-dateutil = ">=2.1,<3.0.0" -pytz = "*" -requests = ">=2.5" -responses = ">=0.9.0" -werkzeug = ">=0.5,<2.2.0" -xmltodict = "*" - -[package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.4.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "idna (>=2.5,<4)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -apigatewayv2 = ["PyYAML (>=5.1)"] -appsync = ["graphql-core"] -awslambda = ["docker (>=2.5.1)"] -batch = ["docker (>=2.5.1)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.4.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "idna (>=2.5,<4)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -ds = ["sshpubkeys (>=3.1.0)"] -dynamodb = ["docker (>=2.5.1)"] -dynamodb2 = ["docker (>=2.5.1)"] -dynamodbstreams = ["docker (>=2.5.1)"] -ebs = ["sshpubkeys (>=3.1.0)"] -ec2 = ["sshpubkeys (>=3.1.0)"] -efs = ["sshpubkeys (>=3.1.0)"] -glue = ["pyparsing (>=3.0.7)"] -iotdata = ["jsondiff (>=1.1.2)"] -route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.4.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "flask (<2.2.0)", "flask-cors", "graphql-core", "idna (>=2.5,<4)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", 
"pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -ssm = ["PyYAML (>=5.1)", "dataclasses"] -xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "narwhals" -version = "1.8.2" -description = "Extremely lightweight compatibility layer between dataframe libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "narwhals-1.8.2-py3-none-any.whl", hash = "sha256:08554e9e46411b9dba0deffe8510b948baad603270697414aebd2e5694e35a52"}, - {file = "narwhals-1.8.2.tar.gz", hash = "sha256:82bc3c630dd48c30dd87e3762e785bd0ae07a6b9039fff8a8004bb452f8c7db6"}, -] - -[package.extras] -cudf = ["cudf (>=23.08.00)"] -dask = ["dask[dataframe] (>=2024.7)"] -modin = ["modin"] -pandas = ["pandas (>=0.25.3)"] -polars = ["polars (>=0.20.3)"] -pyarrow = ["pyarrow (>=11.0.0)"] - -[[package]] -name = "nbclient" -version = "0.10.0" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.16.4" -description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, - {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "!=5.0.0" -defusedxml = "*" -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.1" - -[package.extras] -all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["pyqtwebengine (>=5.15)"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.10" -files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, -] - -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "nltk" -version = "3.9.1" -description = "Natural Language Toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nltk-3.9.1-py3-none-any.whl", hash = 
"sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, - {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, -] - -[package.dependencies] -click = "*" -joblib = "*" -regex = ">=2021.8.3" -tqdm = "*" - -[package.extras] -all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] -corenlp = ["requests"] -machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] -plot = ["matplotlib"] -tgrep = ["pyparsing"] -twitter = ["twython"] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, -] - -[[package]] -name = "notebook" -version = "7.2.2" -description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c"}, - {file = "notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e"}, -] - -[package.dependencies] -jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.2.0,<4.3" -jupyterlab-server = ">=2.27.1,<3" -notebook-shim = ">=0.2,<0.3" -tornado = ">=6.2.0" - -[package.extras] -dev = ["hatch", "pre-commit"] -docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] - -[[package]] -name = "notebook-shim" -version = "0.2.4" -description = "A shim layer for notebook traits and config" -optional = false -python-versions = ">=3.7" -files = [ - {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, - {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, -] - -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] - -[[package]] -name = "numba" -version = "0.60.0" -description = "compiling Python code using LLVM" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, - {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, - {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, - {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, - {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, - {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, - {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, - {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, - {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, - {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, - {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, - {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, - {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, -] - -[package.dependencies] -llvmlite = "==0.43.*" -numpy = ">=1.22,<2.1" - -[[package]] -name = "numpy" -version = "2.0.2" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, - {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, - {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, - {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, - {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, - {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, - {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, - 
{file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, - {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, - {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, - {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.1.3.1" -description = "CUBLAS native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" -description = "CUDA profiling tools runtime libs." 
-optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" -description = "NVRTC native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" -description = "CUDA Runtime native Libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "9.1.0.70" -description = "cuDNN runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, - {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.0.2.54" -description = "CUFFT native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.2.106" -description = "CURAND native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.4.5.107" -description = "CUDA solver native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" -nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.1.0.106" -description = "CUSPARSE native runtime libraries" -optional = false 
-python-versions = ">=3" -files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.20.5" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.6.68" -description = "Nvidia JIT LTO Library" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_aarch64.whl", hash = "sha256:b3fd0779845f68b92063ab1393abab1ed0a23412fc520df79a8190d098b5cd6b"}, - {file = "nvidia_nvjitlink_cu12-12.6.68-py3-none-manylinux2014_x86_64.whl", hash = "sha256:125a6c2a44e96386dda634e13d944e60b07a0402d391a070e8fb4104b34ea1ab"}, - {file = "nvidia_nvjitlink_cu12-12.6.68-py3-none-win_amd64.whl", hash = "sha256:a55744c98d70317c5e23db14866a8cc2b733f7324509e941fc96276f9f37801d"}, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.1.105" -description = "NVIDIA Tools Extension" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, -] - -[[package]] -name = "openpyxl" -version = "3.1.5" -description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, - {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, -] - -[package.dependencies] -et-xmlfile = "*" - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." 
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"},
-    {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"},
-]
-
-[[package]]
-name = "packaging"
-version = "24.1"
-description = "Core utilities for Python packages"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
-    {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
-]
-
-[[package]]
-name = "pandas"
-version = "2.2.3"
-description = "Powerful data structures for data analysis, time series, and statistics"
-optional = false
-python-versions = ">=3.9"
-files = [
-    {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
-    {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
-    {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"},
-    {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"},
-    {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"},
-    {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"},
-    {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"},
-    {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"},
-    {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"},
-    {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"},
-    {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"},
-    {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"},
-    {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"},
-    {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"},
-    {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"},
-    {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"},
-    {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"},
-    {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"},
-    {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"},
-    {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"},
-    {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"},
-    {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"},
-    {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"},
-    {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"},
-    {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"},
-    {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"},
-    {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"},
-    {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"},
-    {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"},
-    {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"},
-    {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"},
-    {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"},
-    {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"},
-    {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"},
-    {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"},
-    {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"},
-    {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"},
-    {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"},
-    {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"},
-    {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"},
-    {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"},
-    {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"},
-]
-
-[package.dependencies]
-numpy = [
-    {version = ">=1.23.2", markers = "python_version == \"3.11\""},
-    {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
-]
-python-dateutil = ">=2.8.2"
-pytz = ">=2020.1"
-tzdata = ">=2022.7"
-
-[package.extras]
-all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"]
-aws = ["s3fs (>=2022.11.0)"]
-clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"]
-compression = ["zstandard (>=0.19.0)"]
-computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"]
-consortium-standard = ["dataframe-api-compat (>=0.1.7)"]
-excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"]
-feather = ["pyarrow (>=10.0.1)"]
-fss = ["fsspec (>=2022.11.0)"]
-gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"]
-hdf5 = ["tables (>=3.8.0)"]
-html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"]
-mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"]
-output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"]
-parquet = ["pyarrow (>=10.0.1)"]
-performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
-plot = ["matplotlib (>=3.6.3)"]
-postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
-pyarrow = ["pyarrow (>=10.0.1)"]
-spss = ["pyreadstat (>=1.2.0)"]
-sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
-test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
-xml = ["lxml (>=4.9.2)"]
-
-[[package]]
-name = "pandocfilters"
-version = "1.5.1"
-description = "Utilities for writing pandoc filters in python"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
-    {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"},
-    {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"},
-]
-
-[[package]]
-name = "parso"
-version = "0.8.4"
-description = "A Python Parser"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
-    {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
-]
-
-[package.extras]
-qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
-testing = ["docopt", "pytest"]
-
-[[package]]
-name = "pathspec"
-version = "0.12.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
-    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
-]
-
-[[package]]
-name = "pbr"
-version = "6.1.0"
-description = "Python Build Reasonableness"
-optional = false
-python-versions = ">=2.6"
-files = [
-    {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"},
-    {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"},
-]
-
-[[package]]
-name = "pexpect"
-version = "4.9.0"
-description = "Pexpect allows easy control of interactive console applications."
-optional = false
-python-versions = "*"
-files = [
-    {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
-    {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
-]
-
-[package.dependencies]
-ptyprocess = ">=0.5"
-
-[[package]]
-name = "pillow"
-version = "10.4.0"
-description = "Python Imaging Library (Fork)"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"},
-    {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"},
-    {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"},
-    {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"},
-    {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"},
-    {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"},
-    {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"},
-    {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"},
-    {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"},
-    {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"},
-    {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"},
-    {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"},
-    {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"},
-    {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"},
-    {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"},
-    {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"},
-    {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"},
-    {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"},
-    {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"},
-    {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"},
-    {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"},
-    {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"},
-    {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"},
-    {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"},
-    {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"},
-    {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"},
-    {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"},
-    {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"},
-    {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"},
-    {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"},
-    {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"},
-    {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"},
-    {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"},
-    {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"},
-    {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"},
-    {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"},
-    {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"},
-    {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"},
-    {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"},
-    {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"},
-    {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"},
-    {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"},
-    {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"},
-    {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"},
-    {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"},
-    {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"},
-    {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"},
-    {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"},
-    {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"},
-    {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"},
-    {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"},
-    {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"},
-    {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"},
-    {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"},
-    {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"},
-    {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"},
-    {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"},
-    {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"},
-    {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"},
-    {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"},
-    {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"},
-    {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"},
-    {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"},
-    {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"},
-    {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"},
-    {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"},
-    {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"},
-    {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"},
-    {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"},
-    {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"},
-    {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"},
-    {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"},
-    {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"},
-    {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"},
-    {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"},
-    {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"},
-    {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"},
-    {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"},
-    {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"},
-    {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"},
-]
-
-[package.extras]
-docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
-fpx = ["olefile"]
-mic = ["olefile"]
-tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
-typing = ["typing-extensions"]
-xmp = ["defusedxml"]
-
-[[package]]
-name = "platformdirs"
-version = "4.3.6"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
-    {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
-]
-
-[package.extras]
-docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
-type = ["mypy (>=1.11.2)"]
-
-[[package]]
-name = "plotly"
-version = "5.24.1"
-description = "An open-source, interactive data visualization library for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"},
-    {file = "plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"},
-]
-
-[package.dependencies]
-packaging = "*"
-tenacity = ">=6.2.0"
-
-[[package]]
-name = "pluggy"
-version = "1.5.0"
-description = "plugin and hook calling mechanisms for python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
-    {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
-]
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "pre-commit"
-version = "2.21.0"
-description = "A framework for managing and maintaining multi-language pre-commit hooks."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"},
-    {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"},
-]
-
-[package.dependencies]
-cfgv = ">=2.0.0"
-identify = ">=1.0.0"
-nodeenv = ">=0.11.1"
-pyyaml = ">=5.1"
-virtualenv = ">=20.10.0"
-
-[[package]]
-name = "prometheus-client"
-version = "0.21.0"
-description = "Python client for the Prometheus monitoring system."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"},
-    {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"},
-]
-
-[package.extras]
-twisted = ["twisted"]
-
-[[package]]
-name = "prompt-toolkit"
-version = "3.0.47"
-description = "Library for building powerful interactive command lines in Python"
-optional = false
-python-versions = ">=3.7.0"
-files = [
-    {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"},
-    {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"},
-]
-
-[package.dependencies]
-wcwidth = "*"
-
-[[package]]
-name = "protobuf"
-version = "5.28.2"
-description = ""
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"},
-    {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"},
-    {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"},
-    {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"},
-    {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"},
-    {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"},
-    {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"},
-    {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"},
-    {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"},
-    {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"},
-    {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"},
-]
-
-[[package]]
-name = "psutil"
-version = "6.0.0"
-description = "Cross-platform lib for process and system monitoring in Python."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
-files = [
-    {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"},
-    {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"},
-    {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"},
-    {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"},
-    {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"},
-    {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"},
-    {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"},
-    {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"},
-    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"},
-    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"},
-    {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"},
-    {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"},
-    {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"},
-    {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"},
-    {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"},
-    {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"},
-    {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"},
-]
-
-[package.extras]
-test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
-
-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-description = "Run a subprocess in a pseudo terminal"
-optional = false
-python-versions = "*"
-files = [
-    {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
-    {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
-]
-
-[[package]]
-name = "pure-eval"
-version = "0.2.3"
-description = "Safely evaluate AST nodes without side effects"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"},
-    {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"},
-]
-
-[package.extras]
-tests = ["pytest"]
-
-[[package]]
-name = "pyarrow"
-version = "17.0.0"
-description = "Python library for Apache Arrow"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"},
-    {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"},
-    {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"},
-    {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"},
-    {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"},
-    {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"},
-    {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"},
-    {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"},
-    {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"},
-    {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"},
-    {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"},
-    {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"},
-    {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"},
-    {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"},
-    {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"},
-    {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"},
-    {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"},
-    {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"},
-    {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"},
-    {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"},
-    {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"},
-    {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"},
-    {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"},
-    {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"},
-    {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"},
-    {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"},
-    {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"},
-    {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"},
-    {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"},
-    {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"},
-    {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"},
-    {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"},
-    {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"},
-    {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"},
-    {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"},
-    {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"},
-]
-
-[package.dependencies]
-numpy = ">=1.16.6"
-
-[package.extras]
-test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"]
-
-[[package]]
-name = "pycodestyle"
-version = "2.8.0"
-description = "Python style guide checker"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-files = [
-    {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
-    {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
-]
-
-[[package]]
-name = "pycparser"
-version = "2.22"
-description = "C parser in Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
-    {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
-]
-
-[[package]]
-name = "pydeck"
-version = "0.9.1"
-description = "Widget for deck.gl maps"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pydeck-0.9.1-py2.py3-none-any.whl", hash = "sha256:b3f75ba0d273fc917094fa61224f3f6076ca8752b93d46faf3bcfd9f9d59b038"},
-    {file = "pydeck-0.9.1.tar.gz", hash = "sha256:f74475ae637951d63f2ee58326757f8d4f9cd9f2a457cf42950715003e2cb605"},
-]
-
-[package.dependencies]
-jinja2 = ">=2.10.1"
-numpy = ">=1.16.4"
-
-[package.extras]
-carto = ["pydeck-carto"]
-jupyter = ["ipykernel (>=5.1.2)", "ipython (>=5.8.0)", "ipywidgets (>=7,<8)", "traitlets (>=4.3.2)"]
-
-[[package]]
-name = "pydocstyle"
-version = "6.3.0"
-description = "Python docstring style checker"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
-    {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
-]
-
-[package.dependencies]
-snowballstemmer = ">=2.2.0"
-
-[package.extras]
-toml = ["tomli (>=1.2.3)"]
-
-[[package]]
-name = "pyflakes"
-version = "2.4.0"
-description = "passive checker of Python programs"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
-    {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
-    {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
-]
-
-[[package]]
-name = "pygments"
-version = "2.18.0"
-description = "Pygments is a syntax highlighting package written in Python."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
-    {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
-]
-
-[package.extras]
-windows-terminal = ["colorama (>=0.4.6)"]
-
-[[package]]
-name = "pynndescent"
-version = "0.5.13"
-description = "Nearest Neighbor Descent"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pynndescent-0.5.13-py3-none-any.whl", hash = "sha256:69aabb8f394bc631b6ac475a1c7f3994c54adf3f51cd63b2730fefba5771b949"},
-    {file = "pynndescent-0.5.13.tar.gz", hash = "sha256:d74254c0ee0a1eeec84597d5fe89fedcf778593eeabe32c2f97412934a9800fb"},
-]
-
-[package.dependencies]
-joblib = ">=0.11"
-llvmlite = ">=0.30"
-numba = ">=0.51.2"
-scikit-learn = ">=0.18"
-scipy = ">=1.0"
-
-[[package]]
-name = "pyparsing"
-version = "3.1.4"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-optional = false
-python-versions = ">=3.6.8"
-files = [
-    {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"},
-    {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"},
-]
-
-[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
-
-[[package]]
-name = "pyproject-flake8"
-version = "0.0.1a5"
-description = "pyproject-flake8 (`pflake8`), a monkey patching wrapper to connect flake8 with pyproject.toml configuration"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pyproject-flake8-0.0.1a5.tar.gz", hash = "sha256:22542080ba90d4bd80ee060852db15a24aeea61c9a29ed7c16f5b59b0e47a03a"},
-    {file = "pyproject_flake8-0.0.1a5-py2.py3-none-any.whl", hash = "sha256:c843d760c49d7b270e9abda58a57765c031918a9d10da25aa43572f5d77cac43"},
-]
-
-[package.dependencies]
-flake8 = "<5.0.0"
-
-[[package]]
-name = "pytest"
-version = "7.4.4"
-description = "pytest: simple powerful testing with Python"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
-    {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=0.12,<2.0"
-
-[package.extras]
-testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
-
-[[package]]
-name = "pytest-cov"
-version = "4.1.0"
-description = "Pytest plugin for measuring coverage."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
-    {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
-]
-
-[package.dependencies]
-coverage = {version = ">=5.2.1", extras = ["toml"]}
-pytest = ">=4.6"
-
-[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
-
-[[package]]
-name = "python-dateutil"
-version = "2.9.0.post0"
-description = "Extensions to the standard Python datetime module"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-files = [
-    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
-    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
-]
-
-[package.dependencies]
-six = ">=1.5"
-
-[[package]]
-name = "python-dotenv"
-version = "1.0.1"
-description = "Read key-value pairs from a .env file and set them as environment variables"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
-    {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
-]
-
-[package.extras]
-cli = ["click (>=5.0)"]
-
-[[package]]
-name = "python-json-logger"
-version = "2.0.7"
-description = "A python library adding a json log formatter"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"},
-    {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"},
-]
-
-[[package]]
-name = "pytz"
-version = "2024.2"
-description = "World timezone definitions, modern and historical"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
-    {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
-]
-
-[[package]]
-name = "pywin32"
-version = "306"
-description = "Python for Window Extensions"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
-    {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
-    {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
-    {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
-    {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
-    {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
-    {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
-    {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
-    {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
-    {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
-    {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
-    {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
-    {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
-    {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
-]
-
-[[package]]
-name = "pywinpty"
-version = "2.0.13"
-description = "Pseudo terminal support for Windows from Python."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"},
-    {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"},
-    {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"},
-    {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"},
-    {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"},
-    {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"},
-]
-
-[[package]]
-name = "pyyaml"
-version = "6.0.2"
-description = "YAML parser and emitter for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
-    {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
-    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
-    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
-    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
-    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
-    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
-    {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
-    {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
-    {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
-    {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
-    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
-    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
-    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
-    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
-    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
-    {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
-    {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
-    {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
-    {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
-    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
-    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
-    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
-    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
-    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
-    {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
-    {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
-    {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
-    {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
-    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
-    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
-    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
-    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
-    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
-    {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
-    {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
-    {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
-    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
-    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
-    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
-    {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
-    {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
-    {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
-    {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
-    {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
-    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
-    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
-    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
-    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
-    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
-    {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
-    {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
-    {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
-]
-
-[[package]]
-name = "pyzmq"
-version = "26.2.0"
-description = "Python bindings for 0MQ"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"},
-    {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"},
-    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"},
-    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"},
-    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"},
-    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"},
-    {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"},
-    {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"},
-    {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"},
-    {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"},
-    {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"},
-    {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"},
-    {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"},
-    {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"},
-    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"},
-    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"},
-    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"},
-    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"},
-    {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"},
-    {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"},
-    {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"},
-    {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"},
-    {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"},
-    {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"},
-    {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"},
-    {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"},
-    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"},
-    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"},
-    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"},
-    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"},
-    {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"},
-    {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"},
-    {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"},
-    {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"},
-    {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"},
-    {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"},
-    {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"},
-    {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"},
-    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"},
-    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"},
-    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"},
-    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"},
-    {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"},
-    {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"},
-    {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"},
-    {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"},
-    {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"},
-    {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"},
-    {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"},
-    {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"},
-    {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"},
-    {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"},
-    {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"},
-    {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"},
-    {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash =
"sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, - {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, - {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, - {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, - {file = 
"pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, - {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "regex" -version = "2024.9.11" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, - {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, - {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, - {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, - {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, - {file = 
"regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, - {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, - {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, - {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, - {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, - {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, - {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, - {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, - {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "responses" -version = "0.25.3" -description = "A utility library for mocking out the `requests` Python library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, - {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, -] - -[package.dependencies] -pyyaml = "*" -requests = ">=2.30.0,<3.0" -urllib3 = ">=1.25.10,<3.0" - -[package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "rich" -version = "13.8.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, - {file = "rich-13.8.1.tar.gz", hash = 
"sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rpds-py" -version = "0.20.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, -] - -[[package]] -name = "s3transfer" -version = "0.10.2" -description = "An Amazon S3 Transfer Manager" -optional = false -python-versions = ">=3.8" -files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, -] - -[package.dependencies] -botocore = ">=1.33.2,<2.0a.0" - -[package.extras] -crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] - -[[package]] -name = "safetensors" -version = "0.4.5" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7"}, - {file = "safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27"}, - {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6885016f34bef80ea1085b7e99b3c1f92cb1be78a49839203060f67b40aee761"}, - {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:133620f443450429322f238fda74d512c4008621227fccf2f8cf4a76206fea7c"}, - {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3e0609ec12d2a77e882f07cced530b8262027f64b75d399f1504ffec0ba56"}, - {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f1dd769f064adc33831f5e97ad07babbd728427f98e3e1db6902e369122737"}, - {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6d156bdb26732feada84f9388a9f135528c1ef5b05fae153da365ad4319c4c5"}, - {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e347d77e2c77eb7624400ccd09bed69d35c0332f417ce8c048d404a096c593b"}, - {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9f556eea3aec1d3d955403159fe2123ddd68e880f83954ee9b4a3f2e15e716b6"}, - {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9483f42be3b6bc8ff77dd67302de8ae411c4db39f7224dec66b0eb95822e4163"}, - {file = "safetensors-0.4.5-cp310-none-win32.whl", hash = "sha256:7389129c03fadd1ccc37fd1ebbc773f2b031483b04700923c3511d2a939252cc"}, - {file = "safetensors-0.4.5-cp310-none-win_amd64.whl", hash = "sha256:e98ef5524f8b6620c8cdef97220c0b6a5c1cef69852fcd2f174bb96c2bb316b1"}, - {file = "safetensors-0.4.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:21f848d7aebd5954f92538552d6d75f7c1b4500f51664078b5b49720d180e47c"}, - {file = "safetensors-0.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb07000b19d41e35eecef9a454f31a8b4718a185293f0d0b1c4b61d6e4487971"}, - {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09dedf7c2fda934ee68143202acff6e9e8eb0ddeeb4cfc24182bef999efa9f42"}, - {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59b77e4b7a708988d84f26de3ebead61ef1659c73dcbc9946c18f3b1786d2688"}, - {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d3bc83e14d67adc2e9387e511097f254bd1b43c3020440e708858c684cbac68"}, - {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39371fc551c1072976073ab258c3119395294cf49cdc1f8476794627de3130df"}, - {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c19feda32b931cae0acd42748a670bdf56bee6476a046af20181ad3fee4090"}, - {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a659467495de201e2f282063808a41170448c78bada1e62707b07a27b05e6943"}, - {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bad5e4b2476949bcd638a89f71b6916fa9a5cae5c1ae7eede337aca2100435c0"}, - {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a3a315a6d0054bc6889a17f5668a73f94f7fe55121ff59e0a199e3519c08565f"}, - {file = "safetensors-0.4.5-cp311-none-win32.whl", hash = "sha256:a01e232e6d3d5cf8b1667bc3b657a77bdab73f0743c26c1d3c5dd7ce86bd3a92"}, - {file = "safetensors-0.4.5-cp311-none-win_amd64.whl", hash = "sha256:cbd39cae1ad3e3ef6f63a6f07296b080c951f24cec60188378e43d3713000c04"}, - {file = "safetensors-0.4.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:473300314e026bd1043cef391bb16a8689453363381561b8a3e443870937cc1e"}, - {file = "safetensors-0.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:801183a0f76dc647f51a2d9141ad341f9665602a7899a693207a82fb102cc53e"}, - {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1524b54246e422ad6fb6aea1ac71edeeb77666efa67230e1faf6999df9b2e27f"}, - {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3139098e3e8b2ad7afbca96d30ad29157b50c90861084e69fcb80dec7430461"}, - {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65573dc35be9059770808e276b017256fa30058802c29e1038eb1c00028502ea"}, - {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd33da8e9407559f8779c82a0448e2133737f922d71f884da27184549416bfed"}, - {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3685ce7ed036f916316b567152482b7e959dc754fcc4a8342333d222e05f407c"}, - {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dde2bf390d25f67908278d6f5d59e46211ef98e44108727084d4637ee70ab4f1"}, - {file = "safetensors-0.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7469d70d3de970b1698d47c11ebbf296a308702cbaae7fcb993944751cf985f4"}, - {file = "safetensors-0.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a6ba28118636a130ccbb968bc33d4684c48678695dba2590169d5ab03a45646"}, - {file = "safetensors-0.4.5-cp312-none-win32.whl", hash = "sha256:c859c7ed90b0047f58ee27751c8e56951452ed36a67afee1b0a87847d065eec6"}, - {file = "safetensors-0.4.5-cp312-none-win_amd64.whl", hash = "sha256:b5a8810ad6a6f933fff6c276eae92c1da217b39b4d8b1bc1c0b8af2d270dc532"}, - {file = "safetensors-0.4.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:25e5f8e2e92a74f05b4ca55686234c32aac19927903792b30ee6d7bd5653d54e"}, - {file = "safetensors-0.4.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81efb124b58af39fcd684254c645e35692fea81c51627259cdf6d67ff4458916"}, - {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:585f1703a518b437f5103aa9cf70e9bd437cb78eea9c51024329e4fb8a3e3679"}, - {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b99fbf72e3faf0b2f5f16e5e3458b93b7d0a83984fe8d5364c60aa169f2da89"}, - {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b17b299ca9966ca983ecda1c0791a3f07f9ca6ab5ded8ef3d283fff45f6bcd5f"}, - {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76ded72f69209c9780fdb23ea89e56d35c54ae6abcdec67ccb22af8e696e449a"}, - {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2783956926303dcfeb1de91a4d1204cd4089ab441e622e7caee0642281109db3"}, - {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d94581aab8c6b204def4d7320f07534d6ee34cd4855688004a4354e63b639a35"}, - {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:67e1e7cb8678bb1b37ac48ec0df04faf689e2f4e9e81e566b5c63d9f23748523"}, - {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbd280b07e6054ea68b0cb4b16ad9703e7d63cd6890f577cb98acc5354780142"}, - {file = "safetensors-0.4.5-cp37-cp37m-macosx_10_12_x86_64.whl", hash = 
"sha256:77d9b228da8374c7262046a36c1f656ba32a93df6cc51cd4453af932011e77f1"}, - {file = "safetensors-0.4.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:500cac01d50b301ab7bb192353317035011c5ceeef0fca652f9f43c000bb7f8d"}, - {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75331c0c746f03158ded32465b7d0b0e24c5a22121743662a2393439c43a45cf"}, - {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670e95fe34e0d591d0529e5e59fd9d3d72bc77b1444fcaa14dccda4f36b5a38b"}, - {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:098923e2574ff237c517d6e840acada8e5b311cb1fa226019105ed82e9c3b62f"}, - {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ca0902d2648775089fa6a0c8fc9e6390c5f8ee576517d33f9261656f851e3f"}, - {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f0032bedc869c56f8d26259fe39cd21c5199cd57f2228d817a0e23e8370af25"}, - {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4b15f51b4f8f2a512341d9ce3475cacc19c5fdfc5db1f0e19449e75f95c7dc8"}, - {file = "safetensors-0.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f6594d130d0ad933d885c6a7b75c5183cb0e8450f799b80a39eae2b8508955eb"}, - {file = "safetensors-0.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:60c828a27e852ded2c85fc0f87bf1ec20e464c5cd4d56ff0e0711855cc2e17f8"}, - {file = "safetensors-0.4.5-cp37-none-win32.whl", hash = "sha256:6d3de65718b86c3eeaa8b73a9c3d123f9307a96bbd7be9698e21e76a56443af5"}, - {file = "safetensors-0.4.5-cp37-none-win_amd64.whl", hash = "sha256:5a2d68a523a4cefd791156a4174189a4114cf0bf9c50ceb89f261600f3b2b81a"}, - {file = "safetensors-0.4.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:e7a97058f96340850da0601a3309f3d29d6191b0702b2da201e54c6e3e44ccf0"}, - {file = "safetensors-0.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:63bfd425e25f5c733f572e2246e08a1c38bd6f2e027d3f7c87e2e43f228d1345"}, - {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3664ac565d0e809b0b929dae7ccd74e4d3273cd0c6d1220c6430035befb678e"}, - {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:313514b0b9b73ff4ddfb4edd71860696dbe3c1c9dc4d5cc13dbd74da283d2cbf"}, - {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31fa33ee326f750a2f2134a6174773c281d9a266ccd000bd4686d8021f1f3dac"}, - {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09566792588d77b68abe53754c9f1308fadd35c9f87be939e22c623eaacbed6b"}, - {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309aaec9b66cbf07ad3a2e5cb8a03205663324fea024ba391594423d0f00d9fe"}, - {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:53946c5813b8f9e26103c5efff4a931cc45d874f45229edd68557ffb35ffb9f8"}, - {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:868f9df9e99ad1e7f38c52194063a982bc88fedc7d05096f4f8160403aaf4bd6"}, - {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9cc9449bd0b0bc538bd5e268221f0c5590bc5c14c1934a6ae359d44410dc68c4"}, - {file = "safetensors-0.4.5-cp38-none-win32.whl", hash = 
"sha256:83c4f13a9e687335c3928f615cd63a37e3f8ef072a3f2a0599fa09f863fb06a2"}, - {file = "safetensors-0.4.5-cp38-none-win_amd64.whl", hash = "sha256:b98d40a2ffa560653f6274e15b27b3544e8e3713a44627ce268f419f35c49478"}, - {file = "safetensors-0.4.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cf727bb1281d66699bef5683b04d98c894a2803442c490a8d45cd365abfbdeb2"}, - {file = "safetensors-0.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96f1d038c827cdc552d97e71f522e1049fef0542be575421f7684756a748e457"}, - {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:139fbee92570ecea774e6344fee908907db79646d00b12c535f66bc78bd5ea2c"}, - {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c36302c1c69eebb383775a89645a32b9d266878fab619819ce660309d6176c9b"}, - {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d641f5b8149ea98deb5ffcf604d764aad1de38a8285f86771ce1abf8e74c4891"}, - {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b4db6a61d968de73722b858038c616a1bebd4a86abe2688e46ca0cc2d17558f2"}, - {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b75a616e02f21b6f1d5785b20cecbab5e2bd3f6358a90e8925b813d557666ec1"}, - {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:788ee7d04cc0e0e7f944c52ff05f52a4415b312f5efd2ee66389fb7685ee030c"}, - {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87bc42bd04fd9ca31396d3ca0433db0be1411b6b53ac5a32b7845a85d01ffc2e"}, - {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4037676c86365a721a8c9510323a51861d703b399b78a6b4486a54a65a975fca"}, - {file = "safetensors-0.4.5-cp39-none-win32.whl", hash = "sha256:1500418454529d0ed5c1564bda376c4ddff43f30fce9517d9bee7bcce5a8ef50"}, - {file = "safetensors-0.4.5-cp39-none-win_amd64.whl", hash = "sha256:9d1a94b9d793ed8fe35ab6d5cea28d540a46559bafc6aae98f30ee0867000cab"}, - {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdadf66b5a22ceb645d5435a0be7a0292ce59648ca1d46b352f13cff3ea80410"}, - {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d42ffd4c2259f31832cb17ff866c111684c87bd930892a1ba53fed28370c918c"}, - {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd8a1f6d2063a92cd04145c7fd9e31a1c7d85fbec20113a14b487563fdbc0597"}, - {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:951d2fcf1817f4fb0ef0b48f6696688a4e852a95922a042b3f96aaa67eedc920"}, - {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ac85d9a8c1af0e3132371d9f2d134695a06a96993c2e2f0bbe25debb9e3f67a"}, - {file = "safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e3cec4a29eb7fe8da0b1c7988bc3828183080439dd559f720414450de076fcab"}, - {file = "safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:21742b391b859e67b26c0b2ac37f52c9c0944a879a25ad2f9f9f3cd61e7fda8f"}, - {file = "safetensors-0.4.5-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7db3006a4915151ce1913652e907cdede299b974641a83fbc092102ac41b644"}, - {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f68bf99ea970960a237f416ea394e266e0361895753df06e3e06e6ea7907d98b"}, - {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8158938cf3324172df024da511839d373c40fbfaa83e9abf467174b2910d7b4c"}, - {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:540ce6c4bf6b58cb0fd93fa5f143bc0ee341c93bb4f9287ccd92cf898cc1b0dd"}, - {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bfeaa1a699c6b9ed514bd15e6a91e74738b71125a9292159e3d6b7f0a53d2cde"}, - {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:01c8f00da537af711979e1b42a69a8ec9e1d7112f208e0e9b8a35d2c381085ef"}, - {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a0dd565f83b30f2ca79b5d35748d0d99dd4b3454f80e03dfb41f0038e3bdf180"}, - {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:023b6e5facda76989f4cba95a861b7e656b87e225f61811065d5c501f78cdb3f"}, - {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9633b663393d5796f0b60249549371e392b75a0b955c07e9c6f8708a87fc841f"}, - {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78dd8adfb48716233c45f676d6e48534d34b4bceb50162c13d1f0bdf6f78590a"}, - {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e8deb16c4321d61ae72533b8451ec4a9af8656d1c61ff81aa49f966406e4b68"}, - {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:52452fa5999dc50c4decaf0c53aa28371f7f1e0fe5c2dd9129059fbe1e1599c7"}, - {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d5f23198821e227cfc52d50fa989813513db381255c6d100927b012f0cfec63d"}, - {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f4beb84b6073b1247a773141a6331117e35d07134b3bb0383003f39971d414bb"}, - {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:68814d599d25ed2fdd045ed54d370d1d03cf35e02dce56de44c651f828fb9b7b"}, - {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b6453c54c57c1781292c46593f8a37254b8b99004c68d6c3ce229688931a22"}, - {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adaa9c6dead67e2dd90d634f89131e43162012479d86e25618e821a03d1eb1dc"}, - {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73e7d408e9012cd17511b382b43547850969c7979efc2bc353f317abaf23c84c"}, - {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:775409ce0fcc58b10773fdb4221ed1eb007de10fe7adbdf8f5e8a56096b6f0bc"}, - {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:834001bed193e4440c4a3950a31059523ee5090605c907c66808664c932b549c"}, - {file = "safetensors-0.4.5.tar.gz", hash = "sha256:d73de19682deabb02524b3d5d1f8b3aaba94c72f1bbfc7911b9b9d5d391c0310"}, -] - -[package.extras] -all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] -dev = ["safetensors[all]"] -jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] -mlx = ["mlx (>=0.0.9)"] -numpy = ["numpy (>=1.21.6)"] -paddlepaddle = ["paddlepaddle 
(>=2.4.1)", "safetensors[numpy]"] -pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] -quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] -tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] -testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] -torch = ["safetensors[numpy]", "torch (>=1.10)"] - -[[package]] -name = "scikit-learn" -version = "1.5.2" -description = "A set of python modules for machine learning and data mining" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6"}, - {file = "scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0"}, - {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540"}, - {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8"}, - {file = "scikit_learn-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113"}, - {file = "scikit_learn-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445"}, - {file = "scikit_learn-1.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de"}, - {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675"}, - {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1"}, - {file = "scikit_learn-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6"}, - {file = "scikit_learn-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a"}, - {file = "scikit_learn-1.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1"}, - {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, - {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, - {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, - {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, - {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, - {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, - {file = 
"scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7"}, - {file = "scikit_learn-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe"}, - {file = "scikit_learn-1.5.2.tar.gz", hash = "sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.19.5" -scipy = ">=1.6.0" -threadpoolctl = ">=3.1.0" - -[package.extras] -benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] -build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] -examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] -install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] -maintenance = ["conda-lock (==2.5.6)"] -tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] - -[[package]] -name = "scipy" -version = "1.14.1" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.10" -files = [ - {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"}, - {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"}, - {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"}, - {file = "scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"}, - {file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"}, - {file = 
"scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"}, - {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"}, - {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"}, - {file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"}, - {file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"}, - {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"}, - {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"}, - {file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"}, - {file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e"}, - {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d"}, - {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e"}, - {file = "scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06"}, - {file = "scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84"}, - {file = "scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"}, -] - -[package.dependencies] -numpy = ">=1.23.5,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", 
"pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "seaborn" -version = "0.13.2" -description = "Statistical data visualization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987"}, - {file = "seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7"}, -] - -[package.dependencies] -matplotlib = ">=3.4,<3.6.1 || >3.6.1" -numpy = ">=1.20,<1.24.0 || >1.24.0" -pandas = ">=1.2" - -[package.extras] -dev = ["flake8", "flit", "mypy", "pandas-stubs", "pre-commit", "pytest", "pytest-cov", "pytest-xdist"] -docs = ["ipykernel", "nbconvert", "numpydoc", "pydata_sphinx_theme (==0.10.0rc2)", "pyyaml", "sphinx (<6.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-issues"] -stats = ["scipy (>=1.7)", "statsmodels (>=0.12)"] - -[[package]] -name = "send2trash" -version = "1.8.3" -description = "Send file to trash natively under Mac OS X, Windows and Linux" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, - {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, -] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "sentence-transformers" -version = "3.1.1" -description = "State-of-the-Art Text Embeddings" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sentence_transformers-3.1.1-py3-none-any.whl", hash = "sha256:c73bf6f17e3676bb9372a6133a254ebfb5907586b470f2bac5a840c64c3cf97e"}, - {file = "sentence_transformers-3.1.1.tar.gz", hash = "sha256:8f00020ef4ad6b918475c38af545c22f61403b67eb22d994860bab06902db160"}, -] - -[package.dependencies] -huggingface-hub = ">=0.19.3" -Pillow = "*" -scikit-learn = "*" -scipy = "*" -torch = ">=1.11.0" -tqdm = "*" -transformers = ">=4.38.0,<5.0.0" - -[package.extras] -dev = ["accelerate (>=0.20.3)", "datasets", "pre-commit", "pytest", "pytest-cov"] -train = ["accelerate (>=0.20.3)", "datasets"] - -[[package]] -name = "setuptools" -version = "75.1.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging 
(>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "soupsieve" -version = "2.6" -description = "A modern CSS selector implementation for Beautiful Soup." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "stevedore" -version = "5.3.0" -description = "Manage dynamic plugins for Python applications" -optional = false -python-versions = ">=3.8" -files = [ - {file = "stevedore-5.3.0-py3-none-any.whl", hash = "sha256:1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78"}, - {file = "stevedore-5.3.0.tar.gz", hash = "sha256:9a64265f4060312828151c204efbe9b7a9852a0d9228756344dbc7e4023e375a"}, -] - -[package.dependencies] -pbr = ">=2.0.0" - -[[package]] -name = "streamlit" -version = "1.39.0" -description = "A faster way to build and share data apps" -optional = false -python-versions = "!=3.9.7,>=3.8" -files = [ - {file = "streamlit-1.39.0-py2.py3-none-any.whl", hash = "sha256:a359fc54ed568b35b055ff1d453c320735539ad12e264365a36458aef55a5fba"}, - {file = "streamlit-1.39.0.tar.gz", hash = "sha256:fef9de7983c4ee65c08e85607d7ffccb56b00482b1041fa62f90e4815d39df3a"}, -] - -[package.dependencies] -altair = ">=4.0,<6" -blinker = ">=1.0.0,<2" -cachetools = ">=4.0,<6" -click = ">=7.0,<9" -gitpython = ">=3.0.7,<3.1.19 || >3.1.19,<4" -numpy = ">=1.20,<3" -packaging = ">=20,<25" -pandas = ">=1.4.0,<3" -pillow = ">=7.1.0,<11" -protobuf = ">=3.20,<6" -pyarrow = ">=7.0" -pydeck = ">=0.8.0b4,<1" -requests = ">=2.27,<3" -rich = ">=10.14.0,<14" -tenacity = ">=8.1.0,<10" -toml = ">=0.10.1,<2" -tornado = ">=6.0.3,<7" -typing-extensions = ">=4.3.0,<5" -watchdog = {version = ">=2.1.5,<6", markers = "platform_system != \"Darwin\""} - -[package.extras] -snowflake = ["snowflake-connector-python (>=2.8.0)", "snowflake-snowpark-python[modin] (>=1.17.0)"] - -[[package]] -name = "sympy" -version = "1.13.3" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, - {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - -[[package]] -name = "tenacity" -version = "9.0.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - 
-[[package]] -name = "terminado" -version = "0.18.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, - {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, -] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] -typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] - -[[package]] -name = "threadpoolctl" -version = "3.5.0" -description = "threadpoolctl" -optional = false -python-versions = ">=3.8" -files = [ - {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, - {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, -] - -[[package]] -name = "tinycss2" -version = "1.3.0" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["pytest", "ruff"] - -[[package]] -name = "tokenizers" -version = "0.20.1" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tokenizers-0.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:439261da7c0a5c88bda97acb284d49fbdaf67e9d3b623c0bfd107512d22787a9"}, - {file = "tokenizers-0.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03dae629d99068b1ea5416d50de0fea13008f04129cc79af77a2a6392792d93c"}, - {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b61f561f329ffe4b28367798b89d60c4abf3f815d37413b6352bc6412a359867"}, - {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec870fce1ee5248a10be69f7a8408a234d6f2109f8ea827b4f7ecdbf08c9fd15"}, - {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d388d1ea8b7447da784e32e3b86a75cce55887e3b22b31c19d0b186b1c677800"}, - {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:299c85c1d21135bc01542237979bf25c32efa0d66595dd0069ae259b97fb2dbe"}, - {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e96f6c14c9752bb82145636b614d5a78e9cde95edfbe0a85dad0dd5ddd6ec95c"}, - {file = "tokenizers-0.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9e95ad49c932b80abfbfeaf63b155761e695ad9f8a58c52a47d962d76e310f"}, - {file = "tokenizers-0.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f22dee205329a636148c325921c73cf3e412e87d31f4d9c3153b302a0200057b"}, - {file = "tokenizers-0.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2ffd9a8895575ac636d44500c66dffaef133823b6b25067604fa73bbc5ec09d"}, - {file = 
"tokenizers-0.20.1-cp310-none-win32.whl", hash = "sha256:2847843c53f445e0f19ea842a4e48b89dd0db4e62ba6e1e47a2749d6ec11f50d"}, - {file = "tokenizers-0.20.1-cp310-none-win_amd64.whl", hash = "sha256:f9aa93eacd865f2798b9e62f7ce4533cfff4f5fbd50c02926a78e81c74e432cd"}, - {file = "tokenizers-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4a717dcb08f2dabbf27ae4b6b20cbbb2ad7ed78ce05a829fae100ff4b3c7ff15"}, - {file = "tokenizers-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f84dad1ff1863c648d80628b1b55353d16303431283e4efbb6ab1af56a75832"}, - {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:929c8f3afa16a5130a81ab5079c589226273ec618949cce79b46d96e59a84f61"}, - {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d10766473954397e2d370f215ebed1cc46dcf6fd3906a2a116aa1d6219bfedc3"}, - {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9300fac73ddc7e4b0330acbdda4efaabf74929a4a61e119a32a181f534a11b47"}, - {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ecaf7b0e39caeb1aa6dd6e0975c405716c82c1312b55ac4f716ef563a906969"}, - {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5170be9ec942f3d1d317817ced8d749b3e1202670865e4fd465e35d8c259de83"}, - {file = "tokenizers-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f1ae08fa9aea5891cbd69df29913e11d3841798e0bfb1ff78b78e4e7ea0a4"}, - {file = "tokenizers-0.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ee86d4095d3542d73579e953c2e5e07d9321af2ffea6ecc097d16d538a2dea16"}, - {file = "tokenizers-0.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:86dcd08da163912e17b27bbaba5efdc71b4fbffb841530fdb74c5707f3c49216"}, - {file = "tokenizers-0.20.1-cp311-none-win32.whl", hash = "sha256:9af2dc4ee97d037bc6b05fa4429ddc87532c706316c5e11ce2f0596dfcfa77af"}, - {file = "tokenizers-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:899152a78b095559c287b4c6d0099469573bb2055347bb8154db106651296f39"}, - {file = "tokenizers-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:407ab666b38e02228fa785e81f7cf79ef929f104bcccf68a64525a54a93ceac9"}, - {file = "tokenizers-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f13a2d16032ebc8bd812eb8099b035ac65887d8f0c207261472803b9633cf3e"}, - {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e98eee4dca22849fbb56a80acaa899eec5b72055d79637dd6aa15d5e4b8628c9"}, - {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47c1bcdd61e61136087459cb9e0b069ff23b5568b008265e5cbc927eae3387ce"}, - {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128c1110e950534426e2274837fc06b118ab5f2fa61c3436e60e0aada0ccfd67"}, - {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2e2d47a819d2954f2c1cd0ad51bb58ffac6f53a872d5d82d65d79bf76b9896d"}, - {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bdd67a0e3503a9a7cf8bc5a4a49cdde5fa5bada09a51e4c7e1c73900297539bd"}, - {file = "tokenizers-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b93d2e26d04da337ac407acec8b5d081d8d135e3e5066a88edd5bdb5aff89"}, - {file = 
"tokenizers-0.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0c6a796ddcd9a19ad13cf146997cd5895a421fe6aec8fd970d69f9117bddb45c"}, - {file = "tokenizers-0.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3ea919687aa7001a8ff1ba36ac64f165c4e89035f57998fa6cedcfd877be619d"}, - {file = "tokenizers-0.20.1-cp312-none-win32.whl", hash = "sha256:6d3ac5c1f48358ffe20086bf065e843c0d0a9fce0d7f0f45d5f2f9fba3609ca5"}, - {file = "tokenizers-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:b0874481aea54a178f2bccc45aa2d0c99cd3f79143a0948af6a9a21dcc49173b"}, - {file = "tokenizers-0.20.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:96af92e833bd44760fb17f23f402e07a66339c1dcbe17d79a9b55bb0cc4f038e"}, - {file = "tokenizers-0.20.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:65f34e5b731a262dfa562820818533c38ce32a45864437f3d9c82f26c139ca7f"}, - {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17f98fccb5c12ab1ce1f471731a9cd86df5d4bd2cf2880c5a66b229802d96145"}, - {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8c0fc3542cf9370bf92c932eb71bdeb33d2d4aeeb4126d9fd567b60bd04cb30"}, - {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b39356df4575d37f9b187bb623aab5abb7b62c8cb702867a1768002f814800c"}, - {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfdad27b0e50544f6b838895a373db6114b85112ba5c0cefadffa78d6daae563"}, - {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:094663dd0e85ee2e573126918747bdb40044a848fde388efb5b09d57bc74c680"}, - {file = "tokenizers-0.20.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e4cf033a2aa207d7ac790e91adca598b679999710a632c4a494aab0fc3a1b2"}, - {file = "tokenizers-0.20.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9310951c92c9fb91660de0c19a923c432f110dbfad1a2d429fbc44fa956bf64f"}, - {file = "tokenizers-0.20.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05e41e302c315bd2ed86c02e917bf03a6cf7d2f652c9cee1a0eb0d0f1ca0d32c"}, - {file = "tokenizers-0.20.1-cp37-none-win32.whl", hash = "sha256:212231ab7dfcdc879baf4892ca87c726259fa7c887e1688e3f3cead384d8c305"}, - {file = "tokenizers-0.20.1-cp37-none-win_amd64.whl", hash = "sha256:896195eb9dfdc85c8c052e29947169c1fcbe75a254c4b5792cdbd451587bce85"}, - {file = "tokenizers-0.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:741fb22788482d09d68e73ece1495cfc6d9b29a06c37b3df90564a9cfa688e6d"}, - {file = "tokenizers-0.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10be14ebd8082086a342d969e17fc2d6edc856c59dbdbddd25f158fa40eaf043"}, - {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:514cf279b22fa1ae0bc08e143458c74ad3b56cd078b319464959685a35c53d5e"}, - {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a647c5b7cb896d6430cf3e01b4e9a2d77f719c84cefcef825d404830c2071da2"}, - {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cdf379219e1e1dd432091058dab325a2e6235ebb23e0aec8d0508567c90cd01"}, - {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ba72260449e16c4c2f6f3252823b059fbf2d31b32617e582003f2b18b415c39"}, - {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:910b96ed87316e4277b23c7bcaf667ce849c7cc379a453fa179e7e09290eeb25"}, - {file = "tokenizers-0.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e53975a6694428a0586534cc1354b2408d4e010a3103117f617cbb550299797c"}, - {file = "tokenizers-0.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:07c4b7be58da142b0730cc4e5fd66bb7bf6f57f4986ddda73833cd39efef8a01"}, - {file = "tokenizers-0.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b605c540753e62199bf15cf69c333e934077ef2350262af2ccada46026f83d1c"}, - {file = "tokenizers-0.20.1-cp38-none-win32.whl", hash = "sha256:88b3bc76ab4db1ab95ead623d49c95205411e26302cf9f74203e762ac7e85685"}, - {file = "tokenizers-0.20.1-cp38-none-win_amd64.whl", hash = "sha256:d412a74cf5b3f68a90c615611a5aa4478bb303d1c65961d22db45001df68afcb"}, - {file = "tokenizers-0.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a25dcb2f41a0a6aac31999e6c96a75e9152fa0127af8ece46c2f784f23b8197a"}, - {file = "tokenizers-0.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a12c3cebb8c92e9c35a23ab10d3852aee522f385c28d0b4fe48c0b7527d59762"}, - {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02e18da58cf115b7c40de973609c35bde95856012ba42a41ee919c77935af251"}, - {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f326a1ac51ae909b9760e34671c26cd0dfe15662f447302a9d5bb2d872bab8ab"}, - {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b4872647ea6f25224e2833b044b0b19084e39400e8ead3cfe751238b0802140"}, - {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce6238a3311bb8e4c15b12600927d35c267b92a52c881ef5717a900ca14793f7"}, - {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57b7a8880b208866508b06ce365dc631e7a2472a3faa24daa430d046fb56c885"}, - {file = "tokenizers-0.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a908c69c2897a68f412aa05ba38bfa87a02980df70f5a72fa8490479308b1f2d"}, - {file = "tokenizers-0.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:da1001aa46f4490099c82e2facc4fbc06a6a32bf7de3918ba798010954b775e0"}, - {file = "tokenizers-0.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c097390e2f0ed0a5c5d569e6669dd4e9fff7b31c6a5ce6e9c66a61687197de"}, - {file = "tokenizers-0.20.1-cp39-none-win32.whl", hash = "sha256:3d4d218573a3d8b121a1f8c801029d70444ffb6d8f129d4cca1c7b672ee4a24c"}, - {file = "tokenizers-0.20.1-cp39-none-win_amd64.whl", hash = "sha256:37d1e6f616c84fceefa7c6484a01df05caf1e207669121c66213cb5b2911d653"}, - {file = "tokenizers-0.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48689da7a395df41114f516208d6550e3e905e1239cc5ad386686d9358e9cef0"}, - {file = "tokenizers-0.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:712f90ea33f9bd2586b4a90d697c26d56d0a22fd3c91104c5858c4b5b6489a79"}, - {file = "tokenizers-0.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:359eceb6a620c965988fc559cebc0a98db26713758ec4df43fb76d41486a8ed5"}, - {file = "tokenizers-0.20.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d3caf244ce89d24c87545aafc3448be15870096e796c703a0d68547187192e1"}, - {file = "tokenizers-0.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:03b03cf8b9a32254b1bf8a305fb95c6daf1baae0c1f93b27f2b08c9759f41dee"}, - {file = "tokenizers-0.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:218e5a3561561ea0f0ef1559c6d95b825308dbec23fb55b70b92589e7ff2e1e8"}, - {file = "tokenizers-0.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f40df5e0294a95131cc5f0e0eb91fe86d88837abfbee46b9b3610b09860195a7"}, - {file = "tokenizers-0.20.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:08aaa0d72bb65058e8c4b0455f61b840b156c557e2aca57627056624c3a93976"}, - {file = "tokenizers-0.20.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:998700177b45f70afeb206ad22c08d9e5f3a80639dae1032bf41e8cbc4dada4b"}, - {file = "tokenizers-0.20.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62f7fbd3c2c38b179556d879edae442b45f68312019c3a6013e56c3947a4e648"}, - {file = "tokenizers-0.20.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31e87fca4f6bbf5cc67481b562147fe932f73d5602734de7dd18a8f2eee9c6dd"}, - {file = "tokenizers-0.20.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:956f21d359ae29dd51ca5726d2c9a44ffafa041c623f5aa33749da87cfa809b9"}, - {file = "tokenizers-0.20.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1fbbaf17a393c78d8aedb6a334097c91cb4119a9ced4764ab8cfdc8d254dc9f9"}, - {file = "tokenizers-0.20.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ebe63e31f9c1a970c53866d814e35ec2ec26fda03097c486f82f3891cee60830"}, - {file = "tokenizers-0.20.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:81970b80b8ac126910295f8aab2d7ef962009ea39e0d86d304769493f69aaa1e"}, - {file = "tokenizers-0.20.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130e35e76f9337ed6c31be386e75d4925ea807055acf18ca1a9b0eec03d8fe23"}, - {file = "tokenizers-0.20.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd28a8614f5c82a54ab2463554e84ad79526c5184cf4573bbac2efbbbcead457"}, - {file = "tokenizers-0.20.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9041ee665d0fa7f5c4ccf0f81f5e6b7087f797f85b143c094126fc2611fec9d0"}, - {file = "tokenizers-0.20.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:62eb9daea2a2c06bcd8113a5824af8ef8ee7405d3a71123ba4d52c79bb3d9f1a"}, - {file = "tokenizers-0.20.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f861889707b54a9ab1204030b65fd6c22bdd4a95205deec7994dc22a8baa2ea4"}, - {file = "tokenizers-0.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:89d5c337d74ea6e5e7dc8af124cf177be843bbb9ca6e58c01f75ea103c12c8a9"}, - {file = "tokenizers-0.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0b7f515c83397e73292accdbbbedc62264e070bae9682f06061e2ddce67cacaf"}, - {file = "tokenizers-0.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0305fc1ec6b1e5052d30d9c1d5c807081a7bd0cae46a33d03117082e91908c"}, - {file = "tokenizers-0.20.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc611e6ac0fa00a41de19c3bf6391a05ea201d2d22b757d63f5491ec0e67faa"}, - {file = "tokenizers-0.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5ffe0d7f7bfcfa3b2585776ecf11da2e01c317027c8573c78ebcb8985279e23"}, - {file = "tokenizers-0.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e7edb8ec12c100d5458d15b1e47c0eb30ad606a05641f19af7563bc3d1608c14"}, - {file = 
"tokenizers-0.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:de291633fb9303555793cc544d4a86e858da529b7d0b752bcaf721ae1d74b2c9"}, - {file = "tokenizers-0.20.1.tar.gz", hash = "sha256:84edcc7cdeeee45ceedb65d518fffb77aec69311c9c8e30f77ad84da3025f002"}, -] - -[package.dependencies] -huggingface-hub = ">=0.16.4,<1.0" - -[package.extras] -dev = ["tokenizers[testing]"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "torch" -version = "2.4.1" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971"}, - {file = "torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3"}, - {file = "torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada"}, - {file = "torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd"}, - {file = "torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113"}, - {file = "torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8"}, - {file = "torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c"}, - {file = "torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea"}, - {file = "torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042"}, - {file = "torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d"}, - {file = "torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c"}, - {file = "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d"}, - {file = "torch-2.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c99e1db4bf0c5347107845d715b4aa1097e601bdc36343d758963055e9599d93"}, - {file = "torch-2.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b57f07e92858db78c5b72857b4f0b33a65b00dc5d68e7948a8494b0314efb880"}, - {file = "torch-2.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:f18197f3f7c15cde2115892b64f17c80dbf01ed72b008020e7da339902742cf6"}, - {file = "torch-2.4.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:5fc1d4d7ed265ef853579caf272686d1ed87cebdcd04f2a498f800ffc53dab71"}, - {file = "torch-2.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:40f6d3fe3bae74efcf08cb7f8295eaddd8a838ce89e9d26929d4edd6d5e4329d"}, 
- {file = "torch-2.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c9299c16c9743001ecef515536ac45900247f4338ecdf70746f2461f9e4831db"}, - {file = "torch-2.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:6bce130f2cd2d52ba4e2c6ada461808de7e5eccbac692525337cfb4c19421846"}, - {file = "torch-2.4.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:a38de2803ee6050309aac032676536c3d3b6a9804248537e38e098d0e14817ec"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -setuptools = "*" -sympy = "*" -triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} -typing-extensions = ">=4.8.0" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.11.0)"] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "transformers" -version = "4.45.2" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "transformers-4.45.2-py3-none-any.whl", hash = "sha256:c551b33660cfc815bae1f9f097ecfd1e65be623f13c6ee0dda372bd881460210"}, - {file = "transformers-4.45.2.tar.gz", hash = "sha256:72bc390f6b203892561f05f86bbfaa0e234aab8e927a83e62b9d92ea7e3ae101"}, -] - -[package.dependencies] -filelock = "*" -huggingface-hub = 
">=0.23.2,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -safetensors = ">=0.4.1" -tokenizers = ">=0.20,<0.21" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.26.0)"] -agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=0.9.16)", "tokenizers (>=0.20,<0.21)", "torch", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -benchmark = ["optimum-benchmark (>=0.3.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.26.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.26.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk (<=3.8.1)", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "nltk (<=3.8.1)", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.20,<0.21)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "libcst", "librosa", "nltk (<=3.8.1)", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", 
"pytest-rich", "pytest-timeout", "pytest-xdist", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.20,<0.21)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.26.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "libcst", "librosa", "nltk (<=3.8.1)", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rich", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.20,<0.21)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6,<0.15.0)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "libcst", "rich", "ruff (==0.5.1)", "urllib3 (<2.0.0)"] -ray = ["ray[tune] (>=2.7.0)"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -ruff = ["ruff (==0.5.1)"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk (<=3.8.1)", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", 
"phonemizer", "pyctcdecode (>=0.4.0)"] -tiktoken = ["blobfile", "tiktoken"] -timm = ["timm (<=0.9.16)"] -tokenizers = ["tokenizers (>=0.20,<0.21)"] -torch = ["accelerate (>=0.26.0)", "torch"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.23.2,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.20,<0.21)", "torch", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (>=10.0.1,<=15.0)"] - -[[package]] -name = "triton" -version = "3.0.0" -description = "A language and compiler for custom Deep Learning operations" -optional = false -python-versions = "*" -files = [ - {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"}, - {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"}, - {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"}, - {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"}, - {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"}, -] - -[package.dependencies] -filelock = "*" - -[package.extras] -build = ["cmake (>=3.20)", "lit"] -tests = ["autopep8", "flake8", "isort", "llnl-hatchet", "numpy", "pytest", "scipy (>=1.7.1)"] -tutorials = ["matplotlib", "pandas", "tabulate"] - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20240906" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, - {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "umap-learn" -version = "0.5.6" -description = "Uniform Manifold Approximation and Projection" -optional = false -python-versions = "*" -files = [ - {file = "umap-learn-0.5.6.tar.gz", hash = 
"sha256:5b3917a862c23ba0fc83bfcd67a7b719dec85b3d9c01fdc7d894cce455df4e03"}, - {file = "umap_learn-0.5.6-py3-none-any.whl", hash = "sha256:881cc0c2ee845b790bf0455aa1664f9f68b838d9d0fe12a1291b85c5a559c913"}, -] - -[package.dependencies] -numba = ">=0.51.2" -numpy = ">=1.17" -pynndescent = ">=0.5" -scikit-learn = ">=0.22" -scipy = ">=1.3.1" -tqdm = "*" - -[package.extras] -parametric-umap = ["tensorflow (>=2.1)"] -plot = ["bokeh", "colorcet", "datashader", "holoviews", "matplotlib", "pandas", "scikit-image", "seaborn"] -tbb = ["tbb (>=2019.0)"] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, -] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "urllib3" -version = "2.2.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "virtualenv" -version = "20.26.5" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, - {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "watchdog" -version = "5.0.3" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.9" -files = [ - {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:85527b882f3facda0579bce9d743ff7f10c3e1e0db0a0d0e28170a7d0e5ce2ea"}, - {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53adf73dcdc0ef04f7735066b4a57a4cd3e49ef135daae41d77395f0b5b692cb"}, - {file = 
"watchdog-5.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e25adddab85f674acac303cf1f5835951345a56c5f7f582987d266679979c75b"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f01f4a3565a387080dc49bdd1fefe4ecc77f894991b88ef927edbfa45eb10818"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91b522adc25614cdeaf91f7897800b82c13b4b8ac68a42ca959f992f6990c490"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d52db5beb5e476e6853da2e2d24dbbbed6797b449c8bf7ea118a4ee0d2c9040e"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:752fb40efc7cc8d88ebc332b8f4bcbe2b5cc7e881bccfeb8e25054c00c994ee3"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2e8f3f955d68471fa37b0e3add18500790d129cc7efe89971b8a4cc6fdeb0b2"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b8ca4d854adcf480bdfd80f46fdd6fb49f91dd020ae11c89b3a79e19454ec627"}, - {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:90a67d7857adb1d985aca232cc9905dd5bc4803ed85cfcdcfcf707e52049eda7"}, - {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:720ef9d3a4f9ca575a780af283c8fd3a0674b307651c1976714745090da5a9e8"}, - {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:223160bb359281bb8e31c8f1068bf71a6b16a8ad3d9524ca6f523ac666bb6a1e"}, - {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:560135542c91eaa74247a2e8430cf83c4342b29e8ad4f520ae14f0c8a19cfb5b"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = "sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7"}, - {file = 
"watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49"}, - {file = "watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9"}, - {file = "watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45"}, - {file = "watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "webcolors" -version = "24.8.0" -description = "A library for working with the color formats defined by HTML and CSS." -optional = false -python-versions = ">=3.8" -files = [ - {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, - {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, -] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["coverage[toml]"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "werkzeug" -version = "2.1.2" -description = "The comprehensive WSGI web application library." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, - {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, -] - -[package.extras] -watchdog = ["watchdog"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.13" -description = "Jupyter interactive widgets for Jupyter Notebook" -optional = false -python-versions = ">=3.7" -files = [ - {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, - {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, -] - -[[package]] -name = "wordcloud" -version = "1.9.3" -description = "A little word cloud generator" -optional = false -python-versions = ">=3.7" -files = [ - {file = "wordcloud-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5fce423a24e6ca1b89b2770a7c6917d6e26f04bcfefa601cf61819b2fc0770c4"}, - {file = "wordcloud-1.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3b6adfc1465b9176b8bc602745dd3ed8ea782b006a81cb59eab3dde92ad9f94c"}, - {file = "wordcloud-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6db37a6f5abeba51a5d503228ea320d4f2fa774864103e7b24acd9dd86fd0e"}, - {file = "wordcloud-1.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e74ac99e9582873d7ee28bd03e125dcf73ae46666d55fb4c13e82e90c0e074a"}, - {file = "wordcloud-1.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4001317c0e3b5cb6fd106228ddcd27524d1caf9ae468b3c2c2fc571c6ce56b22"}, - {file = "wordcloud-1.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f86042e5ce12e2795798033a56f0246906b4d7d9027d554b6cd951ce2fd342a"}, - {file = "wordcloud-1.9.3-cp310-cp310-win32.whl", hash = "sha256:3b90f0390c0a05ba4b4580fb765a3d45d8d21519b50ca5006d6dbdc2a0b86507"}, - {file = "wordcloud-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:6f7977285df9254b8704d3f895c06814a6183c6c89e140d6281848c076635e91"}, - {file = "wordcloud-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ced0d5c946d82cfc778febafe3eedeb0bae07dd57ea4f21fe06b9ec8225ab31"}, - {file = "wordcloud-1.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f5499e6360219e61808dc0d2b00cd5104f78a82d2ae8f7986df04731713835f"}, - {file = "wordcloud-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb1e8bb7d60f7a90fa8439c7b56dd1df60766115fd57480ac0d83ca5204e0117"}, - {file = "wordcloud-1.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e33328044db5c01487f2a3a023b5476947942dacd6a5dc8c217fa039f6c5bd9"}, - {file = "wordcloud-1.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:998dc0dc8fcbff88f566f17cb5e0eb3bb21fcafd387b0670be6c14feacaf4cdc"}, - {file = "wordcloud-1.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e1a1c3cfa86b605a19711ec58920ccb694dca9d5c9d00b373f4d5952d63793e9"}, - {file = "wordcloud-1.9.3-cp311-cp311-win32.whl", hash = "sha256:f504e3291256c0b6fca044602f8f0e5cb56b7c33724cde9d279c4077fa5b6d27"}, - {file = "wordcloud-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:103c9b0465e1cf5b7a38b49ab1c3a0b0301762fa56602ac79287f9d22b46ade3"}, - {file = "wordcloud-1.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:dfea303fc1dec4811e4a5671a8021a89724b6fa70639d059ad30c492932be447"}, - {file = "wordcloud-1.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:512f3c9a2e8579269a33ac9219d042fd0cc5a3a524ee68079238a3e4efe2b879"}, - {file = "wordcloud-1.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d00d916509a17b432032161d492ed7f30b2ebd921303090fe1d2b57011a49cc0"}, - {file = "wordcloud-1.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5e0e7bbd269a62baa63ea2175faea4d74435c0ad828f3d5999fa4c33ebe0629"}, - {file = "wordcloud-1.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:483aa4f8d17b9744a3b238269593d1794b962fc757a72a9e7e8468c2665cffb7"}, - {file = "wordcloud-1.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:64b342a79553970fa04083761d041067323219ad62b5550a496e42436d23cbb3"}, - {file = "wordcloud-1.9.3-cp312-cp312-win32.whl", hash = "sha256:419acfe0b1d1227b9e3e14ec1bb6c40fd7fa652df4adf81f0ba3e00daca500b5"}, - {file = "wordcloud-1.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:2061a9978a6243107ce1a8a9fa24f421b03a0f7e620769b6f5075857e75aa615"}, - {file = "wordcloud-1.9.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21f47fabe189f39532378759300a624ae166519dfafbd6a22cfe65b14a7d104d"}, - {file = "wordcloud-1.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524065f8a5a79e00748f45efbeacd25ac1d15850e0d0588753b17a8b2de2a6a7"}, - {file = "wordcloud-1.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b2bb53492bc8663ba90a300bbd2da7be5059f9ad192ed1150e9bbbda8016c9a"}, - {file = "wordcloud-1.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:643243474faee460e7d08944d3e529c58d0cbf8be11626fbb918ee8ccb913a23"}, - {file = "wordcloud-1.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d95f44739a6972abfb97c12656999952dd28ed03700ee8b6efe35d688d489b36"}, - {file = "wordcloud-1.9.3-cp37-cp37m-win32.whl", hash = "sha256:e56364c8829d399397a649501f834c12751ab106cba488ba8d86d532889b528c"}, - {file = "wordcloud-1.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:78f4a3fd3526884e4f526ae070bcb47401766c48c9cb6488933f608f810fadae"}, - {file = "wordcloud-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0058cf08573c99283fe189e93354d20ca8c9a8aac7207d96e74b93aedd02cdcc"}, - {file = "wordcloud-1.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47d6918381a8a816141bdd391376bff703ec5aa3a6bd88631097a5e2963ebd1a"}, - {file = "wordcloud-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05aa3269c5af573cfb11e269de0fe73c2c72aefdd90cdb41368744e7d8bc7507"}, - {file = "wordcloud-1.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d74e206f42af172db4d3c0054853523bf46070b12f0626493a56599957dd2196"}, - {file = "wordcloud-1.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1932726635c8ed12bb74201d2a6b07f18c2f732aecadb9ae915832485241991f"}, - {file = "wordcloud-1.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:038de1701e7853c41850644453f1c9e69f878e480d42efae154684a47fd59f1a"}, - {file = "wordcloud-1.9.3-cp38-cp38-win32.whl", hash = "sha256:19aa05f60d9261301e4942fd1b1c4b458d903f24c12d2bd1c6ecbb752697a2f3"}, - {file = "wordcloud-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:ab5bae12cf27d8de986e4d4518d4778f2b56c660b250b631ff805024038311a1"}, - {file = "wordcloud-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:888d088f54a897b8597da2fae3954d74b1f7251f7d311bbcc30ec3c6987d3605"}, - {file = "wordcloud-1.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:daa6cfa11ce24e7eb4e42dc896dae4f74ae2166cf90ec997996300566e6811d1"}, - {file = "wordcloud-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387dc2bd528ff6bb661451f2a9fd4ccf74b86072d7a2c868285d4c0cf26abeb4"}, - {file = "wordcloud-1.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40c32a324319db610b40f387a2a0b42d091817958a5272e0a4c4eb6a158588b5"}, - {file = "wordcloud-1.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8078c6c58db4ccb893f120354e7e08bc48a5a5aac3e764f9008bc96a769b208c"}, - {file = "wordcloud-1.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81f15eb60abc1676808bb85e2edfdbdc0a9011383f2a729c1c2a0cb941516768"}, - {file = "wordcloud-1.9.3-cp39-cp39-win32.whl", hash = "sha256:1d1680bf6c3d1b2f8e3bd02ccfa868fee2655fe13cf5b9e9905251050448fbbd"}, - {file = "wordcloud-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:c0f458681e4d49be36064f21bfb1dc8d8c3021fe30e474ee634666b4f84fd851"}, - {file = "wordcloud-1.9.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baea9ac88ec1ab317461c75834b64ad5dad12a02c4f2384dd546eac3c316dbbb"}, - {file = "wordcloud-1.9.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6956b9f0d0eb14a12f46d41aebb4e7ad2d4c2ec417cc7c586bebd2ddc9c8311"}, - {file = "wordcloud-1.9.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d221b4d0d1d2a1d79286c41d8a4c0ce70065488f153e5d81cc0be7fb494ff10f"}, - {file = "wordcloud-1.9.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:db39dbe91dd31ffb667edcd496f4eeb85ceea397fef4ad51d0766ab934088cc7"}, - {file = "wordcloud-1.9.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6ae5db43807ca10f5c77dd2d22c78f8f9399758cc5ac6afd7f3c19e58b75d66"}, - {file = "wordcloud-1.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a1c431f20ee28a8840f2552a89bd8332c455c318f4de7b6c2ca3159b76df4f0"}, - {file = "wordcloud-1.9.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1847ca4466e2b1588478dd8eb87fa7baa28515b37ab7926471595e8ac81e6578"}, - {file = "wordcloud-1.9.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7b0e14e4dfcff7dee331df7880a2031e352e95a7d30e74ff152f162488b04179"}, - {file = "wordcloud-1.9.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f1c0cff6037a3dc46437537a31925f3895d742fb6d67af71194149763de16a76"}, - {file = "wordcloud-1.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a36788c5c79604653327675023cbd97c68813640887b51ce651bb4f5c28c88b"}, - {file = "wordcloud-1.9.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e3907c6496e197a9c4be76770c5ff8a03eddbdfe5a151a55e4eedeaa45ab3ad"}, - {file = "wordcloud-1.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:65e6f6b68eecb85c326ae19729dd4151fcdebffc2142c9ee882dc2de955210d0"}, - {file = "wordcloud-1.9.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0c8e18c4afa025819332efffe8008267a83a9c54fe72ae1bc889ddce0eec470d"}, - {file = "wordcloud-1.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4df25cb5dd347e43d53e02a009418f5776e7651063aff991865da8f6336bf193"}, - {file = 
"wordcloud-1.9.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53489ad22d58be3896ec16ed47604832e393224c89f7d7eed040096b07141ac4"}, - {file = "wordcloud-1.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61de4a5f3bfd33e0cb013cce6143bcf71959f3cd8536650b90134d745a553c2c"}, - {file = "wordcloud-1.9.3.tar.gz", hash = "sha256:a9aa738d63ed674a40f0cc31adb83f4ca5fc195f03a6aff6e010d1f5807d1c58"}, -] - -[package.dependencies] -matplotlib = "*" -numpy = ">=1.6.1" -pillow = "*" - -[[package]] -name = "xmltodict" -version = "0.13.0" -description = "Makes working with XML feel like you are working with JSON" -optional = false -python-versions = ">=3.4" -files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, -] - -[[package]] -name = "yamllint" -version = "1.35.1" -description = "A linter for YAML files." -optional = false -python-versions = ">=3.8" -files = [ - {file = "yamllint-1.35.1-py3-none-any.whl", hash = "sha256:2e16e504bb129ff515b37823b472750b36b6de07963bd74b307341ef5ad8bdc3"}, - {file = "yamllint-1.35.1.tar.gz", hash = "sha256:7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"}, -] - -[package.dependencies] -pathspec = ">=0.5.3" -pyyaml = "*" - -[package.extras] -dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.11" -content-hash = "af5bb6f93e0ca00a16513276b649e2a3d17c390945fe36029e59232fd3a49258" From 6042f3daea82e0b89890889e81205bcda712c4e2 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Thu, 7 Nov 2024 11:42:25 +0000 Subject: [PATCH 17/25] Add dependencies and remove unused scripts --- .../pipeline/prep_output_tables.py | 7 +- .../pipeline/topic_modelling.py | 6 + .../utils/data_cleaning.py | 121 ++++++++++++----- .../utils/topic_modelling_utils.py | 22 ---- pyproject.toml | 2 + tests/test_concatenate_consecutive_roles.py | 123 ++++++++---------- tests/test_create_q_and_a_column.py | 79 ----------- 7 files changed, 163 insertions(+), 197 deletions(-) delete mode 100644 dsp_interview_transcripts/utils/topic_modelling_utils.py delete mode 100644 tests/test_create_q_and_a_column.py diff --git a/dsp_interview_transcripts/pipeline/prep_output_tables.py b/dsp_interview_transcripts/pipeline/prep_output_tables.py index 046d95b..0fa6b33 100644 --- a/dsp_interview_transcripts/pipeline/prep_output_tables.py +++ b/dsp_interview_transcripts/pipeline/prep_output_tables.py @@ -6,6 +6,7 @@ import pandas as pd from dsp_interview_transcripts import PROJECT_DIR +from dsp_interview_transcripts import logger OUTPUT_PATH_FULL_DATA = PROJECT_DIR / "outputs/final/final_df.csv" @@ -128,11 +129,15 @@ def create_scatterplot( } ) + logger.info("Saving output table...") final_df.sort_values(["conversation", "timestamp"]).to_csv(OUTPUT_PATH_FULL_DATA, index=False) # Visualise clusters + logger.info("Saving figures...") - fig = create_scatterplot() + fig = create_scatterplot( + data_viz, + ) fig.save(PROJECT_DIR / "outputs/scatter_coloured_by_topic.html") fig_questions = create_scatterplot( diff --git a/dsp_interview_transcripts/pipeline/topic_modelling.py b/dsp_interview_transcripts/pipeline/topic_modelling.py index 82c36c4..062bf6c 100644 --- a/dsp_interview_transcripts/pipeline/topic_modelling.py +++ 
b/dsp_interview_transcripts/pipeline/topic_modelling.py @@ -83,8 +83,10 @@ def stratified_sample(group: pd.DataFrame, n: int = 10) -> pd.DataFrame: user_messages = pd.read_csv(DATA_PATH) docs = user_messages["text_clean"].tolist() + logger.info("Embedding user messages...") embeddings = SENTENCE_MODEL.encode(docs, show_progress_bar=True) + logger.info("Reducing dimensionality...") umap_vectors = umap_model.fit_transform(embeddings) umap_df = pd.DataFrame(umap_vectors) @@ -95,9 +97,11 @@ def stratified_sample(group: pd.DataFrame, n: int = 10) -> pd.DataFrame: model_vars = umap_vars # Normalise the umap vectors + logger.info("Normalising UMAP vectors...") scaler = StandardScaler() df_normalized = scaler.fit_transform(user_messages_w_umap[model_vars]) + logger.info("Fitting HDBSCAN...") clusters = hdbscan_model.fit_predict(df_normalized) cluster_probabilities = hdbscan_model.probabilities_ user_messages_w_umap["label"] = clusters @@ -105,10 +109,12 @@ def stratified_sample(group: pd.DataFrame, n: int = 10) -> pd.DataFrame: logger.info(user_messages_w_umap["label"].value_counts()) # 2d embeddings for visualisation + logger.info("Creating 2D embeddings for visualisation...") umap_2d = UMAP(random_state=RANDOM_SEED, n_components=2) embeddings_2d = umap_2d.fit_transform(embeddings) # topic representations + logger.info("Creating tfidf representation...") cluster_groups = user_messages_w_umap.groupby("label").agg({"text_clean": " ".join}).reset_index() tfidf_matrix = vectorizer_model.fit_transform(cluster_groups["text_clean"].to_list()) diff --git a/dsp_interview_transcripts/utils/data_cleaning.py b/dsp_interview_transcripts/utils/data_cleaning.py index 30ac581..04b4492 100644 --- a/dsp_interview_transcripts/utils/data_cleaning.py +++ b/dsp_interview_transcripts/utils/data_cleaning.py @@ -1,42 +1,72 @@ +import re + +from typing import Union + import emoji import ftfy import pandas as pd -import re from sentence_transformers import SentenceTransformer from sklearn.metrics.pairwise import cosine_similarity + # Load model -small_model = SentenceTransformer('paraphrase-MiniLM-L3-v2') +SMALL_MODEL = SentenceTransformer("paraphrase-MiniLM-L3-v2") # Embed the target sentence TARGET_SENTENCE = "Are these instructions clear or do you need any further clarification?" -target_embedding = small_model.encode([TARGET_SENTENCE]) +TARGET_EMBEDDING = SMALL_MODEL.encode([TARGET_SENTENCE]) -def remove_preamble(df, target_embedding=target_embedding, model=small_model): - """Get rid of everything up until the bot asks if the instructions are clear + +def remove_preamble( + df: pd.DataFrame, target_embedding: list = TARGET_EMBEDDING, model: SentenceTransformer = SMALL_MODEL +) -> pd.DataFrame: + """Get rid of everything up until the bot asks if the instructions are clear. + + Removes all messages up to and including the first message from the bot that is highly similar + to the target sentence "Are these instructions clear or do you need any further clarification?". + Everything before this question is not considered relevant to our analysis. + + Args: + df (pd.DataFrame): DataFrame containing conversation data with columns 'role', 'text_clean', and 'timestamp_clean'. + target_embedding (list): The embedding of the target sentence used for similarity comparison. + model (SentenceTransformer): The model used to generate embeddings for bot messages. + + Returns: + pd.DataFrame: Filtered DataFrame with messages occurring after the cutoff timestamp. 
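+
+    Usage (sketch; assumes a conversation DataFrame with the 'role',
+    'text_clean' and 'timestamp_clean' columns used in the body):
+        trimmed = remove_preamble(interviews_df)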
""" # Filter BOT messages - bot_messages = df[df['role'] == 'BOT'] - + bot_messages = df[df["role"] == "BOT"] + # Embed BOT messages - bot_embeddings = model.encode(bot_messages['text_clean'].tolist()) - + bot_embeddings = model.encode(bot_messages["text_clean"].tolist()) + # Calculate cosine similarity similarities = cosine_similarity(target_embedding, bot_embeddings).flatten() - + # Find the index of the most similar BOT message most_similar_idx = similarities.argmax() - + # Get the timestamp of that message - cutoff_timestamp = bot_messages.iloc[most_similar_idx]['timestamp_clean'] - + cutoff_timestamp = bot_messages.iloc[most_similar_idx]["timestamp_clean"] + # Filter out messages prior to the cutoff timestamp - return df[df['timestamp_clean'] > cutoff_timestamp] + return df[df["timestamp_clean"] > cutoff_timestamp] + -def convert_timestamp(timestamp): +def convert_timestamp(timestamp: str) -> Union[pd.Timestamp, pd.NaT]: + """ + Converts a timestamp string to a pandas Timestamp object, removing any trailing timezone information. + If the timestamp is invalid, returns NaT. + + Args: + timestamp (str): The timestamp string to convert, potentially with timezone information. + + Returns: + Union[pd.Timestamp, pd.NaT]: A pandas Timestamp object if conversion is successful; NaT otherwise. + """ # Remove the daylight saving time '+01:00' and trailing whitespace - cleaned_timestamp = re.sub('\+01[\:]?00$', '', timestamp) + cleaned_timestamp = re.sub("\+01[\:]?00$", "", timestamp) cleaned_timestamp = cleaned_timestamp.rstrip() try: # Convert to datetime @@ -46,21 +76,47 @@ def convert_timestamp(timestamp): print(f"Cannot convert timestamp: {cleaned_timestamp}") return pd.NaT # Return NaT (Not a Time) for invalid timestamps -def fill_text_with_transcript(data_df): + +def fill_text_with_transcript(data_df: pd.DataFrame) -> pd.DataFrame: """ - Fills the 'text' column with the 'transcript' column where 'text' is missing. + Fills missing values in the 'text' column with corresponding values from the 'transcript' column + (some users supplied audio messages that got transcribed). + + Args: + data_df (pd.DataFrame): DataFrame containing 'text' and 'transcript' columns. + + Returns: + pd.DataFrame: Updated DataFrame with missing 'text' values filled from 'transcript'. """ data_df = data_df.assign(text=lambda x: x["text"].fillna(x["transcript"])) return data_df.fillna({"text": ""}) -def add_text_length(data_df, text_col='text_clean'): + +def add_text_length(data_df: pd.DataFrame, text_col: str = "text_clean") -> pd.DataFrame: """ - Adds a new column 'text_length' with the length of the 'text' column. + Adds a 'text_length' column to the DataFrame, representing the number of words in the text column. + + Args: + data_df (pd.DataFrame): DataFrame containing a text column. + text_col (str): The name of the column containing text data (default is 'text_clean'). + + Returns: + pd.DataFrame: Updated DataFrame with a new 'text_length' column. """ data_df = data_df.assign(text_length=lambda x: x[text_col].apply(lambda text: len(text.split()))) return data_df -def replace_punct(text): + +def replace_punct(text: str) -> str: + """ + Replaces or removes specific punctuation characters. + + Args: + text (str): The text string to process. + + Returns: + str: The cleaned text with replacements made. 
+ """ text = ( text.replace("&", "and") .replace("\xa0", " ") @@ -72,19 +128,26 @@ def replace_punct(text): return text.strip() -def clean_data(data_df): + +def clean_data(data_df: pd.DataFrame) -> pd.DataFrame: """ Pulls together all the previous cleaning steps + + Args: + data_df (pd.DataFrame): DataFrame containing conversation data, with at least 'text' and 'text_clean' columns. + + Returns: + pd.DataFrame: Cleaned DataFrame with processed text. """ data_df = fill_text_with_transcript(data_df) - + # Fix improperly coded characters - data_df['text_clean'] = data_df['text'].apply(lambda x: ftfy.fix_text(x)) - + data_df["text_clean"] = data_df["text"].apply(lambda x: ftfy.fix_text(x)) + # Remove emojis - data_df['text_clean'] = data_df['text_clean'].apply(lambda x: emoji.demojize(x)) - + data_df["text_clean"] = data_df["text_clean"].apply(lambda x: emoji.demojize(x)) + # Replace punctuation - data_df['text_clean'] = data_df['text_clean'].apply(replace_punct) - + data_df["text_clean"] = data_df["text_clean"].apply(replace_punct) + return data_df diff --git a/dsp_interview_transcripts/utils/topic_modelling_utils.py b/dsp_interview_transcripts/utils/topic_modelling_utils.py deleted file mode 100644 index 19007e3..0000000 --- a/dsp_interview_transcripts/utils/topic_modelling_utils.py +++ /dev/null @@ -1,22 +0,0 @@ -import pandas as pd -from typing import List - -def concat_topics_probabilities( - input_df: pd.DataFrame, topics: List[str], docs, probs, axis=1 -): - """Bring together the original dataframe of docs, and the probabilities and topic - labels from the topic model. - """ - df = pd.DataFrame( - { - "doc_index": range(len(docs)), - "topic": topics, - "probability": probs.max(axis=axis), - } - ) - - output_df = pd.concat( - [input_df.reset_index(drop=True), df.reset_index(drop=True)], axis=1 - ) - - return output_df diff --git a/pyproject.toml b/pyproject.toml index 1342dab..a3969ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,6 +28,8 @@ torch = "^2.4.1" wordcloud = "^1.9.3" seaborn = "^0.13.2" openpyxl = "^3.1.5" +langchain = "^0.3.7" +langchain-community = "^0.3.5" [tool.poetry.group.test] diff --git a/tests/test_concatenate_consecutive_roles.py b/tests/test_concatenate_consecutive_roles.py index fb35311..bfe6f8e 100644 --- a/tests/test_concatenate_consecutive_roles.py +++ b/tests/test_concatenate_consecutive_roles.py @@ -1,78 +1,69 @@ import pandas as pd import pytest -from dsp_interview_transcripts.pipeline.archive.process_data import concatenate_consecutive_roles +from dsp_interview_transcripts.pipeline.process_data import concatenate_consecutive_roles def test_concatenate_consecutive_roles(): # Sample made-up conversation data - df = pd.DataFrame({ - 'uuid': [ - 'uuid1', 'uuid2', 'uuid3', 'uuid4', - 'uuid5', 'uuid6', 'uuid7', 'uuid8', - 'uuid9', 'uuid10' - ], - 'timestamp': [ - '2024-05-01 10:00:00', '2024-05-01 10:01:00', '2024-05-01 10:02:00', - '2024-05-01 10:03:00', '2024-05-01 10:04:00', '2024-05-01 10:05:00', - '2024-05-01 10:06:00', '2024-05-01 10:07:00', '2024-05-01 10:08:00', - '2024-05-01 10:09:00' - ], - 'conversation': [ - 'conv1', 'conv1', 'conv1', 'conv1', - 'conv1', 'conv1', 'conv1', 'conv1', - 'conv1', 'conv1' - ], - 'role': [ - 'USER', 'USER', 'BOT', 'BOT', - 'USER', 'BOT', 'USER', 'USER', - 'BOT', 'USER' - ], - 'text_clean': [ - "Hi!", - "Can you help me?", - "Sure, how can I assist?", - "Do you need more details?", - "Yes, I need help with my account.", - "What exactly seems to be the issue?", - "I forgot my password.", - "Also, I can't access my 
email.", - "Let me help you with that.", "Thank you!" - ] - }) + df = pd.DataFrame( + { + "uuid": ["uuid1", "uuid2", "uuid3", "uuid4", "uuid5", "uuid6", "uuid7", "uuid8", "uuid9", "uuid10"], + "timestamp": [ + "2024-05-01 10:00:00", + "2024-05-01 10:01:00", + "2024-05-01 10:02:00", + "2024-05-01 10:03:00", + "2024-05-01 10:04:00", + "2024-05-01 10:05:00", + "2024-05-01 10:06:00", + "2024-05-01 10:07:00", + "2024-05-01 10:08:00", + "2024-05-01 10:09:00", + ], + "conversation": ["conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1"], + "role": ["USER", "USER", "BOT", "BOT", "USER", "BOT", "USER", "USER", "BOT", "USER"], + "text_clean": [ + "Hi!", + "Can you help me?", + "Sure, how can I assist?", + "Do you need more details?", + "Yes, I need help with my account.", + "What exactly seems to be the issue?", + "I forgot my password.", + "Also, I can't access my email.", + "Let me help you with that.", + "Thank you!", + ], + } + ) # Expected result after concatenating consecutive roles - expected_df = pd.DataFrame({ - 'uuid': [ - 'uuid1', 'uuid3', 'uuid5', 'uuid6', - 'uuid7', 'uuid9', 'uuid10' - ], - 'timestamp': [ - '2024-05-01 10:00:00', '2024-05-01 10:02:00', - '2024-05-01 10:04:00', '2024-05-01 10:05:00', - '2024-05-01 10:06:00', '2024-05-01 10:08:00', - '2024-05-01 10:09:00' - ], - 'conversation': [ - 'conv1', 'conv1', 'conv1', 'conv1', - 'conv1', 'conv1', - 'conv1' - ], - 'role': [ - 'USER', 'BOT', 'USER', 'BOT', - 'USER', 'BOT', 'USER' - ], - 'text_clean': [ - "Hi! Can you help me?", - "Sure, how can I assist? Do you need more details?", - "Yes, I need help with my account.", - "What exactly seems to be the issue?", - "I forgot my password. Also, I can't access my email.", - "Let me help you with that.", - "Thank you!" - - ] - })[['conversation', 'timestamp', 'text_clean', 'role', 'uuid']] + expected_df = pd.DataFrame( + { + "uuid": ["uuid1", "uuid3", "uuid5", "uuid6", "uuid7", "uuid9", "uuid10"], + "timestamp": [ + "2024-05-01 10:00:00", + "2024-05-01 10:02:00", + "2024-05-01 10:04:00", + "2024-05-01 10:05:00", + "2024-05-01 10:06:00", + "2024-05-01 10:08:00", + "2024-05-01 10:09:00", + ], + "conversation": ["conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1"], + "role": ["USER", "BOT", "USER", "BOT", "USER", "BOT", "USER"], + "text_clean": [ + "Hi! Can you help me?", + "Sure, how can I assist? Do you need more details?", + "Yes, I need help with my account.", + "What exactly seems to be the issue?", + "I forgot my password. 
Also, I can't access my email.", + "Let me help you with that.", + "Thank you!", + ], + } + )[["conversation", "timestamp", "text_clean", "role", "uuid"]] # Call the function to test result_df = concatenate_consecutive_roles(df) diff --git a/tests/test_create_q_and_a_column.py b/tests/test_create_q_and_a_column.py deleted file mode 100644 index 825441b..0000000 --- a/tests/test_create_q_and_a_column.py +++ /dev/null @@ -1,79 +0,0 @@ -import pandas as pd -import pytest - -from dsp_interview_transcripts.pipeline.archive.process_data import create_q_and_a_column, concatenate_consecutive_roles - - -@pytest.fixture -def sample_df(): - return pd.DataFrame({ - 'conversation': [1, 1, 1, 2, 2, 2], - 'uuid': [1, 2, 3, 4, 5, 6], - 'timestamp': ['2024-05-01 10:00:00', - '2024-05-01 10:01:00', - '2024-05-01 10:02:00', - '2024-05-01 10:03:00', - '2024-05-01 10:04:00', - '2024-05-01 10:05:00'], - 'role': ['BOT', 'USER', 'USER', 'BOT', 'USER', 'USER'], - 'text_clean': [ - "Hello, I am a bot", - "Hello", - "Are you going to ask me questions?", - "Hello, I am still a bot", - "Hi, I am human", - "Do you have questions for me?" - ] - }) - -def test_incorrect_behavior_with_consecutive_roles(sample_df): - """Test that the function performs incorrectly with consecutive BOT or USER messages.""" - # Call the original function without preprocessing - result_df = create_q_and_a_column(sample_df, text_col='text_clean', conversation_col='conversation') - - print(result_df['q_and_a'].tolist()) - # Expected incorrect output: Bot message followed by incorrect user messages - incorrect_expected_q_and_a = [ - '', # Bot message has no Q&A - 'Hello, I am a bot\nHello', # First user response - 'Hello, I am a bot\nAre you going to ask me questions?', # Second user response - '', # New conversation, first bot message - "Hello, I am still a bot\nHi, I am human", # First user response in new conversation - 'Hello, I am still a bot\nDo you have questions for me?' # Second user response in new conversation - ] - - # The function should incorrectly process consecutive user messages without proper reset - assert result_df['q_and_a'].tolist() == incorrect_expected_q_and_a, "The function incorrectly processed consecutive USER messages." - -def test_correct_behavior_after_preprocessing(sample_df): - """Test that the function works correctly after ensuring alternating BOT and USER roles.""" - - preprocessed_df = concatenate_consecutive_roles(sample_df) - - # Call the original function on the preprocessed DataFrame - result_df = create_q_and_a_column(preprocessed_df, text_col='text_clean', conversation_col='conversation') - print(result_df['q_and_a'].tolist()) - - # Expected correct output after preprocessing - expected_q_and_a = [ - '', # Bot message has no Q&A - 'Hello, I am a bot\nHello Are you going to ask me questions?', - '', # New conversation, first bot message - "Hello, I am still a bot\nHi, I am human Do you have questions for me?", # First user response in new conversation - ] - - # Check that the 'q_and_a' column was created and matches expected output after preprocessing - assert 'q_and_a' in result_df.columns - assert result_df['q_and_a'].tolist() == expected_q_and_a, "The function processed the Q&A correctly after preprocessing." 
- -def test_empty_bot_text_reset_after_preprocessing(sample_df): - """Test that the bot text resets at the end of each conversation after preprocessing.""" - # Preprocess the DataFrame to ensure alternating BOT and USER roles - preprocessed_df = concatenate_consecutive_roles(sample_df) - - # Call the function after preprocessing - result_df = create_q_and_a_column(preprocessed_df, text_col='text_clean', conversation_col='conversation') - print(result_df['q_and_a'].tolist()) - - # Ensure bot message was reset after conversation 1 ends - assert result_df.loc[2, 'q_and_a'] == '', "The bot text was reset correctly after conversation 1." From af286981575876befd882039ac88dae3a984c449 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Thu, 7 Nov 2024 15:08:33 +0000 Subject: [PATCH 18/25] Add readme and add small fixes --- dsp_interview_transcripts/pipeline/README.md | 7 +++++++ .../pipeline/prep_output_tables.py | 9 +++++++-- .../pipeline/top_down_analysis.py | 11 ++++++----- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/dsp_interview_transcripts/pipeline/README.md b/dsp_interview_transcripts/pipeline/README.md index 223250d..2ba00ba 100644 --- a/dsp_interview_transcripts/pipeline/README.md +++ b/dsp_interview_transcripts/pipeline/README.md @@ -11,3 +11,10 @@ - `python dsp_interview_transcripts/pipeline/name_clusters.py`. This uses llama3.2 to generate a name and a description for each topic. - `python dsp_interview_transcripts/pipeline/prep_output_tables.py`. This formats the output data and also saves scatterplots locally. - `python dsp_interview_transcripts/pipeline/top_down_analysis.py`. This mimics the top-down/framework approach. It subsets the data according to which question is being discussed and then runs BERTopic on each subset. + +The outputs are: + +- `outputs/final/final_df.csv`: this is the main output file for the bottom-up approach, containing all user responses above a set length, some preceding context for each, the topic they're assigned to, the sentiment etc. +- `outputs/final/summary_info.csv`: this contains just the topic names and descriptions for the bottom-up approach, and some representative responses for each topic. +- `outputs/scatterplot... .html`: there are three scatterplots for the bottom-up approach: one coloured by topic, one coloured by question, and one coloured by sentiment. +- `outputs/by_question/`: this directory contains the output for the top-down approach: an info file on the topics generated for each question, and a scatterplot for each question. 
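+
+As a quick sanity check of these outputs, you can load the main table with pandas. This is a minimal, illustrative sketch: the `conversation` and `timestamp` columns are the table's sort keys, but other column names may differ depending on the pipeline version you ran.
+
+```python
+import pandas as pd
+
+df = pd.read_csv("outputs/final/final_df.csv")
+
+print(df.shape)
+print(df.columns.tolist())
+
+# The rows are sorted by conversation and timestamp, so each interview can be
+# read top to bottom; this summarises how many responses each interview has.
+print(df.groupby("conversation").size().describe())
+```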
diff --git a/dsp_interview_transcripts/pipeline/prep_output_tables.py b/dsp_interview_transcripts/pipeline/prep_output_tables.py index 0fa6b33..232c9dd 100644 --- a/dsp_interview_transcripts/pipeline/prep_output_tables.py +++ b/dsp_interview_transcripts/pipeline/prep_output_tables.py @@ -1,3 +1,4 @@ +from pathlib import Path from typing import List from typing import Optional from typing import Union @@ -9,8 +10,12 @@ from dsp_interview_transcripts import logger -OUTPUT_PATH_FULL_DATA = PROJECT_DIR / "outputs/final/final_df.csv" -OUTPUT_PATH_SUMMARY = PROJECT_DIR / "outputs/final/summary_info.csv" +OUTPUT_DIR = PROJECT_DIR / "outputs/final" +OUTPUT_PATH_FULL_DATA = OUTPUT_DIR / "final_df.csv" +OUTPUT_PATH_SUMMARY = OUTPUT_DIR / "summary_info.csv" + +# Create the output directory if it doesn't exist +OUTPUT_DIR.mkdir(parents=True, exist_ok=True) opacity_condition = alt.condition(alt.datum.Name == "None", alt.value(0.1), alt.value(0.6)) diff --git a/dsp_interview_transcripts/pipeline/top_down_analysis.py b/dsp_interview_transcripts/pipeline/top_down_analysis.py index 252a3c4..dd7bdd5 100644 --- a/dsp_interview_transcripts/pipeline/top_down_analysis.py +++ b/dsp_interview_transcripts/pipeline/top_down_analysis.py @@ -1,6 +1,7 @@ """Create topic models within each question""" import random +from pathlib import Path from typing import Any from typing import Dict from typing import List @@ -27,14 +28,11 @@ from dsp_interview_transcripts import logger -pd.set_option("display.max_colwidth", 500) - # Set random seeds RANDOM_SEED = 42 np.random.seed(RANDOM_SEED) random.seed(RANDOM_SEED) # PyTorch seed (used by SentenceTransformer) - torch.manual_seed(RANDOM_SEED) SENTENCE_MODEL = SentenceTransformer("all-MiniLM-L6-v2") @@ -143,6 +141,9 @@ class NameDescription(BaseModel): llm_chain = final_prompt | ollama_model | parser +OUTPUT_DIR = PROJECT_DIR / "outputs/by_question" +OUTPUT_DIR.mkdir(parents=True, exist_ok=True) + def name_topics( topic_info: pd.DataFrame, @@ -309,9 +310,9 @@ def name_topics( # Combine the charts and add a title combined_chart = (name_count_chart | sentiment_proportion_chart).properties(title=f"{question}") - combined_chart.save(PROJECT_DIR / f"outputs/by_question/{question}_name_sentiment_chart.html") + combined_chart.save(OUTPUT_DIR / f"{question}_name_sentiment_chart.html") - excel_file = PROJECT_DIR / "outputs/by_question/question_topic_models.xlsx" + excel_file = OUTPUT_DIR / "question_topic_models.xlsx" with pd.ExcelWriter(excel_file, engine="openpyxl") as writer: # Iterate over unique values of 'question' From 55a5434a8c5a46af85a45d9444c5f6464b59c3d5 Mon Sep 17 00:00:00 2001 From: RFOxbury Date: Mon, 11 Nov 2024 15:00:39 +0000 Subject: [PATCH 19/25] Remove error saving --- .../pipeline/name_clusters.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py index b42ffd2..f1f6175 100644 --- a/dsp_interview_transcripts/pipeline/name_clusters.py +++ b/dsp_interview_transcripts/pipeline/name_clusters.py @@ -63,20 +63,6 @@ class NameDescription(BaseModel): INPUT_PATH = PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_representative_docs.csv" -def save_list_of_dicts_as_jsonl(data: List[dict], output_path: str) -> None: - """ - Save a list of dictionaries as a JSONL file. - - Args: - data (List[dict]): The list of dictionaries to save. - output_path (str): The path to the output JSONL file. 
-    """
-    with open(output_path, "w") as f:
-        for entry in data:
-            json_line = json.dumps(entry)
-            f.write(json_line + "\n")
-
-
 def name_topics(
     topic_info: pd.DataFrame,
     llm_chain,
@@ -142,8 +128,6 @@ def name_topics(
 
 
 if __name__ == "__main__":
-    errors = []
-
     topic_info = pd.read_csv(INPUT_PATH)
 
     topic_info = topic_info.groupby(["Cluster", "Top Words"])["text_clean"].apply(list).reset_index()
@@ -172,6 +156,3 @@ def name_topics(
         PROJECT_DIR / "outputs/user_messages_min_len_9_w_sentiment_topics_with_names_descriptions.csv", index=False
     )
     logger.info("Done!")
-
-    # Save errors so that they can be loaded in and manually reprocessed if necessary
-    save_list_of_dicts_as_jsonl(errors, PROJECT_DIR / "outputs/interim/errors.jsonl")

From d5b6fd3e3c1765cb3f065dcd0ef9a15c2552f704 Mon Sep 17 00:00:00 2001
From: RFOxbury
Date: Mon, 11 Nov 2024 15:04:12 +0000
Subject: [PATCH 20/25] Add ollama instructions to readme

---
 dsp_interview_transcripts/pipeline/README.md | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/dsp_interview_transcripts/pipeline/README.md b/dsp_interview_transcripts/pipeline/README.md
index 2ba00ba..9ef8ff0 100644
--- a/dsp_interview_transcripts/pipeline/README.md
+++ b/dsp_interview_transcripts/pipeline/README.md
@@ -5,7 +5,9 @@
 2. Create a directory `outputs/` in the root of the repository.
 
-3. Run `python dsp_interview_transcripts/pipeline/run_pipeline.py`. This runs the following scripts:
+3. Install [Ollama](https://ollama.com/) according to your operating system's instructions. Install [Llama3.2](https://ollama.com/library/llama3.2) (defaults to 3B) by running in your terminal `ollama pull llama3.2`.
+
+4. Run `python dsp_interview_transcripts/pipeline/run_pipeline.py`. This runs the following scripts:
 
 - `process_data.py`. This will do some cleaning of the data (text cleaning, making sure the conversations are in order, concatenating consecutive messages by the same person within a conversation). It also applies a sentiment analysis model and estimates which question from the interview guide is being discussed at each point.
 - `python dsp_interview_transcripts/pipeline/topic_modelling.py`. This runs a BERTopic style model, but the actual steps (dimensionality reduction, HDBSCAN, tfidf representation) are run separately because I wanted to normalise the embedding vectors before reducing the dimensionality.
 - `python dsp_interview_transcripts/pipeline/name_clusters.py`. This uses llama3.2 to generate a name and a description for each topic.

From 512f2e8b63d7e70fb4db1abd085fc8d45b1ac5e5 Mon Sep 17 00:00:00 2001
From: beingkk
Date: Tue, 12 Nov 2024 11:14:12 +0000
Subject: [PATCH 21/25] adding tests

---
 .../pipeline/process_data.py |   2 +-
 tests/test.py                | 158 ++++++++++++++++++
 2 files changed, 159 insertions(+), 1 deletion(-)
 create mode 100644 tests/test.py

diff --git a/dsp_interview_transcripts/pipeline/process_data.py b/dsp_interview_transcripts/pipeline/process_data.py
index 7dbac77..9e98da4 100644
--- a/dsp_interview_transcripts/pipeline/process_data.py
+++ b/dsp_interview_transcripts/pipeline/process_data.py
@@ -100,7 +100,7 @@ def process_bot_qs(interviews_df: pd.DataFrame) -> Tuple[List[str], pd.DataFrame
 
     Returns:
     - Tuple[List[str], pd.DataFrame]: A list of unique sentences and DataFrame with exploded sentences.
""" - bot_qs = interviews_df[interviews_df["role"] == "BOT"] + bot_qs = interviews_df[interviews_df["role"] == "BOT"].copy() # split into individual sentences so that if the original question is contained within the utterance, # we have a better chance of catching it bot_qs["sentences"] = bot_qs["text_clean"].apply(lambda x: sent_tokenize(x)) diff --git a/tests/test.py b/tests/test.py new file mode 100644 index 0000000..4bf2972 --- /dev/null +++ b/tests/test.py @@ -0,0 +1,158 @@ +from unittest.mock import patch + +import pandas as pd +import pytest + +from dsp_interview_transcripts.pipeline.process_data import concatenate_consecutive_roles +from dsp_interview_transcripts.pipeline.process_data import create_context +from dsp_interview_transcripts.pipeline.process_data import get_best_matches +from dsp_interview_transcripts.pipeline.process_data import get_sentiment +from dsp_interview_transcripts.pipeline.process_data import match_questions +from dsp_interview_transcripts.pipeline.process_data import process_bot_qs + + +@pytest.fixture +def mock_dataframe(): + """Fixture for a mock DataFrame simulating a conversation structure.""" + data = { + "conversation": [1, 1, 1, 2, 2], + "text_clean": ["Hi", "Hello", "How are you?", "Good morning", "Good night"], + "role": ["USER", "USER", "BOT", "USER", "BOT"], + "timestamp": [ + "2021-01-01 10:00:00", + "2021-01-01 10:01:00", + "2021-01-01 10:02:00", + "2021-01-02 10:00:00", + "2021-01-02 10:01:00", + ], + "uuid": [1, 2, 3, 4, 5], + } + return pd.DataFrame(data) + + +@pytest.fixture +def mock_questions(): + return ["How are you?", "Good morning?", "Good night?"] + + +def test_concatenate_consecutive_roles(): + # Sample made-up conversation data + df = pd.DataFrame( + { + "uuid": ["uuid1", "uuid2", "uuid3", "uuid4", "uuid5", "uuid6", "uuid7", "uuid8", "uuid9", "uuid10"], + "timestamp": [ + "2024-05-01 10:00:00", + "2024-05-01 10:01:00", + "2024-05-01 10:02:00", + "2024-05-01 10:03:00", + "2024-05-01 10:04:00", + "2024-05-01 10:05:00", + "2024-05-01 10:06:00", + "2024-05-01 10:07:00", + "2024-05-01 10:08:00", + "2024-05-01 10:09:00", + ], + "conversation": ["conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1"], + "role": ["USER", "USER", "BOT", "BOT", "USER", "BOT", "USER", "USER", "BOT", "USER"], + "text_clean": [ + "Hi!", + "Can you help me?", + "Sure, how can I assist?", + "Do you need more details?", + "Yes, I need help with my account.", + "What exactly seems to be the issue?", + "I forgot my password.", + "Also, I can't access my email.", + "Let me help you with that.", + "Thank you!", + ], + } + ) + + # Expected result after concatenating consecutive roles + expected_df = pd.DataFrame( + { + "uuid": ["uuid1", "uuid3", "uuid5", "uuid6", "uuid7", "uuid9", "uuid10"], + "timestamp": [ + "2024-05-01 10:00:00", + "2024-05-01 10:02:00", + "2024-05-01 10:04:00", + "2024-05-01 10:05:00", + "2024-05-01 10:06:00", + "2024-05-01 10:08:00", + "2024-05-01 10:09:00", + ], + "conversation": ["conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1"], + "role": ["USER", "BOT", "USER", "BOT", "USER", "BOT", "USER"], + "text_clean": [ + "Hi! Can you help me?", + "Sure, how can I assist? Do you need more details?", + "Yes, I need help with my account.", + "What exactly seems to be the issue?", + "I forgot my password. 
+                "Let me help you with that.",
+                "Thank you!",
+            ],
+        }
+    )[["conversation", "timestamp", "text_clean", "role", "uuid"]]
+
+    # Call the function to test
+    result_df = concatenate_consecutive_roles(df)
+    print(result_df)
+
+    # Assert that the result matches the expected dataframe
+    pd.testing.assert_frame_equal(result_df, expected_df)
+
+
+def test_process_bot_qs(mock_dataframe):
+    expected_bot_qs_list = ["How are you?", "Good night"]
+    bot_qs_list, bot_qs_df = process_bot_qs(mock_dataframe)
+    assert bot_qs_list == expected_bot_qs_list, "Bot question list should match expected sentences"
+    # Check that the sentences are exploded correctly
+    assert bot_qs_df["sentences"].tolist() == expected_bot_qs_list, "Exploded sentences should match expected values"
+
+
+@patch("dsp_interview_transcripts.pipeline.process_data.SENTENCE_MODEL.encode")
+def test_match_questions(mock_encode, mock_questions):
+    # Simulate encoding outputs
+    mock_encode.side_effect = lambda x: [[1, 0], [0, 1], [0.5, 0.5]] if x == mock_questions else [[1, 0], [0.5, 0.5]]
+    bot_qs_list = ["How are you?", "Good night"]
+
+    expected_matches = pd.DataFrame(
+        {
+            "bot_q": ["How are you?", "Good night"],
+            "question": ["How are you?", "Good night?"],
+            "cosine_similarity": [1.0, 1.0],
+        }
+    )
+
+    final_matches = match_questions(bot_qs_list, mock_questions, threshold=0.8)
+    print(final_matches)
+    pd.testing.assert_frame_equal(final_matches.reset_index(drop=True), expected_matches)
+
+
+def test_get_best_matches(mock_dataframe, mock_questions):
+    # Mock the results of previous steps
+    bot_qs_list, bot_qs_df = process_bot_qs(mock_dataframe)
+    final_matches = pd.DataFrame(
+        {
+            "bot_q": ["How are you?", "Good night"],
+            "question": ["How are you?", "Good night?"],
+            "cosine_similarity": [1.0, 1.0],
+        }
+    )
+    questions_df = pd.DataFrame(enumerate(mock_questions), columns=["q_number", "question"])
+
+    # Expected output DataFrame
+    expected_data = {
+        "conversation": [1, 2],
+        "text_clean": ["How are you?", "Good night"],
+        "role": ["BOT", "BOT"],
+        "sentences": ["How are you?", "Good night"],
+        "question": ["How are you?", "Good night?"],
+        "cosine_similarity": [1.0, 1.0],
+    }
+    expected_df = pd.DataFrame(expected_data)
+
+    best_matches = get_best_matches(bot_qs_df, final_matches, questions_df)
+    pd.testing.assert_frame_equal(best_matches[expected_df.columns], expected_df)

From 5230b9d5c91841055e63c9c7540c9139fc7198f3 Mon Sep 17 00:00:00 2001
From: RFOxbury
Date: Tue, 12 Nov 2024 13:29:35 +0000
Subject: [PATCH 22/25] Remove deprecated script

---
 tests/test_concatenate_consecutive_roles.py | 73 ---------------------
 1 file changed, 73 deletions(-)
 delete mode 100644 tests/test_concatenate_consecutive_roles.py

diff --git a/tests/test_concatenate_consecutive_roles.py b/tests/test_concatenate_consecutive_roles.py
deleted file mode 100644
index bfe6f8e..0000000
--- a/tests/test_concatenate_consecutive_roles.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import pandas as pd
-import pytest
-
-from dsp_interview_transcripts.pipeline.process_data import concatenate_consecutive_roles
-
-
-def test_concatenate_consecutive_roles():
-    # Sample made-up conversation data
-    df = pd.DataFrame(
-        {
-            "uuid": ["uuid1", "uuid2", "uuid3", "uuid4", "uuid5", "uuid6", "uuid7", "uuid8", "uuid9", "uuid10"],
-            "timestamp": [
-                "2024-05-01 10:00:00",
-                "2024-05-01 10:01:00",
-                "2024-05-01 10:02:00",
-                "2024-05-01 10:03:00",
-                "2024-05-01 10:04:00",
-                "2024-05-01 10:05:00",
-                "2024-05-01 10:06:00",
-                "2024-05-01 10:07:00",
-                "2024-05-01 10:08:00",
-                "2024-05-01 10:09:00",
-            ],
-            "conversation": ["conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1"],
-            "role": ["USER", "USER", "BOT", "BOT", "USER", "BOT", "USER", "USER", "BOT", "USER"],
-            "text_clean": [
-                "Hi!",
-                "Can you help me?",
-                "Sure, how can I assist?",
-                "Do you need more details?",
-                "Yes, I need help with my account.",
-                "What exactly seems to be the issue?",
-                "I forgot my password.",
-                "Also, I can't access my email.",
-                "Let me help you with that.",
-                "Thank you!",
-            ],
-        }
-    )
-
-    # Expected result after concatenating consecutive roles
-    expected_df = pd.DataFrame(
-        {
-            "uuid": ["uuid1", "uuid3", "uuid5", "uuid6", "uuid7", "uuid9", "uuid10"],
-            "timestamp": [
-                "2024-05-01 10:00:00",
-                "2024-05-01 10:02:00",
-                "2024-05-01 10:04:00",
-                "2024-05-01 10:05:00",
-                "2024-05-01 10:06:00",
-                "2024-05-01 10:08:00",
-                "2024-05-01 10:09:00",
-            ],
-            "conversation": ["conv1", "conv1", "conv1", "conv1", "conv1", "conv1", "conv1"],
-            "role": ["USER", "BOT", "USER", "BOT", "USER", "BOT", "USER"],
-            "text_clean": [
-                "Hi! Can you help me?",
-                "Sure, how can I assist? Do you need more details?",
-                "Yes, I need help with my account.",
-                "What exactly seems to be the issue?",
-                "I forgot my password. Also, I can't access my email.",
-                "Let me help you with that.",
-                "Thank you!",
-            ],
-        }
-    )[["conversation", "timestamp", "text_clean", "role", "uuid"]]
-
-    # Call the function to test
-    result_df = concatenate_consecutive_roles(df)
-    print(result_df)
-
-    # Assert that the result matches the expected dataframe
-    pd.testing.assert_frame_equal(result_df, expected_df)

From 4e5cbf3a4c68a809983d695c8ca166182fbb678f Mon Sep 17 00:00:00 2001
From: RFOxbury
Date: Tue, 12 Nov 2024 13:53:23 +0000
Subject: [PATCH 23/25] Add chaining for readability

---
 .../pipeline/name_clusters.py      |  6 +--
 .../pipeline/prep_output_tables.py | 23 +++++-----
 .../pipeline/process_data.py       | 42 ++++++++++---------
 3 files changed, 36 insertions(+), 35 deletions(-)

diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py
index f1f6175..79fd212 100644
--- a/dsp_interview_transcripts/pipeline/name_clusters.py
+++ b/dsp_interview_transcripts/pipeline/name_clusters.py
@@ -66,7 +66,6 @@ class NameDescription(BaseModel):
 def name_topics(
     topic_info: pd.DataFrame,
     llm_chain,
-    topics: List[str],
     text_col: str = "text_clean",
     top_words_col: str = "Top Words",
     topic_label_col: str = "Cluster",
@@ -103,6 +102,7 @@ def name_topics(
         "Topic 2": {"name": "Product Quality", "description": "Documents focusing on product durability and performance."}
     }
     """
+    topics = topic_info["Cluster"].unique().tolist()

     results = {}

@@ -133,10 +133,8 @@ def name_topics(
     topic_info = topic_info.groupby(["Cluster", "Top Words"])["text_clean"].apply(list).reset_index()
     topic_info["Cluster"] = topic_info["Cluster"].astype(str)

-    topics = topic_info["Cluster"].unique().tolist()
-
     results = name_topics(
-        topic_info, llm_chain, topics, text_col="text_clean", top_words_col="Top Words", topic_label_col="Cluster"
+        topic_info, llm_chain, text_col="text_clean", top_words_col="Top Words", topic_label_col="Cluster"
     )

     # Some complicated conditionals to check that what's in `results` can be parsed
diff --git a/dsp_interview_transcripts/pipeline/prep_output_tables.py b/dsp_interview_transcripts/pipeline/prep_output_tables.py
index 232c9dd..b6bcc80 100644
--- a/dsp_interview_transcripts/pipeline/prep_output_tables.py
+++ b/dsp_interview_transcripts/pipeline/prep_output_tables.py
@@ -94,26 +94,27 @@ def create_scatterplot(
         rep_docs, data_w_names[["Cluster", "Name", "Description", "N responses in topic"]], on="Cluster", how="left"
     )

-    data_viz = pd.merge(data, data_w_names[["Cluster", "Name", "Description"]], on="Cluster", how="left")
-
-    data_viz["Name"] = data_viz["Name"].fillna("None")
-    data_viz["Description"] = data_viz["Description"].fillna("None")
+    data_viz = (
+        data.merge(data_w_names[["Cluster", "Name", "Description"]], on="Cluster", how="left")
+        .assign(Name=lambda df: df["Name"].fillna("None"))
+        .assign(Description=lambda df: df["Description"].fillna("None"))
+    )

     # Create binary column to indicate whether the user response is representative of the topic
-    merged_df = data_viz.merge(
-        rep_docs[["conversation", "uuid", "Name"]], on=["conversation", "uuid", "Name"], how="left", indicator=True
+    merged_df = (
+        data_viz.merge(
+            rep_docs[["conversation", "uuid", "Name"]], on=["conversation", "uuid", "Name"], how="left", indicator=True
+        )
+        .assign(Representative_of_topic=lambda df: (df["_merge"] == "both").astype(int))
+        .drop(columns=["_merge"])
     )

-    merged_df["Representative of topic"] = (merged_df["_merge"] == "both").astype(int)
-
-    merged_df = merged_df.drop(columns=["_merge"])
-
     final_df = merged_df[
         [
             "Name",
             "Description",
             "Top Words",
-            "Representative of topic",
+            "Representative_of_topic",
             "question",
             "context",
             "text_clean",
diff --git a/dsp_interview_transcripts/pipeline/process_data.py b/dsp_interview_transcripts/pipeline/process_data.py
index 9e98da4..8fee943 100644
--- a/dsp_interview_transcripts/pipeline/process_data.py
+++ b/dsp_interview_transcripts/pipeline/process_data.py
@@ -244,42 +244,44 @@ def create_context(row: pd.Series, df: pd.DataFrame) -> str:

     logger.info(f"Number of interviews: {len(interviews_df['conversation'].unique())}")

-    # Make sure the conversations are sorted by time, so that the replies go in the right order
-    interviews_df["timestamp_clean"] = interviews_df["timestamp"].apply(convert_timestamp)
-    interviews_df = interviews_df.groupby("conversation", group_keys=False).apply(
-        lambda x: x.sort_values("timestamp_clean")
+    interviews_cleaned_df = (
+        interviews_df
+        # Make sure the conversations are sorted by time, so that the replies go in the right order
+        .assign(timestamp_clean=lambda df: df["timestamp"].apply(convert_timestamp))
+        .groupby("conversation", group_keys=False)
+        .apply(lambda x: x.sort_values("timestamp_clean"))
+        # Remove everything up to when bot asks if the instructions are clear - everything before is just noise
+        .pipe(lambda df: df.groupby("conversation").apply(remove_preamble).reset_index(drop=True))
+        # Group together consecutive responses by the same role
+        .pipe(concatenate_consecutive_roles)
    )

-    # Remove everything up to when bot asks if the instructions are clear - everything before is just noise
-    interviews_cleaned_df = interviews_df.groupby("conversation").apply(remove_preamble).reset_index(drop=True)
-
-    # Group together consecutive responses by the same role
-    interviews_df = concatenate_consecutive_roles(interviews_df)
-
     questions_df = pd.DataFrame(enumerate(QUESTIONS), columns=["q_number", "question"])

-    bot_qs_list, bot_qs = process_bot_qs(interviews_df)
+    bot_qs_list, bot_qs = process_bot_qs(interviews_cleaned_df)

     final_matches = match_questions(bot_qs_list, QUESTIONS)

     questions_highest_similarity = get_best_matches(bot_qs, final_matches, questions_df)

     # Merge back into the original df
-    interviews_df = pd.merge(
interviews_df, + interviews_cleaned_df = pd.merge( + interviews_cleaned_df, questions_highest_similarity[["uuid", "question", "q_number", "cosine_similarity"]], on="uuid", how="left", ) # Forward fill the matched questions and their question numbers - interviews_q_filled = interviews_df.copy() - interviews_q_filled["question"] = interviews_q_filled.groupby("conversation")["question"].ffill() - interviews_q_filled["q_number"] = interviews_q_filled.groupby("conversation")["q_number"].ffill() - - interviews_q_filled = add_text_length(interviews_q_filled) - - interviews_q_filled["context"] = interviews_q_filled.apply(create_context, df=interviews_q_filled, axis=1) + interviews_q_filled = ( + interviews_cleaned_df.copy() + .assign( + question=lambda df: df.groupby("conversation")["question"].ffill(), + q_number=lambda df: df.groupby("conversation")["q_number"].ffill(), + ) + .pipe(add_text_length) + .assign(context=lambda df: df.apply(create_context, df=df, axis=1)) + ) user_messages = interviews_q_filled[ (interviews_q_filled["role"] == "USER") & (interviews_q_filled["text_length"] > MIN_LEN) From 39c1eb97dcd8fedf9a8db2a271c1b671f5395e29 Mon Sep 17 00:00:00 2001 From: Rosie Oxbury <49554920+RFOxbury@users.noreply.github.com> Date: Tue, 12 Nov 2024 13:53:50 +0000 Subject: [PATCH 24/25] Update dsp_interview_transcripts/pipeline/name_clusters.py Co-authored-by: Karlis Kanders --- dsp_interview_transcripts/pipeline/name_clusters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dsp_interview_transcripts/pipeline/name_clusters.py b/dsp_interview_transcripts/pipeline/name_clusters.py index 79fd212..955b2c4 100644 --- a/dsp_interview_transcripts/pipeline/name_clusters.py +++ b/dsp_interview_transcripts/pipeline/name_clusters.py @@ -42,7 +42,7 @@ class NameDescription(BaseModel): Example: {{ "name": "Energy Efficiency", - "description": "This cluster contains users responses about energy efficiency when choosing home heating options. The users have varying degrees of knowledge about the efficiency of different systems. Some reasons for wanting to improve efficiency include environmental concerns and cost concerns." + "description": "This cluster contains user responses about energy efficiency when choosing home heating options. The users have varying degrees of knowledge about the efficiency of different systems. Some reasons for wanting to improve efficiency include environmental concerns and cost concerns." }} """ From 61e975146073863b8d70091ff814bfe478c3a797 Mon Sep 17 00:00:00 2001 From: Rosie Oxbury <49554920+RFOxbury@users.noreply.github.com> Date: Tue, 12 Nov 2024 13:53:56 +0000 Subject: [PATCH 25/25] Update dsp_interview_transcripts/pipeline/README.md Co-authored-by: Karlis Kanders --- dsp_interview_transcripts/pipeline/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dsp_interview_transcripts/pipeline/README.md b/dsp_interview_transcripts/pipeline/README.md index 9ef8ff0..4636719 100644 --- a/dsp_interview_transcripts/pipeline/README.md +++ b/dsp_interview_transcripts/pipeline/README.md @@ -9,7 +9,8 @@ 4. Run `python dsp_interview_transcripts/pipeline/run_pipeline.py`. This runs the following scripts: - `process_data.py`. This will do some cleaning of the data (text cleaning, making sure the conversations are in order, concatenating consecutive messages by the same person within a conversation). It also applies a sentiment analysis model and also estimates which question from the interview guide is being discussed at each point. 
- - `python dsp_interview_transcripts/pipeline/topic_modelling.py`. This runs a BERTopic style model, but the actual steps (dimensionality reduction, HDBSCAN, tfidf representation) are run separatley because I wanted to normalise the embedding vectors before reducing the dimensionality. + - `python dsp_interview_transcripts/pipeline/topic_modelling.py`. This runs a BERTopic style model, but the actual steps (dimensionality reduction, HDBSCAN, tfidf representation) are run separately + because I wanted to normalise the embedding vectors before reducing the dimensionality. - `python dsp_interview_transcripts/pipeline/name_clusters.py`. This uses llama3.2 to generate a name and a description for each topic. - `python dsp_interview_transcripts/pipeline/prep_output_tables.py`. This formats the output data and also saves scatterplots locally. - `python dsp_interview_transcripts/pipeline/top_down_analysis.py`. This mimics the top-down/framework approach. It subsets the data according to which question is being discussed and then runs BERTopic on each subset.
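For anyone reviewing the tests introduced in PATCH 21, the behaviour they pin down can also be exercised interactively. The snippet below is a minimal sketch, not part of the changeset: it assumes the repository's package is installed locally (for example via `pip install -e .`), and the toy frame is illustrative only.

```python
import pandas as pd

from dsp_interview_transcripts.pipeline.process_data import concatenate_consecutive_roles

# A single conversation: two consecutive USER turns followed by a BOT turn.
toy_df = pd.DataFrame(
    {
        "uuid": ["u1", "u2", "u3"],
        "timestamp": ["2024-05-01 10:00:00", "2024-05-01 10:01:00", "2024-05-01 10:02:00"],
        "conversation": ["conv1", "conv1", "conv1"],
        "role": ["USER", "USER", "BOT"],
        "text_clean": ["Hi!", "Can you help me?", "Sure, how can I assist?"],
    }
)

result = concatenate_consecutive_roles(toy_df)
print(result)
# Per the expectations encoded in test_concatenate_consecutive_roles, the two
# USER rows collapse into one ("Hi! Can you help me?"), keeping the first row's
# uuid and timestamp, and the columns come back in the order
# [conversation, timestamp, text_clean, role, uuid].
```

This mirrors the fixture in `tests/test.py`, so it should stay in sync with the pipeline's behaviour as the test suite evolves.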