diff --git a/demos/LIT_Integration_Demo.ipynb b/demos/LIT_Integration_Demo.ipynb new file mode 100644 index 000000000..8defa89af --- /dev/null +++ b/demos/LIT_Integration_Demo.ipynb @@ -0,0 +1,725 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0c9bd826", + "metadata": {}, + "source": [ + "\n", + " \"Open\n", + "" + ] + }, + { + "cell_type": "markdown", + "id": "28bd5641", + "metadata": {}, + "source": [ + "# TransformerLens + LIT Integration Demo\n", + "\n", + "This notebook demonstrates how to use Google's **Learning Interpretability Tool (LIT)** with **TransformerLens** for interactive model analysis and visualization.\n", + "\n", + "**What you'll learn:**\n", + "1. Setting up the LIT integration\n", + "2. Visualizing attention patterns\n", + "3. Exploring token predictions\n", + "4. Using embedding projector\n", + "5. Token salience/gradient analysis\n", + "\n", + "**References:**\n", + "- [TransformerLens](https://github.com/TransformerLensOrg/TransformerLens)\n", + "- [LIT Documentation](https://pair-code.github.io/lit/)\n", + "- [LIT Paper](https://arxiv.org/abs/2008.05122)" + ] + }, + { + "cell_type": "markdown", + "id": "9dfe9cf6", + "metadata": {}, + "source": [ + "# Setup\n", + "(No need to read - just run these cells)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "fcb1d8d4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running locally (VSCode/Jupyter)\n" + ] + } + ], + "source": [ + "# Detect environment\n", + "import os\n", + "\n", + "DEVELOPMENT_MODE = False\n", + "\n", + "# Detect if we're running in Google Colab\n", + "try:\n", + " import google.colab\n", + " IN_COLAB = True\n", + " print(\"Running in Google Colab\")\n", + "except ImportError:\n", + " IN_COLAB = False\n", + " print(\"Running locally (VSCode/Jupyter)\")\n", + "\n", + "# Install packages if in Colab\n", + "if IN_COLAB:\n", + " %pip install -q transformer_lens lit-nlp\n", + "\n", + "# Hot reload in 
development mode\n", + "if not IN_COLAB and DEVELOPMENT_MODE:\n", + " from IPython import get_ipython\n", + " ip = get_ipython()\n", + " if ip is not None and not ip.extension_manager.loaded:\n", + " ip.extension_manager.load('autoreload')\n", + " %autoreload 2" + ] + }, + { + "cell_type": "markdown", + "id": "a5bcc3dc", + "metadata": {}, + "source": [ + "# Imports" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "36404560", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/Hetansh/Github/open-source-contribution/venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "LIT is installed and available\n", + "Using device: cpu\n" + ] + } + ], + "source": [ + "# Core imports\n", + "import torch\n", + "import numpy as np\n", + "from transformer_lens import HookedTransformer\n", + "\n", + "# LIT integration imports\n", + "from transformer_lens.lit import (\n", + " HookedTransformerLIT,\n", + " HookedTransformerLITConfig,\n", + " SimpleTextDataset,\n", + " PromptCompletionDataset,\n", + " IOIDataset,\n", + " InductionDataset,\n", + " serve,\n", + " check_lit_installed,\n", + ")\n", + "\n", + "# Verify LIT is available\n", + "if check_lit_installed():\n", + " print(\"LIT is installed and available\")\n", + "else:\n", + " print(\"WARNING: LIT (lit-nlp) is not installed. Please run: pip install lit-nlp\")\n", + "\n", + "# Set device\n", + "device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n", + "print(f\"Using device: {device}\")" + ] + }, + { + "cell_type": "markdown", + "id": "654cbfeb", + "metadata": {}, + "source": [ + "# Load a Model\n", + "\n", + "Let's load GPT-2 Small as our example model. 
TransformerLens supports 50+ models out of the box." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "18cbabbd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading gpt2-small...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "`torch_dtype` is deprecated! Use `dtype` instead!\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded pretrained model gpt2-small into HookedTransformer\n", + "Loaded model: gpt2\n", + " Layers: 12\n", + " Heads: 12\n", + " d_model: 768\n" + ] + } + ], + "source": [ + "# Load GPT-2 Small (124M parameters)\n", + "# Other options: \"gpt2-medium\", \"gpt2-large\", \"gpt2-xl\", \"pythia-70m\", etc.\n", + "model_name = \"gpt2-small\"\n", + "\n", + "print(f\"Loading {model_name}...\")\n", + "model = HookedTransformer.from_pretrained(\n", + " model_name,\n", + " device=device,\n", + ")\n", + "print(f\"Loaded model: {model.cfg.model_name}\")\n", + "print(f\" Layers: {model.cfg.n_layers}\")\n", + "print(f\" Heads: {model.cfg.n_heads}\")\n", + "print(f\" d_model: {model.cfg.d_model}\")" + ] + }, + { + "cell_type": "markdown", + "id": "c78a7a86", + "metadata": {}, + "source": [ + "# Create LIT Wrapper\n", + "\n", + "Now we wrap the HookedTransformer with our LIT wrapper to enable all the visualization features." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "e6967650", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Created LIT wrapper: TransformerLens: gpt2 (12L, 12H, d=768)\n", + "\n", + "Input spec keys: ['text', 'tokens', 'token_embeddings', 'target_mask']\n", + "Output spec keys: ['tokens', 'top_k_tokens', 'input_embeddings', 'cls_embedding', 'mean_embedding', 'layer_0/embeddings', 'layer_6/embeddings', 'layer_11/embeddings', 'layer_0/attention', 'layer_1/attention', 'layer_2/attention', 'layer_3/attention', 'layer_4/attention', 'layer_5/attention', 'layer_6/attention', 'layer_7/attention', 'layer_8/attention', 'layer_9/attention', 'layer_10/attention', 'layer_11/attention', 'grad_l2', 'grad_dot_input']\n" + ] + } + ], + "source": [ + "# Configure the wrapper\n", + "config = HookedTransformerLITConfig(\n", + " max_seq_length=256, # Maximum input length\n", + " batch_size=4, # Batch size for inference\n", + " top_k=10, # Number of top predictions to show\n", + " compute_gradients=True, # Enable gradient-based salience\n", + " output_attention=True, # Output attention patterns\n", + " output_embeddings=True, # Output embeddings for projector\n", + " output_all_layers=False, # Just key layers (faster)\n", + " prepend_bos=True, # Prepend BOS token\n", + ")\n", + "\n", + "# Create the wrapper\n", + "lit_model = HookedTransformerLIT(model, config=config)\n", + "\n", + "print(f\"Created LIT wrapper: {lit_model.description()}\")\n", + "print(f\"\\nInput spec keys: {list(lit_model.input_spec().keys())}\")\n", + "print(f\"Output spec keys: {list(lit_model.output_spec().keys())}\")" + ] + }, + { + "cell_type": "markdown", + "id": "ed3f9755", + "metadata": {}, + "source": [ + "# Create Datasets\n", + "\n", + "LIT needs datasets to visualize. We provide several dataset types for different analysis scenarios." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "8c90484a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Created: General Examples: 7 examples\n" + ] + } + ], + "source": [ + "# Simple text dataset for general exploration\n", + "simple_examples = [\n", + " \"The capital of France is Paris.\",\n", + " \"Machine learning is a subset of artificial intelligence.\",\n", + " \"The quick brown fox jumps over the lazy dog.\",\n", + " \"To be or not to be, that is the question.\",\n", + " \"The meaning of life is\",\n", + " \"In the beginning, there was\",\n", + " \"The best programming language is\",\n", + "]\n", + "\n", + "simple_dataset = SimpleTextDataset.from_strings(\n", + " simple_examples,\n", + " name=\"General Examples\"\n", + ")\n", + "print(f\"Created: {simple_dataset.description()}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "9ee619a4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Created: Factual Completions: 5 prompt-completion pairs\n" + ] + } + ], + "source": [ + "# Prompt-completion dataset for generation analysis\n", + "prompt_completion_pairs = [\n", + " (\"The capital of France is\", \" Paris\"),\n", + " (\"The capital of Germany is\", \" Berlin\"),\n", + " (\"2 + 2 =\", \" 4\"),\n", + " (\"Water freezes at\", \" 0 degrees\"),\n", + " (\"The sun rises in the\", \" east\"),\n", + "]\n", + "\n", + "prompt_dataset = PromptCompletionDataset.from_pairs(\n", + " prompt_completion_pairs,\n", + " name=\"Factual Completions\"\n", + ")\n", + "print(f\"Created: {prompt_dataset.description()}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "d13362bb", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Created: IOI Task: 50 IOI examples\n", + "\n", + "Example: When Ivy and John went to the store, Ivy gave a phone to\n", + "Expected answer: 
John\n" + ] + } + ], + "source": [ + "# IOI (Indirect Object Identification) dataset\n", + "# This is commonly used in mechanistic interpretability research\n", + "ioi_dataset = IOIDataset.generate(\n", + " n_examples=50,\n", + " seed=42,\n", + " name=\"IOI Task\"\n", + ")\n", + "print(f\"Created: {ioi_dataset.description()}\")\n", + "\n", + "# Show an example\n", + "print(f\"\\nExample: {ioi_dataset.examples[0]['text']}\")\n", + "print(f\"Expected answer: {ioi_dataset.examples[0]['answer']}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "1954d6dc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Created: Induction Patterns: 30 induction examples\n", + "\n", + "Example: The cat sat on the mat Then, later, The\n", + "Pattern: The cat sat on the mat\n" + ] + } + ], + "source": [ + "# Induction dataset for analyzing induction heads\n", + "induction_dataset = InductionDataset.generate_simple(\n", + " n_examples=30,\n", + " seed=42,\n", + " name=\"Induction Patterns\"\n", + ")\n", + "print(f\"Created: {induction_dataset.description()}\")\n", + "\n", + "# Show an example\n", + "print(f\"\\nExample: {induction_dataset.examples[0]['text']}\")\n", + "print(f\"Pattern: {induction_dataset.examples[0]['pattern']}\")" + ] + }, + { + "cell_type": "markdown", + "id": "b234837f", + "metadata": {}, + "source": [ + "# Test the Wrapper\n", + "\n", + "Before launching LIT, let's verify the wrapper works correctly." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "3a86ea43", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running prediction...\n", + "\n", + "Input: The capital of France is\n", + "\n", + "Tokens: ['<|endoftext|>', 'The', '▁capital', '▁of', '▁France', '▁is']\n", + "\n", + "Top predictions for last position:\n", + " ' now' 0.0475\n", + " ' the' 0.0374\n", + " ' a' 0.0355\n", + " ' home' 0.0309\n", + " ' in' 0.0270\n", + "\n", + "Available output fields: ['tokens', 'top_k_tokens', 'input_embeddings', 'cls_embedding', 'mean_embedding', 'layer_0/embeddings', 'layer_6/embeddings', 'layer_11/embeddings', 'layer_0/attention', 'layer_1/attention', 'layer_2/attention', 'layer_3/attention', 'layer_4/attention', 'layer_5/attention', 'layer_6/attention', 'layer_7/attention', 'layer_8/attention', 'layer_9/attention', 'layer_10/attention', 'layer_11/attention', 'grad_l2', 'grad_dot_input']\n" + ] + } + ], + "source": [ + "# Test a single prediction\n", + "test_input = {\"text\": \"The capital of France is\"}\n", + "\n", + "print(\"Running prediction...\")\n", + "outputs = list(lit_model.predict([test_input]))\n", + "output = outputs[0]\n", + "\n", + "print(f\"\\nInput: {test_input['text']}\")\n", + "print(f\"\\nTokens: {output['tokens']}\")\n", + "print(f\"\\nTop predictions for last position:\")\n", + "for token, prob in output['top_k_tokens'][-1][:5]:\n", + " print(f\" {repr(token):15s} {prob:.4f}\")\n", + "\n", + "# Check available outputs\n", + "print(f\"\\nAvailable output fields: {list(output.keys())}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "0894241c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CLS embedding shape: (768,)\n", + "Mean embedding shape: (768,)\n", + "Input embeddings shape: (6, 768)\n" + ] + } + ], + "source": [ + "# Check embedding shapes\n", + "if 'cls_embedding' in output:\n", + " print(f\"CLS 
embedding shape: {output['cls_embedding'].shape}\")\n", + "if 'mean_embedding' in output:\n", + " print(f\"Mean embedding shape: {output['mean_embedding'].shape}\")\n", + "if 'input_embeddings' in output:\n", + " print(f\"Input embeddings shape: {output['input_embeddings'].shape}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "0ebc66a3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Attention layers: 12\n", + "Attention shape per layer: (12, 6, 6)\n", + " (heads, query_pos, key_pos)\n" + ] + } + ], + "source": [ + "# Check attention pattern shapes\n", + "attention_keys = [k for k in output.keys() if 'attention' in k]\n", + "print(f\"Attention layers: {len(attention_keys)}\")\n", + "\n", + "if attention_keys:\n", + " first_attn = output[attention_keys[0]]\n", + " print(f\"Attention shape per layer: {first_attn.shape}\")\n", + " print(f\" (heads, query_pos, key_pos)\")" + ] + }, + { + "cell_type": "markdown", + "id": "d56b728a", + "metadata": {}, + "source": [ + "# Launch LIT Server\n", + "\n", + "Start the LIT server to visualize and analyze the model. This opens an interactive UI in your browser at http://localhost:5433.\n", + "\n", + "**Note:** The server blocks the notebook. Restart the kernel to stop it." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "765aa71b", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:werkzeug:\u001b[31m\u001b[1mWARNING: This is a development server. Do not use it in a production deployment. 
Use a production WSGI server instead.\u001b[0m\n", + " * Running on http://localhost:5433\n", + "INFO:werkzeug:\u001b[33mPress CTRL+C to quit\u001b[0m\n", + "INFO:werkzeug:127.0.0.1 - - [26/Jan/2026 15:39:51] \"POST /get_info HTTP/1.1\" 200 -\n", + "INFO:werkzeug:127.0.0.1 - - [26/Jan/2026 15:39:51] \"POST /get_dataset?dataset_name=general HTTP/1.1\" 200 -\n", + "INFO:werkzeug:127.0.0.1 - - [26/Jan/2026 15:39:52] \"POST /get_interpretations?model=gpt2-small&dataset_name=general&interpreter=pca&do_predict=1 HTTP/1.1\" 200 -\n", + "INFO:werkzeug:127.0.0.1 - - [26/Jan/2026 15:39:52] \"POST /get_preds?model=gpt2-small&dataset_name=general&requested_types=MulticlassPreds,RegressionScore&requested_fields= HTTP/1.1\" 200 -\n", + "INFO:werkzeug:127.0.0.1 - - [26/Jan/2026 15:39:52] \"POST /get_interpretations?model=gpt2-small&dataset_name=general&interpreter=Grad%20L2%20Norm&do_predict=1 HTTP/1.1\" 200 -\n", + "INFO:werkzeug:127.0.0.1 - - [26/Jan/2026 15:39:52] \"POST /get_interpretations?model=gpt2-small&dataset_name=general&interpreter=Grad%20⋅%20Input&do_predict=1 HTTP/1.1\" 200 -\n" + ] + } + ], + "source": [ + "# Suppress LIT's internal warnings about NoneDataset\n", + "import logging\n", + "logging.getLogger('absl').setLevel(logging.ERROR)\n", + "\n", + "# Launch LIT server - open http://localhost:5433 in your browser\n", + "serve(\n", + " models={\"gpt2-small\": lit_model},\n", + " datasets={\n", + " \"general\": simple_dataset,\n", + " \"factual\": prompt_dataset,\n", + " \"ioi\": ioi_dataset,\n", + " \"induction\": induction_dataset,\n", + " },\n", + " port=5433,\n", + " page_title=\"TransformerLens + LIT Demo\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "a2879811", + "metadata": {}, + "source": [ + "# Manual Analysis (Without LIT UI)\n", + "\n", + "You can also use the wrapper directly for programmatic analysis." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "8897a15e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Tokens: ['<|endoftext|>', 'When', '▁Mary', '▁and', '▁John', '▁went', '▁to', '▁the', '▁store', ',', '▁Mary', '▁gave', '▁a', '▁book', '▁to']\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3wAAAMWCAYAAACqe4QeAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjgsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvwVt1zgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAoaFJREFUeJzs3Qd4FNX3//Gz9A7Se5OidBREwYKKoiiKvSBNxIooWBAbICoWUEBUFAWsX7BhRSwoFkBBBBuCiiAoIiodpCXzfz7X3+x/E5KYQLKzs3m/fEays5udm9nd2Tlz7j034nmeZwAAAACApFMg6AYAAAAAAPIGAR8AAAAAJCkCPgAAAABIUgR8AAAAAJCkCPgAAAAAIEkR8AEAAABAkiLgAwAAAIAkRcAHAAAAAEmKgA8AAAAAkhQBHwCETCQSsWHDhgXdDCCpdezY0S0AEHYEfADylUceecQFTO3atcvw/iVLlrhgauXKlRn+7pQpU+LQSrMZM2YkXFCn9mjf+UuJEiWsSZMmduutt9rmzZtz9Fzbt293zzd79uxQ/O37Qn+b9tNLL71kySD96+8vxYoV2+/n/OuvvzK8v27dunbqqadaops7d64deeSR7jNRtWpVGzBggG3dujXoZgGAU+jffwAgf3juuefcSeT8+fPtp59+sgYNGuwV8A0fPtxd2dfj0gd8FStWtN69e+d5OxX0PPzwwxkGPv/8848VKhTc4fvRRx+1UqVKuRPad9991+666y774IMPbM6cOe7kPbsBn/azpM+iZPW3I3j+6+8rWLCg5WeLFy+2448/3g4++GB74IEH7Ndff7VRo0bZjz/+aG+//XbQzQMAAj4A+ceKFSvclfhXXnnFLrvsMhf8DR061MJmfzIqueHss892ga9cfvnldtZZZ7l9+tlnn9kRRxxhicjzPNuxY4cVL1486KYkrOzuo9jXH2Y333yzHXDAAS6jW6ZMGbdLdLGoX79+7oLIiSeeyG4CECi6dALINxTg6cTslFNOcSetuh1L3TXPOecc9/Oxxx4b7bKmEzmdwH333Xf20UcfRdfHZqY2btxo1157rdWqVcuKFi3qMof33nuvpaamRh+jbqL6PV39f/zxx+3AAw90j23btq0tWLAg+jhlEJXhktiuc1mN4Vu0aJGdfPLJ7oRT2RdlHBSApf/79LvKxA0aNMgqVapkJUuWtDPOOMP+/PPPfd6vxx13XDSg3rVrl91+++126KGHWtmyZd3zH3XUUfbhhx+m2Q/atijL5/99+pv+62/X/hwzZow1bdrUBb5VqlRxwfuGDRsy7Ar4zjvvWJs2bVwQ89hjj0W7Wb7wwgsuM1mzZk33PNpfyvgGQe+H9u3bW4UKFVw7te/SdwM95phjrGXLlhn+fuPGja1z5865to+yExiqC6/+DUJ2/77XXnvNfdarV6/uPmf6vI0YMcJSUlL2ek7/86h9cNhhh9knn3ySrbZoP7z33nt20UUXRYM96dmzp/sc6n0GAEGjSyeAfEMB3plnnmlFihSxCy64wHVNU6ClgEuOPvpoN/Zm3Lhx7qq9umiJ/tUJ5tVXX+1O4m655Ra3XieafvdEnZD/9ttv7sSzdu3aLpM4ZMgQ+/33393vxnr++edty5Yt7
rEKPu677z7Xrp9//tkKFy7s1q9Zs8adSD7zzDP/+XcpEFVQpRPOG2+80T2HTtwVkCpATT9eUX+HAl9lNxV8qX39+/e3adOm7dN+Xb58uftXAYtOgJ944gm3f5Xh0N/55JNPuoBE3WhbtWrlgj3t+yuuuMIFm/rbpUWLFrZt27Ys/3btGwWuffr0ca+Vgszx48e7gFeBrP5237Jly1w79DtqiwIj3z333GMFChSw66+/3jZt2uReg+7du9vnn39u8TZ27Fg77bTT3PYVME+dOtVdeHjzzTddwCI9evRwf8O3335rzZo1i/6u3r8//PCDG0eZ2/soM/Xr13fdeRXMd+vWzUaPHh39LOyr9evXZ7g+9oJJTv8+PUafV13c0L/qdqyLEXqP3n///dHn0/tTz6mgWxdt9DnU61G+fHl3AScr33zzje3Zs8cFzLF0jNF7XW0CgMB5AJAPfPHFF0pHeO+99567nZqa6tWsWdO75ppr0jzuxRdfdI/78MMP93qOpk2besccc8xe60eMGOGVLFnS++GHH9Ksv+mmm7yCBQt6q1atcrdXrFjhnrtChQre+vXro4977bXX3Po33ngjuu6qq65y6zKi9UOHDo3e7tatm1ekSBFv+fLl0XVr1qzxSpcu7R199NHRdZMnT3a/26lTJ/f3+wYOHOjauXHjRi8r2qZ+f9myZd6ff/7p/p7HHnvMK1q0qFelShVv27Zt3p49e7ydO3em+b0NGza4+y+++OLoOv1++r/jv/72Tz75xK1/7rnn0qyfOXPmXuvr1Knj1um+WHpdtf7ggw9O086xY8e69d98842XW/xt6T2Vle3bt6e5vWvXLq9Zs2becccdF12n16ZYsWLe4MGD0zx2wIAB7r23devWXNtHmRkzZozXv39/9xwvvfSS++wUKlTIa9iwobdp0yZvX/jvqayWU045Jfr4nPx96ferXHbZZV6JEiW8HTt2RPd15cqVvVatWqV5Pzz++OPu+TL6vGd0vPj444/3uu+cc87xqlatmsM9AgC5jy6dAPJNdk9ZCHXVFGXWzjvvPJdNyaiLV068+OKLLsOmrJmqDfpLp06d3HN//PHHaR6v7eqxPv2uKLOQU3p+jRNSpkWZF1+1atXswgsvtE8//XSvCpqXXnppmm6S2r6e55dffsnWNpUFUpauXr16LjOi7qtvvfWWq1CoAh7KbvjZGWVu/AzIl19+afu7n9VN9IQTTkizn9UFUhmc2G6jovbFdnWMpeyQ305/H+zra7C/YsfMqVuiMo5qT+z+0t99+umn2//+979oV0q9ZsrK6rVXti2391F611xzjT300EPufaVxm8oMP/XUU644iQoa7Y+XX37ZZXXTL+kzhzn5+2L3qzLNepz2qzLyS5cudeu/+OILW7dunRuLGvt+UNdibee/qICSqMtoeupu6t8PAEGiSyeApKcTYwV2CvbU/cunro7qjjZr1qz9KqygE96vv/46Oi4tPZ1QxlKXz1h+8Jd+DFJ2aOydTmAz6oqnrqgKulavXu3GO+XW9nVyru6j6jqnMXAa+xRLQYD2q06qd+/enSa42B/azwqGKleunK39nNX29mUfqLtl+q6Hes33t0qlum7eeeedrtrjzp07o+vTVzzVuDAFeBpfpu7H77//vv3xxx+uu2de7KPsUPB33XXXubbcdNNN+/w8+nsyKgSTvkBRTv4+dXVWV1d15Ux/0UPPIf5FjoYNG6a5X+/t2AsomfGDytjXzUeRIACJgoAPQNLTCZ/G0ino05JR9m9/Aj4FVco4aPxcRho1apTmdmYBQryKYOzv9jM7OZdnn33WZUeUdbrhhhvcibm2N3LkyOhYv/3Zz3q+9MV2fOkD7qyqTe7LPtC4TD9D7NMFhPTTd+SEgjeNF9M+VZZMmVkFG5MnT3ZjPWMpE6eMl/axHq9/NeebMsl5sY+yS+PcMhuDl9uy+/epiJLG1erCxB133OEuSih4VNZ08ODBGY4N3Bd6vUTHl
/S0TgVjACBoBHwAkp5ODnWS6Fd/jKXpBKZPn24TJkxwJ79ZzSOX2X06mVQRi9gT7/2V3fnsdIKrrpQqvpGeMmwqTPJfhSdyk6pLKjOi/Rr7N6Sf/mJf97MySR06dAhkegVVyVQ3w1gKuPaHsqUKRFQpM7ZboAK+jIJUZdRUjEQVYF999VVXaCU2eI33PlKArMI/rVu3tnjI7t+naqx///23ex8qOPbFZvilTp060cyhX21WlJnWYzOrjOpTAR3Niamuoeeee26abLAytrHrACAojOEDkNQ0hkYnfSo/r6kY0i+qTqnxPa+//rp7vD8WShmC9HRfRut1Ujdv3jx30p6eHq8xbDmVVTti6WRf2UmVoNeJt09d/ZQhOvLII9OUi89rfvARmylT5Uvtn1gKUrPazxndp/2s7rkqrZ+e9vF/7av9pW6fCupjl/2dE1H7SwFu7DhSvY4K5jKi7pvqdqqxk7rIoOkA4rWPMpq6Q9VWtf6kk06yeMju35fR+1BBWPqxhhpbqosmuuCj+30KqrOzrzTOT+8DZVt1HPGpwqxeH3+aFwAIEhk+AElNgZxOxNRtLiOHH364O+FTFlDFVFRKXSeLyqBonI+yLrryrwyhCkPoBFfjrVSoROt0n7ouajsKKtWdUY/T9AIq2a6Ml07gczpRtZ5DVHZeXfnUpvPPPz/Dx6o9yjwpuLvyyitdxkHTMmhckaYbiCftAwXYmm5BUwooS6KT6SZNmrgTYJ+yM1qnMWnq8qoS+MqWaMnsb1cXPQU66h6q7IkCXXV/VHZGxTw0vYGC+ESjLJ5fJCRWr1693D564IEHXMCk7J3GoCkTrfeXxoWmp0ya9pH+Xo3RPOSQQ9Lcn5f7SNkwfUaaN2/uAl0VBFIXaX1mtM1Y/pQgud1NObt/n6ZYUICufaz3kYJqBWHp26Pf1edHz6nPsv4+vWeVYc3OGD7RfI7antqmgki//vqrG8OqtsUrEAaALOVB5U8ASBhdu3Z15ew1ZUBmevfu7RUuXNj766+/3O2JEyd69evXd1MVxE7RsHbtWlciXtMdpC/ZvmXLFm/IkCFegwYN3BQJFStW9Nq3b++NGjXKlX6PnZbh/vvv36sN6aco0PQGV199tVepUiUvEomkmaYgo+kMvvzyS69z585eqVKlXNn5Y4891ps7d26ax/jTMixYsCDD6QMymooioxL6mlIhM5ru4e6773Yl/zVdQ+vWrb0333zT69Wrl1sXS+079NBD3f6K/Zuy+tv9kvn6veLFi7vXonnz5t6NN97opqLwaVux5fz/a6oE/7XRPsot/rYyWzTFgDz55JNuagPtr4MOOsi1wd/XGbnvvvvcfdrPmdmffZSZSy65xGvSpIl7Pn1e9F7XNBGbN2/e67HadnamJPiv91RmbczO3zdnzhzv8MMPd4+pXr26u/+dd97J8L3+yCOPePXq1XOvQZs2bdw0C/p8/9e0DD69lvq861ij962mFslovwBAECL6X9YhIQAASBTKYg0cONBljtNXG00EyqgrY6tpG6666qqgmwMA+R4BHwAAIaFrtCokUqFChb3m1EsUmpNRgd4PP/yQZm47AEAwCPgAAEhwGhOqcaIK8iZOnOiK9GQ2LhUAgFgEfAAAJDh139Qk6eXKlXOFeVQoBACA7GBaBgAAEpwmd1d3Tk3JQLAHAOH08ccfW9euXa169equenBmU/Ckn1dUFZlVNVwVnDVtTE4R8AEAAABAHLrnaxy2pt/JDk0To+l7jj32WDcVzbXXXmuXXHJJhvP+ZoUunQAAAAAQR8rwTZ8+3bp165bpYwYPHuwKYX377bfRdZqXduPGjTZz5sxsb4uJ15EtqamptmbNGitdurR7gwIAACA5qQu5plhR18MCBRK7Q+COHTts165dge6rSLpzY3W/1LK/5s2bZ506dUqzrnPnzi7TlxMEfMgWBXu1atVibwEAAOQTq1evtpo1a1oiB3v16pSytetSAmtDqVKlbOvWrWnWD
R061IYNG7bfz7127VqrUqVKmnW6vXnzZvvnn3+sePHi2XoeAj5kizJ78suXda1MqcS+0iNnNGoedBMAAABCaY/ttk9tRvT8L1Eps6dg75eFda1M6fifn27ekmp1Dl3pAuMyZcpE1+dGdi83EfAhW/xUtYK9ID5QOVUoUjjoJgAAAIST9+8/YRnGo3PTMqULBrf9MmXSBHy5pWrVqvbHH3+kWafb2lZ2s3tCwAcAAAAgtFLNs1RLDWS7eemII46wGTNmpFn33nvvufU5kfipGgAAAAAIua1bt7rpFbT40y7o51WrVrnbQ4YMsZ49e0Yff/nll9vPP/9sN954oy1dutQeeeQRe+GFF2zgwIE52i4ZPgAAAAChleKlWooXzHZz4osvvnBz6vkGDRrk/u3Vq5ebUP3333+PBn9Sr149Ny2DAryxY8e6AjpPPPGEq9SZEwR8AAAAAJDHOnbs6KZxyIyCvox+Z9GiRfu1Xbp0AgAAAECSIsMHAAAAIORFW+LfpzM1gG3uCzJ8AAAAAJCkyPABAAAACK3UQCZlsIC2mnNk+AAAAAAgSRHwAQAAAECSoksnAAAAgNBK8Ty3BLHdMCDDBwAAAABJigwfAAAAgNBiWoaskeEDAAAAgCQVmoCvd+/eNmzYsDzfzuzZsy0SidjGjRuz/Tvbt2+3s846y8qUKZPj380NK1eudNsFAAAAgFAGfInsqaeesk8++cTmzp1rv//+u23YsMEFYIsXL86TwC63nxcAAAAIc5fOlACWVKNoy35T4LR161ZLdMuXL7eDDz7YmjVrZlWrVk2IbNuqVauCbgIAAACAgCVchm/Pnj321ltv2TnnnGPVqlVzwVRmXnvtNTvkkEOsWLFiVr9+fRs+fLj7fZ8CryeeeMLOOOMMK1GihDVs2NBef/31NM8xY8YMa9SokRUvXtyOPfZYl0VL7+WXX7amTZta0aJFrW7dujZ69OjofR07dnS3P/74Y7c93a5Xr567r3Xr1tF1PrVHwaHafNBBB9kjjzwSve/iiy+2Fi1a2M6dO93tXbt2uefo2bOnu53V86bXq1cvF4Def//9LusIAAAAJHPRliCWMEiYgO+bb76x6667zmrWrOkCnEqVKtmHH35oLVu2zPDx6kKpx11zzTW2ZMkSe+yxx2zKlCl21113pXmcgsBzzz3Xvv76a+vSpYt1797d1q9f7+5bvXq1nXnmmda1a1fXTfKSSy6xm266Kc3vL1y40P3++eef79qocYS33Xab25a88sor1q9fPzviiCNcYKXb8+fPd/e9//770XXy3HPP2e233+7a+P3339vdd9/tnktdQmXcuHG2bdu2aBtuueUWNx5w/Pjx7nZmz5uRF154wS699FKbNm2a1apVy/3t+nnHjh3Zej0UdG7evDnNAgAAACBcAg34/v77bxs7dqzL0rVp08Z+/vlnl/FSMKN/FURlRoGcAiNlspTdO+GEE2zEiBEu8Etf7OWCCy6wBg0auABLXUT9wOnRRx+1Aw880GXoGjdu7IJBPT7WAw88YMcff7wLzJQJ1P39+/d3mTMpX768yx4WKVLEdefUbQWrUqFCheg6GTp0qNuWgkxl6/TvwIEDo20uVaqUPfvss/bwww+7wHDMmDH2zDPPuGIwktnzZkSPHTBggH3xxRcuUFXm8Prrr3dZ08svv9w+++yzLF+bkSNHWtmyZaOLgkYAAAAA4RJowPfQQw/Ztdde6wKdn376yaZPn+6CIAVP/+Wrr76yO+64w/2uvyjTpmBRVTN9CnR8JUuWdMHTunXr3G1l2dq1a5fmedMHmXpMhw4d0qzT7R9//NFSUlKy/bcqc6fuqX379k3T5jvvvDNNt1VtX4GZgldlPI888kjbX+pCes8999gvv/ziguRJkybZSSedlOXvDBkyxDZt2hRdlA0FAAAAEk2K5wW2hEGgE6+ry2GhQoXs6aefdmPkNLVBjx493Ni0AgWyjkWVqVOWTwFiehof5ytcuHCa+
zT2LTU11eLNLz4zceLEvYLMggULRn9W2+bMmePWKQjODQrW1J1U2cIVK1a48ZF9+vTJ8nc0XlELAAAAgPAKNMNXvXp1u/XWW+2HH36wmTNnusyeArg6deq4TNR3332X6e+qG+iyZctcV830y38Fi7GZL797py99V0c9RgFYLN1W987YQC2Wn6GMzQBWqVLF/b3qtpq+vX4xFlFX0aVLl9pHH33k9snkyZOzfN7MbNmyxY0zPO6441yhGRXCGTRokK1du9YFf506dfrP5wAAAAASXWqASxgEmuGL1b59e7doTN+rr77qgpVRo0bZokWLrHnz5ns9XmPcTj31VKtdu7adffbZLshTN89vv/3WdZPMDo1l05i6G264wRVsUYEWvxiLT90q27Zt67pYnnfeeTZv3jxXRCW2umZ6lStXdlU/FbCpCI0yjhoHp4ykxtXpZ3WpVGEUjbHT9BMKxvS36u966aWXXLdRjR9UUZpjjjnGjVPM7Hkz0q1bNxdcKmOqrKLGKgIAAADIXxKmSqdPQYwqYiqo0VxyyvZlpHPnzvbmm2/au+++6wKyww8/3B588MFMH58RBYuackEBpqqBTpgwwRV2SZ9JVMXLqVOnumkOFJBp7GD64i6x1E1VFTdVjEVZvdNPP92tV1CpaRmUtVMQq0BOAaYyfKqeedFFF7nnVdVQv8urpopQ0KasXmbPmxEFpAr41FaCPQAAACB/inheOEYbKhBS10RNi4C0NHeggsa8fCk1LYOyiRt+qG9lSifcdYK9dK7eKugmAAAAhNIeb7fNttdc4T6/Wnwi8s9Pv/u+spUO4Px0y5ZUa3rwuoTfT4l/5g4AAAAACPcYPgAAAADIqRTv3yXeUkLRTzJEAZ+KkJQrVy7oZiQk7RdN6g4AAAAAoQ34kHnAx9hGAAAAAKEN+AAAAAAgvaDmxEsNyUtB0RYAAAAASFJk+AAAAACEVqpFLMUigWw3DMjwAQAAAECSIuADAAAAgCRFl04AAAAAoZXq/bsEsd0wIMMHAAAAAEmKDB8AAACA0EoJqGhLCkVbAAAAAABBIsOHHNntpdhuLwQdlgsUtFBITQm6BQAAAEhiBHwAAAAAQosunVmjaAsAAAAAJCkyfAAAAABCK9WLuCWI7YYBGT4AAAAASFIEfAAAAACQpOjSCQAAACC0KNqSNTJ8AAAAAJCkyPABAAAACK0UK+CW+G83HMjwAQAAAECSIuADAAAAgCRFl04AAAAAoeUFNA+fxzx8AAAAAIAgkeEDAAAAEFpMy5A1xvABAAAAQJIi4AMAAACAJEWXTgAAAAChleIVcEv8t2uhQIYvwUUiEXv11VeDbgYAAACAECLgi5MJEyZY6dKlbc+ePdF1W7dutcKFC1vHjh3TPHb27Nku0Fu+fHm8mgcAAACEUqpFLNUKBLBELAwI+OLk2GOPdQHeF198EV33ySefWNWqVe3zzz+3HTt2RNd/+OGHVrt2bTvwwAPj1TwAAAAASYiAL04aN25s1apVc9k7n34+/fTTrV69evbZZ5+lWa8A0ffXX3/ZGWecYSVKlLCGDRva66+/nua5v/32Wzv55JOtVKlSVqVKFevRo4f7HZ8yiAMGDLAbb7zRypcv74LMYcOG5fnfDAAAACBYBHxxpCBO2TufflYwdswxx0TX//PPPy7jFxvwDR8+3M4991z7+uuvrUuXLta9e3dbv369u2/jxo123HHHWevWrV32cObMmfbHH3+4x8d66qmnrGTJku6577vvPrvjjjvsvffey7StO3futM2bN6dZAAAAgESdhy+IJQwI+OJIQdycOXPcOL4tW7bYokWLXLB39NFHRzN/8+bNc8FWbMDXu3dvu+CCC6xBgwZ29913u66h8+fPd/eNHz/eBXtaf9BBB7mfJ02a5ALIH374IfocLVq0sKFDh7oMYc+ePa1NmzY2a9asTNs6cuRIK1u2bHSpVatWnu4bAAAAALmPaRniSNm8bdu22YIFC2zDhg3Wq
FEjq1Spkgv6+vTp48bxKfCrX7++G8MXG6z5lKUrU6aMrVu3zt3+6quvXHCn7pzpqeiLtpH+OUTdS/3nyMiQIUNs0KBB0dvK8BH0AQAAINEENy2DZ2FAwBdHytDVrFnTBWgK+BToSfXq1V0wNXfuXHefumjGUiXPWKrgmZqa6n5Wtq9r165277337rU9BXXZeY6MFC1a1C0AAAAAwouAL87UVVNZPAV8N9xwQ3S9unW+/fbbrqvmFVdcke3nO+SQQ+zll1+2unXrWqFCvJwAAAAA/j/G8AUQ8H366ae2ePHiaIZP9PNjjz1mu3btSjN+779cddVVroCLxvipq6i6cb7zzjuui2hKSkoe/RUAAABAIs3DF8wSBgR8caZgTpU41b1TUyjEBnwq5OJP35Bd6g6qQjAK7k488URr3ry5XXvttVauXDkrUICXFwAAAMjP6AMYZ+p66WUwwLNOnToZrs9onaZiiKXKm6+88kqm24yd+8/36quv5qDVAAAAQGJKtQKWEkAeK9XCUbSFFBAAAAAAJCkCPgAAAABIUnTpBAAAABBazMOXNTJ8AAAAAJCkyPABAAAACHXRFi3x365nYUCGDwAAAACSFAEfAAAAACQpunQCAAAACK0UL+KWILYbBmT4AAAAACBJkeEDAAAAEFopVsAt8d+uZ2FAhg8AAAAAkhQBHwAAAAAkKbp0AgAAAAitVK+AW+K/Xc/CgAwfAAAAACQpMnzIkVT3XwikplgYRAqF5yPo7dkTdBMAAAD2QtGWrJHhAwAAAIAkRcAHAAAAAEkqPP3JAAAAACAdDTdK8SJx3y+pIXklyPABAAAAQJIiwwcAAAAgtFKtgFuC2G4YhKOVAAAAAIAcI+ADAAAAgCRFl04AAAAAoZXiFXBLENsNg3C0EgAAAACQY2T4AAAAAIRWqkXcEsR2w4AMHwAAAAAkKQI+AAAAAEhSdOkEAAAAEFoUbckaGT4AAAAASFJk+AAAAACEVooVcEsQ2w2DcLQSAAAAAJBjZPgAAAAAhFaqF3FLENsNAzJ8AYtEIm757LPP0qzfuXOnVahQwd03e/bswNoHAAAAILwI+BJArVq1bPLkyWnWTZ8+3UqVKrXfz71r1679fg4AAAAA4UTAlwB69eplU6dOtX/++Se6btKkSW59eoMHD7ZGjRpZiRIlrH79+nbbbbfZ7t27o/cPGzbMWrVqZU888YTVq1fPihUrZk8//bTLFiprGKtbt27Wo0ePPP7rAAAAgLyT+n9FW+K9pIYklApHK5PcoYceanXr1rWXX37Z3V61apV9/PHHGQZjpUuXtilTptiSJUts7NixNnHiRHvwwQfTPOann35yz/XKK6/Y4sWL7ZxzzrGUlBR7/fXXo49Zt26dvfXWW3bxxRdn2CYFh5s3b06zAAAAAAgXAr4EocBLWT1RQNelSxerVKnSXo+79dZbrX379i5A7Nq1q11//fX2wgsv7NWNU1m91q1bW4sWLax48eJ24YUXpuk2+uyzz1rt2rWtY8eOGbZn5MiRVrZs2eiibqcAAABAokn1CgS2hEE4WpkPXHTRRTZv3jz7+eefXcCXWeZt2rRp1qFDB6tataob46cAUBnBWHXq1NkrWOzXr5+9++679ttvv7nb2kbv3r1dUZiMDBkyxDZt2hRdVq9enWt/KwAAAID4IOBLEBpjd+qpp1rfvn1tx44ddvLJJ+/1GAWE3bt3d9m/N9980xYtWmS33HLLXoVZSpYsudfvKtvXsmVLl/lbuHChfffddy7gy0zRokWtTJkyaRYAAAAA4cI8fAlEWT0FcyrMUrBgwb3unzt3rsveKcjz/fLLL9l+/ksuucTGjBnjsnydOnWimyYAAABCL8Uibgliu2FAwJdATjrpJPvzzz8zzaY1bNjQdd9URc+2bdu6oiuaviG7NI5PY/5U6EWZPgAAAADJjS6dCUTj6SpWrGhFihTJ8P7TTjvNBg4caP3793dTLyjjp2kZskvFV8466yw39
k9TMgAAAABhR9GWrEU8z/P+4zFIIscff7w1bdrUxo0bl6Pf07QMChjXLqtlZUon/nWC02q0tTCIFApPkt3bsyfoJgAAgDjY4+222faaK9yXyHUc/PPT4Z93smKl4n9OtWPrHhva7v2E30/hOdvEftmwYYPNnj3bLY888gh7EwAAAMgHCPhy2cqVK61evXrZeqyqZmpi9HhQlU4Ffffee681btw4LtsEAAAA8lpKQAVUUiwcCPhyWeHChbMdUGU3MMytQBQAAABA/kLAl8tq1KhhS5cuze2nBQAAAJBF0ZZ4Sw1gm/siHK0EAAAAAOQYAR8AAAAAJCm6dAIAAAAIrRSvgFuC2G4YhKOVAAAAAIAcI8MHAAAAILQ8i1hqANMyeAFsc1+Q4QMAAACAOHj44Yetbt26VqxYMWvXrp3Nnz8/y8ePGTPGTflWvHhxq1Wrlg0cONB27NiRo20S8AEAAABAHps2bZoNGjTIhg4dal9++aW1bNnSOnfubOvWrcvw8c8//7zddNNN7vHff/+9Pfnkk+45br755hxtl4APAAAAQOiLtgSx5MQDDzxg/fr1sz59+liTJk1swoQJVqJECZs0aVKGj587d6516NDBLrzwQpcVPPHEE+2CCy74z6xgegR8AAAAAJCHdu3aZQsXLrROnTpF1xUoUMDdnjdvXoa/0759e/c7foD3888/24wZM6xLly452jZFWwAAAACEVqoXcUsQ25XNmzdbrKJFi7ol1l9//WUpKSlWpUqVNOt1e+nSpZYRZfb0e0ceeaR5nmd79uyxyy+/PMddOgn4kCOr9+yyUntIDOeWSJEioXkHRtIduBJV6rZtQTcBAADkI7Vq1UpzW2Puhg0btt/PO3v2bLv77rvtkUcecQVefvrpJ7vmmmtsxIgRdtttt2X7eQj4AAAAAGAfrV692sqUKRO9nT67JxUrVrSCBQvaH3/8kWa9bletWjXD51VQ16NHD7vkkkvc7ebNm9u2bdvs0ksvtVtuucV1Cc0OUjUAAAAAQivFCgS2iIK92CWjgK9IkSJ26KGH2qxZs6LrUlNT3e0jjjjCMrJ9+/a9gjoFjaIuntlFhg8AAAAA8pimZOjVq5e1adPGDjvsMDfHnjJ2qtopPXv2tBo1atjIkSPd7a5du7rKnq1bt4526VTWT+v9wC87CPgAAAAAhFbQRVuy67zzzrM///zTbr/9dlu7dq21atXKZs6cGS3ksmrVqjQZvVtvvdUikYj797fffrNKlSq5YO+uu+6ynIh4OckHIt9S9aGyZcvagu+qWKnSid8T+Oo6HSwMCpQoYaERif+BdF9QtAUAgP2zx9tts+0127RpU5qxaYl6fjrg09OtaKnCcd/+zq27bdyRib+fEv/MHQAAAACwT+jSCQAAACC0Uq2AW4LYbhiEo5UAAAAAgBwjwwcAAAAgtFK8iFuC2G4YkOEDAAAAgCRFwAcAAAAASYounQAAAABCKyzz8AWFDB8AAAAAJCkyfAAAAABCy/MKWKpXIJDthkE4WgkAAAAAyDECPgAAAABIUgR8+VTHjh3t2muvDboZAAAAwH5JsUhgSxgQ8AEAAABAkqJoCwAAAIDQSvWCmSIh1bNQIMOX4AYPHmyNGjWyEiVKWP369e22226z3bt3R+8fNmyYtWrVyp555hmrW7eulS1b1s4//3zbsmVL9DHbtm2znj17WqlSpaxatWo2evTo/9zuzp07bfPmzWkWAAAAAOFCwJfgSpcubVOmTLElS5bY2LFjbeLEifbggw+meczy5cvt1VdftTfffNMtH330kd1zzz3R+2+44Qa37rXXXrN3333XZs+ebV9++WWW2x05cqQLHv2lVq1aefY3AgAAAMgbBHwJ7tZbb7X27du77F3Xrl3t+uuvtxdeeCHNY1JTU11Q2KxZMzvqqKOsR48eNmvWLHff1q1b7cknn7RRo0bZ8ccfb82bN7ennnrK9uzZk+V2hwwZYps2bYouq1evztO/EwAAA
NgXmoMvqCUMGMOX4KZNm2bjxo1zWTwFbwrUypQpk+YxCgaVCfSp2+a6devcz/q9Xbt2Wbt27aL3ly9f3ho3bpzldosWLeoWAAAAAOEVjrA0n5o3b551797dunTp4rpqLlq0yG655RYXwMUqXLhwmtuRSMRl/QAAAIBkl2qRwJYwIOBLYHPnzrU6deq4IK9NmzbWsGFD++WXX3L0HAceeKALCD///PPoug0bNtgPP/yQBy0GAAAAkEjo0pnAFOCtWrXKpk6dam3btrW33nrLpk+fnqPnUGXOvn37usItFSpUsMqVK7sAskABYn0AAAAg2RHwJbDTTjvNBg4caP3793fTJJxyyiluWgZNxZAT999/vxv/p6IvGut33XXXuUIsAAAAQNileBG3BLHdMIh4nheSKQMRJM3Dp+kZFnxXxUqVTvzs4NV1OlgYFChRwkIjEo6DWuq2bUE3AQCAUNvj7bbZ9ppLEKQvFpiI56cXfnChFSlVJO7b37V1lz1/3PMJv5/I8AEAAAAIraCmSEgNybQM4WhlElm5cqWropmdpVWrVkE3FwAAAECIkeGLM1XM/K858Hz16tXL8/YAAAAASF4EfHFWo0YNW7p0abw3CwAAACQlNydeAAVUUpmHDwAAAAAQJDJ8AAAAAELLU4YvgGybR4YPAAAAABAkqnQCAAAAQJKiSycAAACA0FLBlkCKtnjx3+a+IMMHAAAAAEmKDB8AAACA0Er1CrgliO2GQThaCQAAAADIMQI+AAAAAEhSdOlEjtx4YR8rVLBoCPbadxYGqdu3W1icseRPC4PpzapaaKSmBN0CAABCj6ItWSPDBwAAAABJigwfAAAAgNBKtYhbgthuGJDhAwAAAIAkRcAHAAAAAEmKLp0AAAAAQouiLVkjwwcAAAAASYoMHwAAAIDQIsOXNTJ8AAAAAJCkCPgAAAAAIEnRpRMAAABAaNGlM2tk+AAAAAAgSZHhAwAAABBaZPiyRoYPAAAAAJIUAR8AAAAAJCm6dAIAAAAILU/dOi0SyHbDgAwfAAAAACQpMnwh17t3b9u4caO9+uqrQTcFAAAAiDuKtmSNDF+CiUQiBG8AAAAAcgUBHwAAAAAkKQK+BPfNN9/YcccdZ8WLF7cKFSrYpZdealu3bt3rcaNGjbJq1aq5x1x11VW2e/fu6H1169a1u+++2y6++GIrXbq01a5d2x5//PE4/yUAAABA3nXpDGIJAwK+BLZt2zbr3LmzHXDAAbZgwQJ78cUX7f3337f+/funedyHH35oy5cvd/8+9dRTNmXKFLfEGj16tLVp08YWLVpkV155pV1xxRW2bNmyTLe9c+dO27x5c5oFAAAAQLgQ8CWw559/3nbs2GFPP/20NWvWzGX6xo8fb88884z98ccf0ccpINT6gw46yE499VQ75ZRTbNasWWmeq0uXLi7Qa9CggQ0ePNgqVqzoAsTMjBw50sqWLRtdatWqlad/KwAAALAvyPBljYAvgX3//ffWsmVLK1myZHRdhw4dLDU1NU12rmnTplawYMHobXXtXLduXZrnatGiRZrCMFWrVt3rMbGGDBlimzZtii6rV6/Oxb8MAAAAQDwwLUMSKFy4cJrbCugUFOb0MbGKFi3qFgAAAADhRYYvgR188MH21VdfubF8vjlz5liBAgWscePGgbYNAAAASAR06cwaAV8C6969uxUrVsx69epl3377rRtzd/XVV1uPHj2sSpUqQTcPAAAAQIIj4EsgfhfLQoX+7WlbokQJe+edd2z9+vXWtm1bO/vss+344493BVoAAAAAmHleJLAlDBjDl0D8IioqqOJr3ry5ffDBB5n+TvrpF2TMmDFpbq9cuXKvxyxevHg/WwsAAAAg0RHwJQDP8+yXX35xk6erq6amYAAAAACA/UXAl8eUXatXr162HqspGKZOnerG7QEAAAD4b6kWcUu8pQawzX1BwJfHNB1CditqKjDs2LFjXjcJAAAAQD5BwJfHatSoYUuXLs3rz
QAAAAD5elqGILYbBlTpBAAAAIAkRcAHAAAAAEmKLp0AAAAAQiuoOfE8unQCAAAAAIJEhg8AAABAaFG0JWuM4QMAAACAJEXABwAAAABJii6dAAAAAEKLoi1ZI8MHAAAAAEmKDB8AAACAUGf4VLgliO2GAQEfcuTZac9amdKJnxg+o+ZhFgoFClpYTG9SyUIhkhp0CwAAABJG4p+5AwAAAAD2CRk+AAAAAKHlue6VwWw3DMjwAQAAAECSIsMHAAAAILRSLeL+C2K7YUCGDwAAAACSFAEfAAAAACQpunQCAAAACC3NhxfEnHheSObhI8MHAAAAAEmKDB8AAACA0Er1IhYJINuWSoYPAAAAABAkunQCAAAAQJKiSycAAACA0PK8f5cgthsGZPgAAAAAIEmR4QMAAAAQWkzLkDUyfAAAAACQpAj4AAAAACBJEfDlM8OGDbNWrVoF3QwAAAAgV7t0BrGEAQEfAAAAACQpAr4AnX322da/f//o7WuvvdYikYgtXbrU3d61a5eVLFnS3n//fXc7NTXVRo4cafXq1bPixYtby5Yt7aWXXor+/uzZs93vz5o1y9q0aWMlSpSw9u3b27Jly9z9U6ZMseHDh9tXX33lHqdF6wAAAICwSvUigS1hQMAXoGOOOcYFab6PPvrIKlasGF23YMEC2717twvaRMHe008/bRMmTLDvvvvOBg4caBdddJH7vVi33HKLjR492r744gsrVKiQXXzxxW79eeedZ9ddd501bdrUfv/9d7doXUZ27txpmzdvTrMAAAAACBcCvgB17NjRlixZYn/++adt2LDB/XzNNddEAz7927ZtW5epUwB2991326RJk6xz585Wv3596927twv4HnvssTTPe9ddd7lgskmTJnbTTTfZ3LlzbceOHS4rWKpUKRcEVq1a1S1alxEFl2XLlo0utWrViss+AQAAAJB7mIcvQM2aNbPy5cu7DF2RIkWsdevWduqpp9rDDz/s7td6BYXy008/2fbt2+2EE05I8xzq9qnfi9WiRYvoz9WqVXP/rlu3zmrXrp3ttg0ZMsQGDRoUva0MH0EfAAAAEo3n/bsEsd0wIOALkMbQHX300S6TV7RoURfcKVhTNu/bb791mbnrr7/ePXbr1q3u37feestq1KiR5nn0u7EKFy6cZhv++L+c0HOmf14AAAAA4ULAFzB1vZw4caILrtQVs0CBAi4IvP/++13g16FDB/c4dc/UY1atWuV+Z18pk5iSkpKLfwEAAAAQdIYv/gVUPDJ8yA5l9VR8RYHYkUceGV2nzJ7G76lKp5QuXdqt02OVrdNjN23aZHPmzLEyZcpYr169srW9unXr2ooVK2zx4sVWs2ZN97xk8gAAAIDkRIYvYM2bN7dy5cpZo0aNXEEVP+BTFs4fv+cbMWKEVapUyRVU+fnnn93vHXLIIXbzzTdne3tnnXWWvfLKK3bsscfaxo0bbfLkya74CwAAAIDkE/G8sCQjESQVbVG1zt+W1rQypRO/uOsZNQ+zUChQ0EIjNSRdgf9v3GoocPgFACSgPd5um22vud5k6kmW6OenDZ4ZYgVLFIv79lO277CfeoxM+P2U+GfuAAAAAIB9QsCXy1auXOkqY2ZnadWqVW5vHgAAAMhXvACXMGAMXy7TlAiNGzfO1mPr1auX25sHAAAAgCgCvlymOfKWLl2a208LAAAAADlGwAcAAAAgtDQHXzDz8EUsDBjDBwAAAABJigwfAAAAgPAKqoKKZ6FAhg8AAAAAkhQBHwAAAAAkqRx16dy4caNNnz7dPvnkE/vll19s+/btVqlSJWvdurV17tzZ2rdvn3ctBQAAAID0AiraYslUtGXNmjV2ySWXWLVq1ezOO++0f/75x00afvzxx1vNmjXtww8/tBNOOMGaNGli06ZNy/tWAwAAAEDIPPzww1a3bl0rVqyYtWvXzubPn/+fCberrrrKxWFFixa1Ro0a2YwZM3I/w6cMXq9evWzhwoUuqMuIgsBXX
33VxowZY6tXr7brr78+Rw0BAAAAgJzyvH+XePNyuE0lxgYNGmQTJkxwwZ7iJvWSXLZsmVWuXHmvx+/atcsl1XTfSy+95Ob7Vi/LcuXK5X7At2TJEqtQoUKWjylevLhdcMEFbvn7779z1AgAAAAASGYPPPCA9evXz/r06eNuK/B76623bNKkSXbTTTft9XitX79+vc2dO9cKFy7s1ik7mCddOv8r2NvfxwMAAADA/ky8HsQimzdvTrPs3LnTMsrWqbdkp06dousKFCjgbs+bN88y8vrrr9sRRxzhunRWqVLFmjVrZnfffbelpKRYns/Dt3z5cpeC/P77791tdfO85ppr7MADD9yXp0OIFIxE3IJckpqzDyyyIYg+HckuTJ95Xn8AQJzVqlUrze2hQ4fasGHD0qz766+/XKCmwC2Wbi9dujTD5/3555/tgw8+sO7du7txez/99JNdeeWVtnv3breNPAv43nnnHTvttNNc0ZYOHTq4dXPmzLGmTZvaG2+84fqZAgAAAEB+sHr1aitTpkz0toqr5IbU1FQ3fu/xxx+3ggUL2qGHHmq//fab3X///Xkb8Kl/6cCBA+2ee+7Za/3gwYMJ+AAAAADEj7pWBjgtQ5kyZdIEfBmpWLGiC9r++OOPNOt1u2rVqhn+jipzauyefs938MEH29q1a10X0SJFiuTNxOvqxtm3b9+91l988cWuuAsAAAAA4P9TcKYM3axZs9Jk8HRb4/Qyot6U6sapx/l++OEHFwhmN9jbp4BPE60vXrx4r/Val1E5UQAAAADI62kZglhyQlMyTJw40Z566imXRLviiits27Zt0aqdPXv2tCFDhkQfr/tVpVO1UhToqaKniraoiEtO5LhLp0qJXnrppW4QYfv27aNj+O699173RwAAAAAA0jrvvPPszz//tNtvv911y1RNlJkzZ0YLuaxatcpV7owtBqP6KRpO16JFCzcPn4I/DaPLiYjn5Sw21cNVoXP06NG2Zs0at6569ep2ww032IABAywSpmpuyDaVmC1btqytXVbLypTOcWI47k6r0TboJgDJI0zHdap0AsB+2+Ptttn2mm3atOk/x6YlwvlpnSduswIlisV9+6nbd9gvl4xI+P2U4wyfAjpFmVq2bNni1pUuXTov2gYAAAAAWVP6KohZmTwLhRynao477jjbuHFjNNDzgz1F2LoPAAAAABDSDN/s2bNdGdD0duzYYZ988klutQsAAAAA/pPnRdwSb14QU0HkZcD39ddfR3/W9AsaaOjTrPEacKiBhAAAAACAkAV8qiKj8XtaMuq6Wbx4cXvooYdyu30AAAAAgLwO+FasWOEqdNavX9/mz5/v5uPzaeI/zcEXOws8AAAAAMRFSAqoJHTAV6dOHfdv7EzvAAAAAIAkKtoCAAAAAImCoi1ZS/wZtAEAAAAA+4SADwAAAACSFF06AQAAAIS7YEsQRVs8CwUyfElk5cqVbtqMxYsXB90UAAAAAGHM8B1wwAEuqEhP64oVK2YNGjSw3r17W58+fXKrjQAAAACQCcUme8cneS9iSZnhu/32261AgQJ2yimn2PDhw92in7XuqquuskaNGtkVV1xhEydOzJsWI1P16tVz/7Zu3doF4B07doxOpXHHHXdYzZo1rWjRotaqVSubOXMmexIAAABIcjnO8H366ad255132uWXX55m/WOPPWbvvvuuvfzyy9aiRQsbN26c9evXLzfbiv8wf/58O+yww+z999+3pk2bWpEiRdz6sWPH2ujRo91rpGBw0qRJdtppp9l3331nDRs2zPC5du7c6Rbf5s2b2f8AAABAyOQ4w/fOO+9Yp06d9lp//PHHu/ukS5cu9vPPP+dOC5FtlSpVcv9WqFDBqlatauXLl3e3R40aZYMHD7bzzz/fGjdubPfee6/L8o0ZMybT5xo5cqSVLVs2utSqVYtXAgAAAIlbtCWIJRkDPgURb7zxxl7rtc4PMLZt22alS5fOnRZivygzt2bNGuvQo
UOa9br9/fffZ/p7Q4YMsU2bNkWX1atX80oAAAAAyd6l87bbbnNj9D788EPXfVAWLFhgM2bMsAkTJrjb7733nh1zzDG531rEjcb6aQEAAAASGtMy5G7Ap3F5TZo0sfHjx9srr7zi1qmb4EcffWTt27d3t6+77rqcPi1ygT9mLyUlJbquTJkyVr16dZszZ06aIFy3/YAdAAAAQHLap4nX1R0wfRdBBK9y5cpWvHhxV4FTFTk1TYbG391www02dOhQO/DAA93YvcmTJ7u5+p577rmgmwwAAAAg0QI+lfn/6aefbN26de7nWEcffXRutQ05VKhQIVcdVVMwaPqMo446ymbPnm0DBgxw4/CUedVrpgzt66+/nmmFTgAAACA0vMi/SxDbDYGI53k5qi/z2Wef2YUXXmi//PKLpf9Vzf0W250QyVX8RdnCtctqWZnSOa71E3en1WgbdBOA5BEJxxeak7OvNABABvZ4u222veYSBhoelOjnp7UeHm4FiheL+/ZT/9lhq68amvD7KccZPs2/16ZNG3vrrbesWrVqLsgDAAAAgKCu9QVxvc8LyTXGHAd8P/74o7300kvWoEGDvGkR0li5cqXVq1cvW3ulZcuWbmweAAAAAOxTwNeuXTs3fo+ALz4KFy7sqqBmR3YDQwAAAAD5Q44DvquvvtoV/1i7dq01b97cBSSxWrRokZvty/dq1KhhS5cuzff7AQAAAMgQ8/DlbsB31llnuX8vvvji6DqN41MBF4q2AAAAAECIA74VK1bkTUsAAAAAIKeYliF3A746derk9FcAAAAAAIka8GmS7pNPPtmN19PPWTnttNNyq20AAAAAgLwO+Lp16+aKtFSuXNn9nBnG8AEAAACIp4j37xJvkWSahy81NTXDnwEAAAAASTSGDwAAAAASBtMy7H/AN27cOMuuAQMGZPuxAAAAAICAA74HH3wwze0///zTtm/fbuXKlXO3N27caCVKlHBj/Aj4AAAAACAxFMju3Hv+ctddd1mrVq3s+++/t/Xr17tFPx9yyCE2YsSIvG8xAAAAAKSfhy+IJRnH8N1222320ksvWePGjaPr9LOygGeffbZ17949t9uIBLLD22NFvGxdJwCA+CtQMBx7PTUl6BYAAPKJHAd8v//+u+3Zs2ev9SkpKfbHH3/kVrsAAAAA4L9RtCVLOU7VHH/88XbZZZfZl19+GV23cOFCu+KKK6xTp045fToAAAAAQKIEfJMmTbKqVatamzZtrGjRom457LDDrEqVKvbEE0/kTSsBAAAAAHnfpbNSpUo2Y8YM++GHH2zp0qVu3UEHHWSNGjXK+dYBAAAAYH/QpTNvJl5XgEeQBwAAAABJFPCpOMuUKVNs1qxZtm7dOktNTU1z/wcffJCb7QMAAACAzJHhy92A75prrnEB3ymnnGLNmjWzSCQc808AAAAAQH6T44Bv6tSp9sILL1iXLl3ypkUAAAAAgGACviJFiliDBg1yZ+sAAAAAsD+8yL9LvHmR5JyW4brrrrOxY8ea56mzLAAAAAAgaTJ8n376qX344Yf29ttvW9OmTa1w4cJp7n/llVdys30AAAAAkKmI9+8SbxEvSQO+cuXK2RlnnJE3rQEAAAAABBfwTZ48Ofe2DgAAAABInDF8smfPHnv//fftsccesy1btrh1a9assa1bt+Z2+wAAAADgv+fhC2JJxgzfL7/8YieddJKtWrXKdu7caSeccIKVLl3a7r33Xnd7woQJedNSAAAAAEDeZvg08XqbNm1sw4YNVrx48eh6jeubNWtWTp8OAAAAAJAoGb5PPvnE5s6d6+bji1W3bl377bffcrNtyIbevXvbxo0b7dVXX2V/AQAAANi/DF9qaqqlpKTstf7XX391XTuRN1auXGmRSMQWL17MLgYAAACQNwHfiSeeaGPGjIneVhCiYi1Dhw61Ll265PTpAAAAAGCfRWLm4ovrYkka8I0ePdrmzJljTZo0sR07dtiFF14Y7c6pwi3IG/Xq1
XP/tm7d2gXZHTt2THP/qFGjrFq1alahQgW76qqrbPfu3dH7VEzn+uuvtxo1aljJkiWtXbt2Nnv27Cy3p9/ZvHlzmgUAAABAko/hq1mzpn311Vc2bdo096+ye3379rXu3bunKeKC3DV//nw77LDD3HQYTZs2TTOG8sMPP3TBnv796aef7LzzzrNWrVpZv3793P39+/e3JUuW2NSpU6169eo2ffp0V2n1m2++sYYNG2a4vZEjR9rw4cN5GQEAAJDYvMi/SxDbDYGI53k5mkHi448/tvbt21uhQoX2mptPxVyOPvro3G4j/m8Mn7J8ixYtcsFcbNEWZeuWL19uBQsWdOvOPfdcK1CggAvwNH1G/fr13b8K9nydOnVyAeTdd9+daYZPi08Zvlq1atnKpdWsTOl9mr4xrs6teUTQTQCSRyQcX2hOJPGPT07q3mPhASBR7PF222x7zTZt2mRlypSxRKXz07Jly1qde+6yAsWKxX37qTt22C833ZLw+ynHGb5jjz3Wfv/9d6tcuXKa9fpDdV9GBV2Qt5Tx84M9UbZP2TvRv3pNGjVqlOZ3FMyp+2dmihYt6hYAAAAA4ZXjgE8JQY0hS+/vv/9248MQf4ULF05zW6+PqqmKutwqGFy4cGGaoFBKlSoV13YCAAAAuU79Fb2AtptMAd+ZZ54ZDSbUjTA2+6MM0tdff+26eiJv+GP2cppBVZEX/c66devsqKOOyqPWAQAAAAh1wKf+sX6GT/PtxRZoUTBy+OGHR4uEIPepC632+cyZM13hnGLFikVfk6yoK6cK6vTs2dNVWFUA+Oeff9qsWbOsRYsWdsopp/ByAQAAILzI8OVOwDd58mT3r6ZguOGGG6xEiRLZ/VXkAhXJGTdunN1xxx12++23u2zdf02tEPva3XnnnXbddde56TMqVqzoAvRTTz2V1wYAAABIYjmu0nncccfZK6+8YuXKldurSk63bt3sgw8+yO02IoGqIFGlE8iHqNKZ+6jSCSCBha5K590BVum8OQmrdH700Ue2a9euvdZrEvZPPvkkt9oFAAAAAP8p4v27xFsk2Yq2qCiLKCGoSbzXrl0bvU9FQTS2rEaNGnnTyiSfWy87WrZsaYsXL87zNgEAAABIHtkO+DTZtyp0alG3zvRUUOShhx7K7fYl/XQKjRs3ztZjsxsYAgAAAPkKRVtyJ+BbsWKFy+7Vr1/f5s+fb5UqVUpTpVNVJNPP84asKSO6dOlSdhMAAACAYAO+OnXquH/9Cb0BAAAAAIktx0VbfBrHt2rVqr0KuJx22mm50S4AAAAA+G906czdgO/nn3+2M844w7755hs3ns+f1UE/+wVcAAAAAADBK5DTX7jmmmtcAZF169a5yde/++47+/jjj61NmzbZnggcAAAAAHJzWoYglqTM8M2bN89Nrl6xYkUrUKCAW4488kgbOXKkDRgwwBYtWpQ3LQUAAAAA5G2GT102S5cu7X5W0LdmzZpoUZdly5bl9OkAAAAAAImS4WvWrJl99dVXrltnu3bt7L777nPTMjz++ONuygYAAAAAiBsv8u8Sb14A24xHwHfrrbfatm3b3M933HGHnXrqqXbUUUdZhQoVbNq0aXnRRgAAAABAPAK+zp07R39u0KCBmzh8/fr1dsABB0QrdQIAAABAXDAtQ97MwxerfPnyufE0CIF/UlOsUGpIShKFQZgukvzfFCzIfyIFC1pYeGGZGqhAePappYZknwIA9r1oy+WXX26//vprdh7qunU+99xz2XosAAAAACDgDF+lSpWsadOm1qFDB+vataubc6969epWrFgx27Bhgy1ZssQ+/fRTmzp1qluvAi4AAAAAkNeCmhMv4iVRwDdixAjr37+/PfHEE/bII4+4AC+Wpmno1KmTC/ROOumkvGorAAAAACAvxvBVqVLFbrnlFrcoq7dq1Sr7559/3Fx8Bx54IAVbAAAAAMQfRVtyv2iLKnJqAQAAAACEvGgLAAAAACB8cmVaBgAAAAAIR
EBFWywkRVvI8AEAAABAkiLDBwAAACC8KNqSuxm+oUOH2i+//JLTXwMAAAAAJHrA99prr7lpGI4//nh7/vnnbefOnXnTMgAAAABAfAO+xYsX24IFC6xp06Z2zTXXWNWqVe2KK65w6wAAAAAgkC6dQSzJWrSldevWNm7cOFuzZo09+eST9uuvv1qHDh2sRYsWNnbsWNu0aVPutxQAAAAAEL8qnZ7n2e7du23Xrl3uZ03GPn78eKtVq5ZNmzZtf54aAAAAAP6TpmQIaknagG/hwoXWv39/q1atmg0cONBl/L7//nv76KOP7Mcff7S77rrLBgwYkPutBQAAAADkXcDXvHlzO/zww23FihWuO+fq1avtnnvusQYNGkQfc8EFF9iff/6Z06cGAAAAAAQ5D9+5555rF198sdWoUSPTx1SsWNFSU1P3t20AAAAAgHhl+DReb8qUKbZ58+b92SayIRKJ2Kuvvsq+AgAAABCfDF/hwoVtx44d+741xJ2CdL1uAAAAQFIKaooEz5JzDN9VV11l9957r+3ZsydvWpRPqLKpX/imWLFiVqdOHRs5cqS7r27duu7fM844w2X6/Nvy6KOPuonvixQpYo0bN7ZnnnkmzfPq8XrMaaedZiVLlnQFdOS1116zQw45xG2rfv36Nnz4cF5DAAAAIMnleAyfJlifNWuWvfvuu66Ai4KKWK+88kputi9paR7D119/3V544QWrXbu2K36jxd/HlStXtsmTJ9tJJ51kBQsWdOunT5/uJrsfM2aMderUyd58803r06eP1axZ04499tjocw8bNswV0tHjChUqZJ988on17NnTbfOoo46y5cuX26WXXuoeO3To0Azbt3PnTrf46MYLAAAA5IOAr1y5cnbWWWflTWvykVWrVlnDhg3tyCOPdFk5Zfh8lSpViu7rqlWrRtePGjXKevfubVdeeaW7PWjQIPvss8/c+tiA78ILL3SBoE9Fdm666Sbr1auXu60M34gRI+zGG2/MNOBTtlFZQAAAACCRBTUnXsRL0oBPWSfsPwVuJ5xwguuWqSzeqaeeaieeeGKWv6O5Dv3MnK9Dhw42duzYNOvatGmT5vZXX31lc+bMiXbvlJSUFDcec/v27VaiRIm9tjVkyBAXUMZm+GrVqpXjvxMAAABAiAI+0fi92bNnu66ByiaVLl3a1qxZY2XKlLFSpUrlfiuTkMbTaS7Dt99+295//3033YW6ab700kv7/dzpu9lu3brVZevOPPPMvR6rMX0ZKVq0qFsAAACAhBeSbFsoAr5ffvnFZaTUJVFjvJSlUsCnQi66PWHChLxpaRJSgHzeeee55eyzz3b7df369Va+fHlXWVNZuFgHH3ywy9T5XTNFt5s0afKfweWyZcusQYMGefa3AAAAAEiCgE9FQ9RlUN0EK1SoEF2vipL9+vXL7fYlrQceeMBV6GzdurUVKFDAXnzxRTdeT+P2RJU5VRxHXTaVaTvggAPshhtucJlA/Y6ygW+88YYrkqMMYVZuv/1212VUxWEUWGp7ev2+/fZbu/POO+P0FwMAAABI+GkZVPHx1ltvddMCxFKA8ttvv+Vm25KasqL33XefC57btm1rK1eutBkzZrhgTEaPHm3vvfeeGzenAE+6devmxuupSEvTpk3tsccec2MqO3bsmOW2Onfu7Cp6qrKqtnX44Yfbgw8+mKZQDAAAABDqefiCWJIxw5eamrpXV0P59ddfXRCD7FE2NKuMaNeuXd2S3hVXXOGWzHiel2nQpwUAAABA/pHjDJ8qSWp+N5+mFFBREJX379KlS263DwAAAAD+c1qGIJakDPjU1dAvFKKy/qrS6XfnVOGW/EzdMhUAZ2dp1apV0M0FAAAAkORy3KWzZs2aruDH1KlT7euvv3bZvb59+1r37t2tePHilp+psqbm1cuOevXq5Xl7AAAAAORv+zQPX6FCheyiiy7K/daEXI0aNWzp0qVBNwMAAADIP4IqoOJZcgZ8Tz/9dJb39+zZc
3/aAwAAAAAIch6+WLt377bt27e7aRpKlChBwAcAAAAgboIqoBJJ1qItGzZsSLNoDN+yZcvsyCOPtP/9739500oAAAAAQN4HfBlp2LCh3XPPPXtl/wAAAAAAISvakuETFSpka9asya2nAwAAAID/RtGW3A34Xn/99TS3Pc+z33//3caPH28dOnTI6dMBAAAAABIl4OvWrVua25pEvFKlSnbccce5SdkBAAAAIG5ClOF7+OGH7f7777e1a9day5Yt7aGHHrLDDjvsP39Pc6BfcMEFdvrpp9urr76atwFfampqTn8FAAAAAPK1adOm2aBBg2zChAnWrl07GzNmjHXu3NkVwKxcuXKmv7dy5Uq7/vrr7aijjopv0Za//vrLNm/evK+/DgAAAAD5xgMPPGD9+vWzPn36WJMmTVzgp2ntJk2alOnvpKSkWPfu3W348OFWv379vA/4Nm7caFdddZVVrFjRqlSpYgcccIBVrVrVhgwZ4ubiAwAAAIAg5uELYhElwWKXnTt3Wnq7du2yhQsXWqdOnaLrChQo4G7PmzfPMnPHHXe47F/fvn1tX2W7S+f69evtiCOOsN9++81FmQcffLBbv2TJEtf39L333rNPP/3Uvv76a/vss89swIAB+9woJK7d/7cgd0QKFQ7NrvR277JQKFDQQiM1xcLA27Mn6CYkoRANjwjLZyoknycAyadWrVppbg8dOtSGDRu2V+9IZeuUNIul20uXLs3weRVbPfnkk7Z48eL9al+2Az5Fl0WKFLHly5fv1VDdd+KJJ1qPHj3s3XfftXHjxu1XowAAAAAgDEVbVq9ebWXKlImuLlq06H4/9ZYtW1xsNXHiRNe7Mi4Bn6rBPPbYY3sFe6Junffdd5916dLFRbS9evXar0YBAAAAQBiUKVMmTcCXEQVtBQsWtD/++CPNet1WLJWekmwq1tK1a9e9imdq/nMVejnwwANzdwyf5tpr2rRppvc3a9bM9UNVwAcAAAAA+Jd6Sh566KE2a9asNAGcbmvYXHoHHXSQffPNN647p7+cdtppduyxx7qf03cjzZUMn6JSRZk1a9bM8P4VK1ZkWU4UAAAAAPLrPHyDBg1yPSHbtGnj5t7TtAzbtm1zVTulZ8+eVqNGDRs5cqQVK1bMJdRilStXzv2bfn2uBXyaI+KWW25xxVkUocZSJZrbbrvNTjrppBxtHAAAAADyg/POO8/+/PNPu/32293E661atbKZM2dGh8ytWrXK9ZjMbRHP87IVm/76668uGtUgRE3NoDSjfvX777+3Rx55xAV9CxYssNq1a+d6IxE8lZgtW7asfb2kspUunftvxNzWr/aRFgaRwmkvniQyqnTmAaoK5l+RiIVGJPGP+Q6fJyDX7PF222x7zTZt2vSfY9MS4fz0oAF3W8GixeK+/ZSdO2zpuJsTfj9lO8OnrpyaI+LKK6908+75cWIkErETTjjBxo8fT7AHAAAAAGEM+KRevXr29ttv24YNG+zHH3906xo0aGDly5fPq/YBAAAAAOIR8PkOOOAAN9AQAAAAAAIVkqItQQlJx3wAAAAAQFwyfAAAAACQCCLev0sQ2w0DMnwAAAAAkKQI+AAAAAAgSdGlEwAAAEB4UbQlS2T4AAAAACBJkeEDAAAAEF5k+LJEhg8AAAAAkhQZPgAAAAChFfm/JYjthgEZPgAAAABIUmT4kKGdO3e6xbd582b2FAAAABAyZPiQoZEjR1rZsmWjS61atdhTAAAASNyiLUEsIUDAhwwNGTLENm3aFF1Wr17NngIAAABChi6dyFDRokXdAgAAACSyiPfvEsR2w4AMHwAAAAAkKQI+AAAAAEhSBHz51JQpUywSCcvsIQAAAEAmKNqSJQK+fGrFihV2zDHHBN0MAAAAAHmIoi351Ntvv23jx48PuhkAAADA/gtJAZUgEPDlU/Pnzw+6CQAAAADyGF06AQAAACBJkeEDAAAAEFrMw5c1MnwAAAAAkKTI8AEAAAAI/7QMQWw3B
MjwAQAAAECSIuADAAAAgCRFl04AAAAAoUXRlqyR4QMAAACAJEWGDwAAAEB4UbQlS2T4AAAAACBJEfABAAAAQJKiSycAAACA0KJoS9YI+JAjXacPsALFiiX8XjvQPrMw8FJSLCxSjj3EwqDg7EUWFlvPaWdhUOql+RYaXkhmwQ1LO8ULx3EqUig8pzTenj1BNwFAPhKeoyMAAAAApEfRliwxhg8AAAAAkhQBHwAAAAAkKbp0AgAAAAgvunRmiQwfAAAAACQpMnwAAAAAQotpGbJGhg8AAAAAkhQBHwAAAAAkKbp0AgAAAAgvirZkiQwfAAAAACQpMnwAAAAAQivieW4JYrthQIYPAAAAAJIUAR8AAAAAJCm6dAIAAAAIL4q2ZIkMHwAAAAAkKTJ8AAAAAEIr4v27BLHdMCDDBwAAAABJioAvYJFIxC2fffZZmvU7d+60ChUquPtmz54dWPsAAAAAhBcBXwKoVauWTZ48Oc266dOnW6lSpfb7uXft2rXfzwEAAAAkfNGWIJYQIOBLAL169bKpU6faP//8E103adIktz69wYMHW6NGjaxEiRJWv359u+2222z37t3R+4cNG2atWrWyJ554wurVq2fFihWzp59+2mULlTWM1a1bN+vRo0ce/3UAAAAAgkLAlwAOPfRQq1u3rr388svu9qpVq+zjjz/OMBgrXbq0TZkyxZYsWWJjx461iRMn2oMPPpjmMT/99JN7rldeecUWL15s55xzjqWkpNjrr78efcy6devsrbfesosvvjjDNik43Lx5c5oFAAAASNSiLUEsYUDAlyAUeCmrJwrounTpYpUqVdrrcbfeequ1b9/eBYhdu3a166+/3l544YW9unEqq9e6dWtr0aKFFS9e3C688MI03UafffZZq127tnXs2DHD9owcOdLKli0bXdTtFAAAAEC4EPAliIsuusjmzZtnP//8swv4Msu8TZs2zTp06GBVq1Z1Y/wUACojGKtOnTp7BYv9+vWzd99913777Td3W9vo3bu3KwqTkSFDhtimTZuiy+rVq3PtbwUAAAAQHwR8CUJj7E499VTr27ev7dixw04++eS9HqOAsHv37i779+abb9qiRYvslltu2aswS8mSJff6XWX7WrZs6TJ/CxcutO+++84FfJkpWrSolSlTJs0CAAAAJByKtmSJidcTiLJ6CuZUmKVgwYJ73T937lyXvVOQ5/vll1+y/fyXXHKJjRkzxmX5OnXqRDdNAAAAIMkR8CWQk046yf78889Ms2kNGzZ03TdV0bNt27au6Iqmb8gujePTmD8VelGmDwAAAAi7oAqoRCjaghy/aSIRq1ixohUpUiTD+0877TQbOHCg9e/f3029oIyfpmXILhVfOeuss9zYP03JAAAAACC5keELmOdlfmmgXLlye91/3333uSXWtddem2YePi2ZUXdOjQPUGD0AAAAAyY2AL5/YsGGDzZ492y2PPPJI0M0BAAAAcrdoS7x5FgoEfLls5cqVVq9evWw9VlUzNTF6PKhKp4K+e++91xo3bhyXbQIAAAAIFgFfLitcuHC2A6rsBoa5FYgCAAAAySgsBVSCQMCXy2rUqGFLly7N7acFAAAAgBxj4nUAAAAASFJk+AAAAACEl6raZ1H5Ps8Esc19QIYPAAAAAJIUGT4AAAAAoS7YEkTRlkg4Enxk+AAAAAAgWdGlEwAAAACSFF06AQAAAISXulYG0b3Ss1AgwwcAAAAASYoMHwAAAIDQiqT+uwSx3TAgwwcAAAAASYoMH3Lk9pNfshKlCyb8Xnvy+noWCqkpFhYFP/wy6CYknVIvfm5hEClcxMKiQPlyFgapGzdZWHi791gYeCnhOZ5aJGKhEJJJpQFkjYAPAAAAQHhRtCVLdOkEAAAAgCRFhg8AAABAaEW8f5cgthsGZPgAAAAAIEkR8AEAAABAkqJLJwAAAIDwUkXZIKrKeuHo00mGDwAAAACSFBk+AAAAAKFF0ZaskeEDAAAAgCRFw
AcAAAAASYounQAAAADCS7VTgqif4lkokOEDAAAAgCRFhg8AAABAaFG0JWtk+AAAAAAgSRHwAQAAAECSoksnAAAAgPDyvH+XILYbAmT4AAAAACBJkeEDAAAAEFoUbckaGb6AzZ0711q1amXFihWzNm3a2KuvvmqRSMQWL17s7k9JSbG+fftavXr1rHjx4ta4cWMbO3Zs9Pffffdd97sbN25M87zXXHONHXfccdHbn376qR111FHuOWrVqmUDBgywbdu2xfEvBQAAABBvBHwB2rx5s3Xt2tWaN29uX375pY0YMcIGDx6c5jGpqalWs2ZNe/HFF23JkiV2++23280332wvvPCCu//444+3cuXK2csvvxz9HQWJ06ZNs+7du7vby5cvt5NOOsnOOuss+/rrr919CgD79++fadt27tzp2he7AAAAAAgXAr4APf/88y6bN3HiRGvSpImdfPLJdsMNN6R5TOHChW348OEu+6csn4K4Pn36RAO+ggUL2vnnn++eyzdr1iyX8VOAJyNHjnS/d+2111rDhg2tffv2Nm7cOHv66adtx44dGbZNv1O2bNnooqwgAAAAkHC8AJcQIOAL0LJly6xFixauS6bvsMMO2+txDz/8sB166KFWqVIlK1WqlD3++OO2atWq6P0K5mbPnm1r1qxxt5977jk75ZRTXOZPvvrqK5syZYr7XX/p3Lmzyx6uWLEiw7YNGTLENm3aFF1Wr16dB3sAAAAAQF6iaEuCmzp1ql1//fU2evRoO+KII6x06dJ2//332+effx59TNu2be3AAw90j73iiits+vTpLsDzbd261S677DI3bi+92rVrZ7jdokWLugUAAABIZBRtyRoBX4BUgOXZZ5914+X84GrBggVpHjNnzhzXBfPKK6+MrtOYvPSU5VNmT+P9ChQo4DJ8vkMOOcSN/2vQoEGe/j0AAAAAEgtdOgN04YUXum6Vl156qX3//ff2zjvv2KhRo9x9GtsnGnP3xRdfuPt++OEHu+222/YKCv2AT4Vf7rrrLjv77LPTZOdUCEbVQFWkRdU/f/zxR3vttdeyLNoCAAAAIPwI+AJUpkwZe+ONN1wQpqkZbrnlFleFU/xxfeqKeeaZZ9p5551n7dq1s7///jtNts+n7J3G/6kKp1+d06dxgh999JELGDU1Q+vWrd12qlevHqe/FAAAAMgjqV5wSwhEPM8LR0vzCXXLVBVOFUrRnHmJQtMyqFrnk1+2shKlC1qie7JRvaCbACSNSOEiFhYFyv9brCrRpW7cZGHh7d5joeClBt2C5MMpYr61x9tts+01dz6qBEWi8s9P258w3AoV/v9FEONlz+4dNve9oQm/nxjDFzBNjVC/fn2rUaOGq6ap7pfnnntuQgV7AAAAQMIKaooEz0KBLp25bOXKlW78XXYWdeNcu3atXXTRRXbwwQfbwIED7ZxzznHTLgAAAADA/iLDl8s0Ubqqb2aHJlK/8cYb3QIAAAAAuY2AL5epa+bSpUtz+2kBAAAAZCDyf3PxxVvEwoEunQAAAACQpMjwAQAAAAh3Rdkgqsp64ajaQoYPAAAAAJIUAR8AAAAAJCm6dAIAAAAILRVsCaRoi2ehQIYPAAAAAJIUAR8AAACA8PICXHLo4Ycftrp161qxYsWsXbt2Nn/+/EwfO3HiRDvqqKPsgAMOcEunTp2yfHxmCPgAAAAAII9NmzbNBg0aZEOHDrUvv/zSWrZsaZ07d7Z169Zl+PjZs2fbBRdcYB9++KHNmzfPatWqZSeeeKL99ttvOdouAR8AAAAA5LEHHnjA+vXrZ3369LEmTZrYhAkTrESJEjZp0qQMH//cc8/ZlVdeaa1atbKDDjrInnjiCUtNTbVZs2blaLsEfAAAAABCK+J5gS3ZtWvXLlu4cKHrlukrUKCAu63sXXZs377ddu/ebeXLl7ecoEoncuSY4r9b6eKJf53gSasXdBOApOHt3mVhkfJHxt1iEk4kYqERSfxjfuiEZLLmUL1Pw
7JPkZQ2b96c5nbRokXdEuuvv/6ylJQUq1KlSpr1ur106dJsbWfw4MFWvXr1NEFjdnAUBwAAABBeqQEuZm5sXdmyZaPLyJEjc/1PvOeee2zq1Kk2ffp0V/AlJ8jwAQAAAMA+Wr16tZUpUyZ6O312TypWrGgFCxa0P/74I8163a5atWqWzz9q1CgX8L3//vvWokWLHLePDB8AAAAA7CMFe7FLRgFfkSJF7NBDD01TcMUvwHLEEUdk+tz33XefjRgxwmbOnGlt2rTZp/aR4QMAAAAQWjktoJJbcrpNTcnQq1cvF7gddthhNmbMGNu2bZur2ik9e/a0GjVqRLuE3nvvvXb77bfb888/7+buW7t2rVtfqlQpt2QXAR8AAAAA5LHzzjvP/vzzTxfEKXjTdAvK3PmFXFatWuUqd/oeffRRV93z7LPPTvM8msdv2LBh2d4uAR8AAACA8FKiLYhCrV7Of6V///5uyWyi9VgrV6603MAYPgAAAABIUgR8AAAAAJCk6NIJAAAAILxUPCWAoi0WxDb3ARk+AAAAAEhSZPgAAAAAhFbE+3cJYrthQIYPAAAAAJIUAR8AAAAAJCm6dAIAAAAIL4q2ZIkMHwAAAAAkKTJ8AAAAAEIrkvrvEsR2w4AMHwAAAAAkKQI+AAAAAEhSdOkEAAAAEF4UbckSGb58YvDgwdaoUSMrUaKE1a9f32677TbbvXt30M0CAAAAkIfI8OUTpUuXtilTplj16tXtm2++sX79+rl1N954Y4aP37lzp1t8mzdvjmNrAQAAgGzy/m+JN89CgQxfPnHrrbda+/btrW7duta1a1e7/vrr7YUXXsj08SNHjrSyZctGl1q1asW1vQAAAAD2HwFfPjFt2jTr0KGDVa1a1UqVKuUCwFWrVmX6+CFDhtimTZuiy+rVq+PaXgAAAAD7j4AvH5g3b551797dunTpYm+++aYtWrTIbrnlFtu1a1emv1O0aFErU6ZMmgUAAABINBHPC2wJA8bw5QNz5861OnXquCDP98svvwTaJgAAAAB5j4AvH2jYsKHrvjl16lRr27atvfXWWzZ9+vSgmwUAAADsP6ZlyBJdOvOB0047zQYOHGj9+/e3Vq1auYyfpmUAAAAAkNzI8OUT9913n1tiXXvttYG1BwAAAEDeI+ADAAAAEF6qnZIa0HZDgC6dIbVy5UqLRCLZWtSNEwAAAED+Q4YvpAoXLmyNGzfO1mPr1auX5+0BAAAAghDUFAkRpmVAXqpRo4YtXbqUnQwAAAAgU3TpBAAAAIAkRZdOAAAAAOGl3pxBdK/0LBTI8AEAAABAkiLDBwAAACC8lN0LJMPnWRiQ4QMAAACAJEXABwAAAABJii6dAAAAAMIrVZPiBbTdECDDBwAAAABJigwfAAAAgNCKeJ5bgthuGJDhAwAAAIAkRYYPObIupYBtT+E6AQDsl5BcFQ7VIBXk6/dpwXJlLQxSNm4KugnJiWkZssSZOwAAAAAkKQI+AAAAAEhSdOkEAAAAEF506cwSGT4AAAAASFJk+AAAAACEFxm+LJHhAwAAAIAkRcAHAAAAAEmKLp0AAAAAwkvThUYC2m4IkOEDAAAAgCRFhg8AAABAaEU8zy1BbDcMyPABAAAAQJIi4AMAAACAJEWXTgAAAADhxTx8WSLDBwAAAABJigwfAAAAgPBK9VRBJZjthgAZPgAAAABIUgR8AVq5cqVFIhFbvHhxnm5n9uzZbjsbN27M0+0AAAAASCx06QQAAAAQXhRtyRIZPgAAAABIUgR8CWDp0qXWvn17K1asmDVr1sw++uijNPfr9mGHHWZFixa1atWq2U033WR79uyJ3r9z504bMGCAVa5c2T3HkUceaQsWLMh0e9u3b7eTTz7ZOnTokGk3Tz3n5s2b0ywAAABA4vH+f5YvnotRtAXZdMMNN9h1111nixYtsiOOOMK6du1qf//9t7vvt99+sy5duljbtm3tq6++skcffdSefPJJu/POO6O/f+ONN9rLL
79sTz31lH355ZfWoEED69y5s61fv36vbSnAO+GEEyw1NdXee+89K1euXIZtGjlypJUtWza61KpVi9cTAAAACBkyfAmgf//+dtZZZ9nBBx/sAjoFWArq5JFHHnHB1vjx4+2ggw6ybt262fDhw2306NEuaNu2bZv7nfvvv99l7Zo0aWITJ0604sWLR5/Dt3btWjvmmGNclvCNN96wEiVKZNqmIUOG2KZNm6LL6tWr83w/AAAAAMhdFG1JAMrq+QoVKmRt2rSx77//3t3Wv7pfVTZ96oq5detW+/XXX13Gbvfu3W6dr3Dhwq4LqP8cPmX2tH7atGlWsGDBLNuk7qNaAAAAgIRG0ZYskeHLR0455RT7+OOPbcmSJUE3BQAAAEAcEPAlgM8++yz6s4qxLFy40HXvFP07b94889zA0H/NmTPHSpcubTVr1rQDDzzQihQp4tb5lPFT0RZ174x1zz33WK9evez4448n6AMAAEBySPWCW0KAgC8BPPzwwzZ9+nRXrfOqq66yDRs22MUXX+zuu/LKK934uauvvtrd/9prr9nQoUNt0KBBVqBAAStZsqRdccUVrvDLzJkzXSDXr18/V4mzb9++e21r1KhR1r17dzvuuOPc8wEAAABIXozhSwDKvGlZvHixq7D5+uuvW8WKFd19NWrUsBkzZriArmXLlla+fHkXyN16661pfl8FXHr06GFbtmxxYwDfeecdO+CAAzLc3oMPPmgpKSku6Js9e7Y1atQobn8rAAAAgPiJeLF9BYFMaB4+VQ+d+201K1U68RPDg+r+/0I4AID9EFM0DLmEU69cV7BcWQuDlI2bLAz2eLtttr3mKrWXKVPGEv38tFPtK61QgfgXG9yTutPeX/VIwu+nxD9zBwAAAADsEwK+XLZy5Uo3hUJ2llatWuX25gEAAID8OS1DEEsIMIYvl2kOvMaNG2frsfXq1cvtzQMAAABAFAFfLlORFapfAgAAAEgEBHwAAAAAwsvNhxdA98rUcHTpZAwfAAAAACQpMnwAAAAAwiuoAioeGT4AAAAAQIDo0gkAAAAASYounQAAAADCy9VsCaJLp4UCGT4AAAAASFJk+AAAAACEF0VbskSGDwAAAACSFBk+5EjxSIqViISkwzIAIN+UHbdIJOgWIECp/+wIxf6PFArHqXdEn/s9QbcCuSUc7zoAAAAAyEhqqv4X0HYTH106AQAAACBJkeEDAAAAEF4UbckSGT4AAAAASFIEfAAAAACQpOjSCQAAACC86NKZJTJ8AAAAAJCkyPABAAAACK9UzRfqBbTdxEeGDwAAAACSFAEfAAAAACQpunQCAAAACC3PS3VLENsNAzJ8AAAAAJCkyPABAAAACPe0DEEUUPEo2gIAAAAACBBdOgEAAAAgSdGlEwAAAEB4ua6VdOnMDBk+AAAAAEhSBHxJZOXKlRaJRGzx4sVBNwUAAACIj9TU4JYQIOADAAAAgCRFwJdE6tWr5/5t3bq1y/R17NjR3U5NTbU77rjDatasaUWLFrVWrVrZzJkzA24tAAAAgLxGwJdE5s+f7/59//337ffff7dXXnnF3R47dqyNHj3aRo0aZV9//bV17tzZTjvtNPvxxx8zfa6dO3fa5s2b0ywAAABAQhZtCWoJAQK+JFKpUiX3b4UKFaxq1apWvnx5d1uB3uDBg+3888+3xo0b27333uuyfGPGjMn0uUaOHGlly5aNLrVq1Yrb3wEAAAAgdxDwJTll5tasWWMdOnRIs163v//++0x/b8iQIbZp06bosnr16ji0FgAAAMgZLzU1sCUMmIcPGdJYPy0AAAAAwosMXxIpUqSI+zclJSW6rkyZMla9enWbM2dOmsfqdpMmTeLeRgAAAADxQ4YviVSuXNmKFy/uKnCqImexYsXc+LsbbrjBhg4dagceeKAbuzd58mQ3V99zzz0XdJMBAACA/eOKpwRQQMWjaAvirFChQjZu3Dh77LHHXFbv9NNPd+sHDBhggwYNsuuuu86aN
2/uAsLXX3/dGjZsyGsEAAAAJLGI54UkNEXgxV+ULVz0XWUrXTrxewJfXufIoJsAAIinSCQ8+5tTr1wXCUvdgZhhN4lsj7fbPtzzsivcp+FBiX5+elzRc61Q5N+hTfG0x9tlH+x8IeH3U+KfuQMAAAAA9gkBX4JbuXKlRSKRbC0anwcAAAAAPoq2JLjChQu7ydKzo169enneHgAAACDxukkHMCeeF46RcQR8Ca5GjRq2dOnSoJsBAAAAIIQI+AAAAACElpfqmReJf7bNC0mGjzF8AAAAAJCkCPgAAAAAIEnRpRMAAABAeHmpARVtSbUwIMMHAAAAAEmKDB8AAACA0KJoS9bI8AEAAABAHDz88MNWt25dK1asmLVr187mz5+f5eNffPFFO+igg9zjmzdvbjNmzMjxNgn4AAAAACCPTZs2zQYNGmRDhw61L7/80lq2bGmdO3e2devWZfj4uXPn2gUXXGB9+/a1RYsWWbdu3dzy7bff5mi7BHwAAAAAwkvFU4JacuCBBx6wfv36WZ8+faxJkyY2YcIEK1GihE2aNCnDx48dO9ZOOukku+GGG+zggw+2ESNG2CGHHGLjx4/PyWYZw4ecTSy5dWs4qhHt8XYH3QQAQFxFwrO/QzJZc5hEvJDkMLwUC9N5VFgmFt9ju828gLZrZps3b06zvmjRom6JtWvXLlu4cKENGTIkuq5AgQLWqVMnmzdvXobPr/XKCMZSRvDVV1/NUTsp2oJs2bJli/v3qHZ/hWSPvRZ0AwAA8RSO81LklZ3s2rw6/ytbtmzC7twiRYpY1apV7dO1OR/XlltKlSpltWrVSrNOXTaHDRuWZt1ff/1lKSkpVqVKlTTrdXvp0qUZPvfatWszfLzW5wQBH7KlevXqtnr1aitdurRFIrlzFVVXQ/QB0fOWKVMmYV+JsLQzTG0NSzuFtrJPw/Be5X3KPuV9mj8/U3nVTmX2FOzp/C+RqZDJihUrXPYsKJ7n7XVunD67FzQCPmSLUs41a9bMk72lA1QiH0zD1s4wtTUs7RTayj4Nw3uV9yn7lPdp/vxM5UU7Ezmzlz7o05LoKlasaAULFrQ//vgjzXrdVpYyI1qfk8dnJiQdngEAAAAgnIoUKWKHHnqozZo1K7ouNTXV3T7iiCMy/B2tj328vPfee5k+PjNk+AAAAAAgj6kAS69evaxNmzZ22GGH2ZgxY2zbtm2uaqf07NnTatSoYSNHjnS3r7nmGjvmmGNs9OjRdsopp9jUqVPtiy++sMcffzxH2yXgQ2DUv1mDWhOtn3NY2xmmtoalnUJb2adheK/yPmWf8j7Nn5+psLQT/zrvvPPszz//tNtvv90VXmnVqpXNnDkzWphl1apVbhiVr3379vb888/brbfeajfffLM1bNjQVehs1qyZ5UTEC0u9VQAAAABAjjCGDwAAAACSFAEfAAAAACQpAj4AAAAASFIEfAAAAACQpAj4gBAIQ22lsWPH2rx584JuRlLbvXt30E0AQi8Mx1MAyE0EfEAIRCIR9+/GjRstESnQe/DBB+3RRx+1hQsXBt2cpDFnzhzbtWuX+/mee+6xN954I2FPVhO1XWHm71P2bf46noZR7Hs0kd+vmuQ6DBJ5HyKcCPiAkBxcn3nmGbvxxhttz549CdfeI444wk0KunTpUhs3bpwtWLDAElVYvvB//vlnu+qqq+zCCy+0q6++2s3Bc/DBB0dPVhNtn/rt2rFjh23fvt0SVVhe/9h9qjmbNm/ebJs2bYreh+Q9nmYlUV97vT/VA0H7U+/bRGun/xr785tpHjMdqxJRmI6nCA8CPuRL/sFfX1L6ORFPotNTMPXuu+9G25soJyl+N8OzzjrLLrvssmjQ9/XXX1sifpH6X/hz5861GTNm2IYNGywlJcUSTe3atd0kqx9//LFNmjTJZfsU8CVat87Yfaos5DnnnOMmhB0yZIh7vyZqW5csWWKffvqp/fHHH7Zt2zZLJPps++28++673US9HTp0sLPPP
ts++eSTNJPyIrmOp+l98MEH9vrrr9sLL7zgbifia6+LffrcH3vssTZgwABbv369a2ciBX2xQeibb75p119/vRUrVswSTZiOpwiXxDtyIPQS9Ysz/cFf3ePOPfdca9++veuKuGLFCkvEfegHI8OHD7eSJUu6fyURglS1s3Dhwu7nO++803XnXLt2rT3//PN211132ZdffmmJxP8i1Zf9mWee6bJnyk4+9dRTtmXLFkukL/1ChQpZxYoVrXjx4la3bl3XZXbr1q1ufydSgOrv01tuucXuv/9+69ixoztJUTdfZSWnTp1qiRZEKZDWBYrzzz/fTjnlFLvmmmts5cqVlij8z/Ztt91mDzzwgDuJ1kUUXenv0qWLrVu3LugmhkpYjqfpDR482K644gq7/fbbbcSIEdaiRYuEep/6n6X77rvPunXr5r5P9bk/9dRT7a+//kqIoK9fv37Wtm1b97P/+S9YsKCVK1fO/ZxoF9DCcjxFCHlALkpNTY3+vGXLFm/lypUJuX/nzZvnFS9e3BsyZIh31llneS1btvT69Onjff/9916i2rlzp3fTTTd5Xbp08Xbs2OElktGjR3tlypTx3n//fe/rr7/2JkyY4DVq1Mjr3r27t2jRooR6X7733nveoYce6n344Yfe6tWrvR49engtWrTwxowZ423atCnQdqakpKS5vWHDBu/XX3/1nn32Wa9t27buvbp169Y0j0mE98IPP/zgNW/e3Hv77bej67788kvv0ksv9Tp06OAtXrzYSxQPPPCAV6VKFe+DDz5wt3v37u1VqFDB++STT7xEotf9iCOO8N599113+4033vDKlSvnPfLII2neK7HvbSTH8VQefvhhr2LFit4XX3zhbj/55JNeJBJxxy9f0K/9q6++6jVr1sz77LPP3O3XX3/dK1WqlFe7dm2vadOm3p9//unW79mzJ7A26nNeq1Yt7+STT46ue+qpp7wjjzwyIfZh2I+nCA8CPuQK/+TD//euu+7yTjzxRK9EiRLeihUrEmovqz3Dhw/37rvvvui6J554wh1Ie/bsGWjQF3vCrzYde+yxLjj9+++/3bpvv/3WBaqTJk3yEoG+LNXm008/3bvyyivT3Pfcc895lStX9s4777yECPrk+eef9wYNGuQC/Vj6IlXQN3bs2MCCvtjXXoHzrFmzovvtn3/+ca+5gr5zzz3X2759u1uvff7WW28F2lb58ccfXdCkE8BYCxYs8OrVq+e98MILXiK8V3Vif9ppp3njxo1z67TvSpcu7T322GPutu5PH1AHsU/VBl2MOOCAA7x169Z5M2bMcCfSjz76qLtfr78C1zVr1nhhENRJddiOp7GuueYa7/7773c/v/TSS+6Cmv8+Deo9mp7elzfeeKP7+c0333THgPHjx7uLFGXLlnUXLNauXRt0M705c+Z4NWrUcOck8vjjj7v3QqII4/EU4UOXTuyz2K4a6obw008/uW58rVu3tv/973+ui1y7du2sTJkyCbOXly9f7rqdTJw40XWZ8/Xt29f69Onj/gZ1T/n2228D7c6hrhylSpVyA+D79+/vup299dZbVrNmTdcFSWM6NPYoEajN6nboj4Pyu0ypu2SvXr1s5syZrkvS999/H2g7dYHrkUcecV0j9frGdvN67LHHXNfOKVOm2Pjx4wMZJO+/9iokoe6Gek+qu/GTTz7pxpp0797drrzyStf1uE2bNnbCCSe4wgMnnnhiXNup7oR+Wx9//HHXHnWNq169uvt86fX3963aWbVqVfv8888taOqyp65c//zzjxsP995777mxcfqsXXrppa4a6tNPP23z588PpH3+Ph06dKgbE6Xj0zHHHGOjRo1y7dS6yy+/3D1G+/yjjz4K/DOVXUF1QQ3j8dT3zTffuGPqrFmz3HeTxnPpfarvXU2Bo2NZ0E4++WQbOHCg+0zpe1Ndj1VoSt/79evXd136r7322qCb6Y6j06ZNs++++859/+uzpf04ZswYdy6g11/3a
x///vvvcW1bWI+nCKGgI06E3zvvvOONHDnSK1mypHfmmWd6o0aNcleg69ev7zJ9iUbZvWrVqnmnnnqq99tvv6W5T1d6mzRp4l1++eWuy08QV/geeugh13Xnu+++i2Z7rr76aq969ereKaec4h122GGuu8xXX3211+/Gs52x7rnnHvf6p8/k3Xvvvd7hhx/u9e/fP67tlIy2p65FypDVrVvXZSDTd+XSfcryxjMjEbstZZf1/tOVXHXbufvuu70CBQq4bI7oPan3w7XXXutdddVV3u7du6N/Vzx8+umn7nXWe1NtqFSpUjSDP3ToUK9YsWLeK6+84u3atcutU7b0kEMOiWbU4imz95s+9w0bNnQZk9jMjrJluuqvq/9BtVNd4mrWrOktXLjQ3e7Xr587Fmhfx3aVV/e0zp07x/0ztS/Urb9gwYLeM888E7dthuV4mp7eey+//LL7Wdnc9u3buwykusj71q9f79od9Hdr7HFLGSntT3WTl99//9317FAvhSD2Z2bHfh2/6tSp494PRx11lHf88ce7Lqnt2rXzWrVq5XXq1Cmu3U/DdDxF+BHwYZ8P9joIaQzEgQce6E6iJk6cGD1YqluHunMEPc4kdruxP6s7pw706o6isTKxdGISVDdUfWEqINWJX3pz5851JwEKCPSFpX3ufxHE+0tU3XfU3cT/ghe1R4G0vsTUjUfdENXVU2NP/H0fry9/PxASfZnqNVbXOP8+nTDrC37atGl7BfZBvWd10eT66693i09tULCn1/vBBx/8z781r6k9OpFTV0N1hfzmm2/S3K8TaZ2kaFzcwIEDoydU8Wxj+veZLkLohNTv/vjTTz+5Mbtql+h9qpNovSc0rieo8UY67txxxx3eiBEj0qxXNzRdPOvVq5d7bxx99NFufI//2U/0oG/z5s1e3759XRfFeEvk42l6Oj4dd9xx0c+/xkLrYlnr1q1dF0l99pYvX+7GHKprd7w/U1lRIKKgSe/VmTNnusBJi//ejOdnKvbzoPGPCjr1+fe7wev7Sa+5P4ZP/H0Z7++psBxPkRwI+JBtKhiR/sqSvkh1NfqPP/5Is/6MM85w46R0YAo62NMBX2O0zj77bBfg+Sf4yp7oy/SGG27YK9MXhI8++shd3dfB3w+kMtp/OoFSRk0nAzoBiIfYNmgMnMbmVa1a1X35+NkHfaHqPaIvLmVQVLRFS/ov07ykq94ar+EbPHiwO1lWe5W58wse+EGfXn+Nh0if6Yv3SbS2f8UVV0RPPGP5QV/hwoUDv6ovCkjUzvLly7tCAumpqMhFF13knXDCCe5v8k+i43HSN2zYMDeuyKfPtq7oq9iJ3pv+ib+umus90aBBA3eiqotTei/Es63p328qLKH9qn2XnoKWc845x/WguPnmm6OfqbCc+OlE9qSTToprQJXIx9OsCrXo+Om34/PPP3fvz8aNG7vjrQI9vVeDep9m9f7VcVRt1QVgBa5BXJCIfW31Xa/XX58rZUl1/Pe/GxT0qXCTguesniO/H0+RXAj4kG3K4BUqVOg/B7hrYLkKDCRCsZbp06e7L1AFfCrIoZ915VFX9OXOO+90X6IqfhF0AQTtLwXJ6moW24Ur9gvTP8irW5eyabGFZ+JVPUzFbdT9acmSJS7rpJPmyy67LPoYnUxPmTLFXT2PZ5dDVYpT1q5r166ufTrh05e9uhyrnSrWoS5Syk6K2qauUeqKpIsC8ZTRSZDekzqZV/c3f1B+7MmHTvp1VTroqnIqeKET+AsvvNBVEfSrW6YPPmJf83gEJnrNddKuE6OPP/7YFedQsD979mz3flR33TZt2rj3p6iCoDJq6nas+/32xqOtGb2G2q4+WzoRVZv/60Q5bCd827Zti+v2wnA8Td8OvQeUJdP3kZ+RWrVqlctGqrun3hfxfJ/mhParXmNl0/x9HFTGVN896h6pCp36nKsnh7Jj+g7wu0sr+NP5jC5gBilRj6dIPgR8yHGVQ40pUje9jE5edPDSi
ZWfiQiyu5ECOFVe9LvCqduJrpKqm0Rsu5QFUjep9FnKvJTZftGXu076VdY6NpuT0UlKt27dXD//eAUAes2VFVP3Er8NGzdudF+uuqp7ySWXBH5iqpN5nfArm6MuMLHdIBUAKsuroMCvbKkTkuuuuy6wLkfLli1zgYm6wPpf4np/KujzA5OMuiUHFfTF7idlyrU/dZKiv8Gn92TsdCzxbKsyuMqQatF+VBbfpxM9XeXXlBxTp07N8Pfj8T6Iff11TFLFRT/DrPejxpMpcz5//vzAX++wCOPxVHTs1IUKHUdF29Z7Vheu/CkNwh7sB5HhU0ZM419jqaup9uttt90W3YfqNhvkvkz04ymSCwEf/lP6A6LmsMks6FOXBJU/jp0/Jii//PKLO8DrQKovfmVylOnzafC+L6sv17z8AlRmSSefCkD8Utu6Mn3LLbd4Bx10UJoT1tgDvR4fW4ggr+kKuLrHKWMWO/ZBdLKiogLqdqQvrCDE7tMXX3zRdR/TF2f68VAK+tQ1TpkUvziCLx5f/LGvobIPGkumLme6+qwxWrqqrzFlClbVhTN9We70zxE0BSjanyrBrhLyxxxzjAtY4n0SFZtJ0BVyZW5V1lzBfCwFfdrPKtQRRCn+2NdOvQuUzVEBIWXI1RtBdLzSGCON01PxnkR6vRNRGI+noos8Opbq86/sjn8RQhd+1FZdsAiT2P2pY2uQny/NrXrBBRe4n2OPReoloYu++j5LxAA6UY6nSE4EfMjSX3/9Ff1ZB3BV3xJVOIwN+vwDrcbI6Ms1EWgQvMaQqd3q2qVgzz8xVNfEjh07ugAgqC9FZRbVHVJt0xe/MlP+ld6ff/7Zu/XWW93BPv2ccb68nNQ+o5NMzQumL0yNh1LbYilTocnXld2Nd1bXb2vsl6KyYzqp14mzMiWxFBBojIkf/AdxQq2uYwpINbZI78k+ffq4am0aW+KPK1LQp5PQeL9H94Wq2iqIVpYk3mN3YjPz6naurrHq0qnXWJnn2Imq/YtS6vZ78cUXe0HR50djdnShRMdLjc3TxRRdTBPtQ/VO0DjDIOcFTXRhOZ5mRYGeqhjrAo8umD399NNuWIQuBoTltY99HZ599ll33NL72R86kVcyO8aom7aKnaQfE6d9q+NUvLsXh+l4iuRFwIdM6WRU/eB1oq8xEBpbEvulqAN7+kyfKEMRzxNpHQgz6vqkA6VO6jVuL30hDH3pa+xeUMVadPVOY0b8rhv+wG11N/O/JHWSMmDAAHelMvbvyuurfbFfLDphUsbBL3Sj94K6mBx88MHu31i6oh7vKmex29HrHZvpUXZMJ/06uVe1tlia8iCIL1DtHwVzej/6V8CVDdeYVwUrou59eo11UqJxp4k8XiP2fakLLP7teI3dSV/WXFk9/xil45eKMujEP/0YTXWlDeoESu1TaXU/o+e3R2OJVOzIH8OjfahMBVf3w308TU9BnCrHqgeKT59xZXNVsVEXqtSlW+1P3wshLMGexsZlVIAkN8V+fjV+Wxd5Ygt2qUK0snm6YKYqzTruavy+1idq1jzo4ymSGwEfMqXATVcZFfTFlgxOf4DPakxfXlJXndgDoUpXq6S1ukfpBEqU3VFVM82v9cQTT7ggQFdTNZBfJ/3xEnvSri96BSH+GC1VFdQJv7qfaeyOvvD9K9MahxjPcTyxX6KaT1Eny7rSqCuOfrdXtd8P+pTxSy+egb5PVSzVjU9f6Cp44F/B1eutdQqw0gd96Z8jHu30TzDVVUcnJ2+88YZ77TWWRxRYqzhD+gAl0YO+jF7zeOzb/yprrmOC3hd6H6uAQ1BtjKWiFmqrCknEUtsVoGTUFY6gL5zH0/SUzVU3br3+Gg8d28VUFJToe03jodXNO5E/95kFe8pUpp+PNS9pqIGyujpPUVVOHe/V40SLepxoX6tSr94Lypr75wyJHPQFdTxFciPgQ5YHFn1B6SCuq6c6UclI+u6d8TiYqnhM0aJFXQVGUbctfdHo6p0O/
rrSr3aJrvyp+5bW+ZOr+pPsxoMqLqrs94YNG6LrdHVf4woVkKrri3/C7+9vfXHpC8sX7y8nZUDVnUxlwpVl0n5T4Ox3n9NJloI9dUtLn+GNN7VV3SPVZUsBv8aQ6qTK7w710ksvuUmq1f4gu0hpzKiyoLqQopM5BdIKVPzX3s/+KDjxu/aFQdBjd/6rrLmODarOp/ewX6EvXpQR9/ePCgipR4FO6pV51vs19jMumgz6qquuimsbwyaMx1NRRVgFJboIoWkXlGnUeza2i2nsd6/fxkQN+jIL9vI6sxfroYcecvtQVUx1wUQXyjSMQ8d6v32qyvy///3PnTMkaoXTRDqeInkR8CFT+lLSGD51NVHXKH2R+lfu0n9h+oVc8vrgFPuFqMIcCkR1kFd3LmVGfKokqS6o6rMf20VCg7X9wfx5TftIVcD0RahF4wpiT1L8ybZ1FdIvwa2/QbfVvSuoq/rKiulqqLrJ+OMydcVcle40Js6fwFxdpBTsBZl9UCZXAb5fdVN0NV8n9+qyG3tCoomfg+rGqc+S3gNqh+iCg4oIKejzu3Gq65k+ZzrpD0tGJ6ixO9kpax67D1XOXp+peL7+6m6q+b90wUldCfWzf9FMF0t0QUXTQfiBiI5NqiCrzDqS53gq+g5VN15/PkBd/ClRooS7QFmvXj3v9ttvjz42ttdKomahYj9HQQV7oiEb+mzF0veSvqtUnCkjiXxsTYTjKZIXAR8ypMBNJx8+XZVW5kEHH33p+tQlxQ8AdAVNB6jJkyfn6V7VAV1X8HSip4pWOnFWW1V2OZYKYSjo04HT79ITBBVkULEABcQqER5bIUzdS/WFry8hXXXUhPWq3hfkl5O6RGnSWv/qqDKj48ePd1emVT1M+9ov3hPvdiqz6I8RFZ1g6Kr50qVL05wsacyGTv5VFCMRunGKTup09dnvSqxqoiosoMBUExar0IQC1bBMrBvU2J2cljVXcBV7whTP/apgQ93HNc4wfZdinajqgpVO+tUNWd18daElkbMPiSBsx1NRW1TUSt9DukCp7yUNMdDFR73uen/oPRA2/nd+EMGeqOeGlvSvr4pi6ZiaPoOeyBLheIrkRsCHDCkzpkBKY6H8k1d9OWlMnwZCq8uMxsXpBCX2S1RdJ5Rd86+w5gV13VHg6Xd9UtcYHRwfeeSRvb7QNRaiSJEirhJavK+Wqi3adwqKVXFRXZHUTp2kKIAWZSPU/VB/j072NS7OP+EL8uquAjq9hpqf0B+npy/P1q1bu660559/flzbqO2oO55/QcEP+tQmneDryn7sftf+1f6MnYcvCBqjp32pNilY1XgyBX5++3XxQl29NCYyNlua6Cf9iTB2J7tlzfWZiueJvvaNvz11ida+UXuU1fHn2/PpNVeQoveFMtD+657owX4Qwng8VWY/tv3+hUi137+wo0BPF3v0XRWmcVoag6r9r8x1XgclmX0edOzRa5x+Xk0F0/quCvJCbzIcT5FcCPiQ4RehrkYr4PMLdcSeiKhIgq6eqftZRiWDczvYS98+tUHdeXSgV3cpv3unrppqvE76LwcFhpqGISg//fSTGziuboevvfZa9CRFGQntV52k6IRfV6KDPuGL3dfqfqb3gN8NSYGLXntdoQ7qxKRv375uXz7zzDPRK/saC6XuUvqS9+l9qXXKTAZF70W91sqM64qzgjxdSNG8a35RoYwk+sl+IozdSdSy5uknVVeXePWA8MdrqXu0Pvfpj2l+FdwwBPtBC8vxVD0i1Db9G0vBnSYGF703lQVW1954VzjOjYtZGoOs7qp5KfazorF66smjwjZ6bTWmXFlcff/7PYs0v6FuK9ufqF1iw3Y8RXIg4EOUTo5vuukmV+RCX1IaG6WDekbUZS4eg8r9L7/0fdh15U4Vt1SAxadCCApQMgr64tnF5bbbbnPdImMpQPXn/dKXe+xJSqKe8OuEVYG9srr6ktW+1uK/JvFsZ+y2dEVcXSEV9PlX0TWWQ3OuKbusf
e1nn+N58pz+5ELjytQGzfeoYjLKOCij1759+zTv20R5vcM0dicRy5rH7hsVFenZs2eak2GdgOoEWYGK/77U2LLY7tFhOEGNp7AeTzWPngqK+GOw/OJieo8oENWxQJlojddt3rx5tI1he/3zssukxuApK+7TMAPNAavveHWBVaZUF87Uc0Ljd1WsSxd9dcyN7RqfyAF0oh9PkVwI+OC+ZFQdsHv37q4bhA6YGvSsg4/GSqj73pgxY1xmR3OGxYrHF5Su6Krbnsa66IqeX3L/888/dyf+saWtdSKtEsw6QYh3N65vv/02WlBAXziqxKhASVcc1TVDY0v8sWYqcqMD+9VXXx23IjL78mWkblMK+hRMKaAO8ks0fdCnrqV+UR5VQ1SGT/tdYzp0BT2osXD6svZPPFUpVicpftZB3c3OOuss181YJ4RhFfTYnUQua64y8TpeqcKeup+nH3+mMbHKSuuihLK9ZPSS63iqwNMvyKPAVNMvaByn/92piz4K+jSVhIKWsIzZjSftC33faD+q+JGKLukCtKZW0dh9XexTd22dEyjbpy69qsCsYR0qOhaWrvFhOJ4ieRDw5VNZnfzoIKkunSomoe4SutKmg6+6JGmsSbyvQqo7pk6adUBUtknBpz/flq6Qt2nTJlqZTzTvkrp7+oFhPOlkT+1UFz5149IXkgJoZR1V7ERBtT9+S1eB1e0ska/q6n2i/ajunf57JshJYDML+mLfz7E/x/sLX9kbXTTRib3fHVongBqf5d+vMVt6jygbGUbxHLsTtrLmmh5AF8tix96o67G6ovmuuOIKNy+kjq2c7CfX8VTTbqgwU+y0Knr9e/To4YK+9IXFfGEJTOJJ+0TdXVUzQBd11UsilrpH6wLasGHDMvz9sATQiX48RfIg4MuHYk+I9QWkAiw66KSfiFxXSzVOL7Ybp/+7ef2l6m/H/yLUXHAa6K55lTQ+R8Gorphq3qXGjRu7wiKxB3h184in2G3rhE5X+FWFUVcd1VVW7dWVfU2s7BcYiJXIQV9Gguwmk1n3zvRX9oPYp3ptldVTplnVQ3XipwsUupLvd+9T92RdoAjrSV68xu6Esay5irAoGyXKPulEtUGDBu5imbrwZdQVLqzvg7wU1uOpsk26OOkHdv5xUt2LFahqPkC/m2K8vkvDTJ8N9TDyx0KnL3qkTLkq3QZxcTc/HE+RXAj48vnVU3Up0pgidTvRlTR1mfCpy1nsXGbxONn3v/xiS22LunRoILa6aqoojNqmL9YHHnjArdfPftYvEU5SNKZM4wz8Lodr1qxx4yL9NoblSz5RJ4FNH/Tp6qhOCIPct+k/F/p86bOlwiE6OVXBm/TCerKfiOXOE6GsuU7edGKvHhHqcqhxfCrHr/UFCxb0Pvroo0zbjOQ4nqqrrnrC+MGJ2qZMrrL+DRs2dJk+detE9mg/qliXjvH6/o99rVUVXMW50s/HGDaJeDxF8iHgy6f0xakAz5+vaty4ce4ESQdQnyaH1pXVeFe4VBEDXZVXV7jYTN2IESNce5RpFGVJNHBfX65qu8ZEBN2NI/2Vab/LYWwxgUQeRJ5I2ZKc7mtlUTQWNZHoBEVXoXUFV/vPLzSD5CxrrotRyurq/ahMr3+sUnEJXTzTuDQk5/HUfw/qM67upuq67VPAp6I9+s5SoRaN4w3rxZ4gp1vRMV5TMGiIgbKmGtepJZECfiBREfDlE+lLPqvilSb+9cedlCpVynv88cej2TUFWgr4VAAj3l+oulqnLpqau0pXymPnUtN4Qi3+/DoawK+spIK+2AnhEy37pJPQvJybMBmzJTnd1+ra608YH/QJQPppSlTwSCd5QV+QSEaJUtY89jX326TXWxdIdDFKRSYSJTgJk7AdTzWmUGMOlXlS0TMFfup6qvFmCvJ0kVJjOJHzoE+9JPT5VjdOjYHWFBdhqMYJJIKI/mdIaqmpqVagQAH3865du6xIkSI2ePBgK1GihB1yyCF24YUX2v3332+XX
365LgDYc889Z7///rvdcMMN7nYkEknzHPGyZMkSGzp0qC1evNhq1qxpEyZMsK+//treeustu+iii6xTp07Rx/rtTBQpKSlWsGBB9/PVV19tDz/8sL399tvWuXNnS3Sx+1LvhR49elihQoVs/vz51qpVK0sk/uFL788LLrjAmjdvbrfddpslkvTvzd27d1vhwoUDbVOyiD0uxb5XP//8c2vdunWgbdu+fbu98MILrl3r16+3zz77zL3uQRxLwy4sx1P/s75z5053vHzmmWfs77//tkqVKtnYsWOtaNGi7vu2VKlS9sgjj7i/KZG+txKdzl8GDhxojz76qM2ZM8cOP/xwt//27NnjPvcAshB0xIm871KmgeQyePBgV4hFVEhC3SM1LkJFW2Kza6qEqeIoiUAlmN98801X+VBlmTVPoK6WakxHmK5M33///aHowpMo2ZKcttWfi0sFMhKly6kvoyvPXI1O/rLmGnukypGqIuh/9oOscBt2YTmeZta7QBlJVZXWeF4Vn8G+f66GDBkSfT/QawLIHjJ8SapPnz42fPhw69Kli7s6esQRR9jLL79sn376qcuCiLJkr7zyir3xxhtWt25dd+VZV0//+usvdzU60a6Y6cre0qVL7ZtvvrE1a9bY448/bpdccomF5cp0rES8yp/I2ZLsZCG1n3VVPZHaGttOfdY2bdrkPpvIPcqinX/++VasWDF31T8RXn//dVdGQlm92CxOIn72wyJMx1O/94Fee/WYUVbyzTfftCeeeMLatGkTdPOSSiK+/kCiSawzeuSKs846yxYtWmS1a9e2b7/91qpWrepOinXCqWDP7/7w5JNP2saNG90JqE5EDz74YHdyMm/ePHd/Zl+uQZ08PfjggzZ79mybOXOm6w5z5JFHWqKL/RKKPeFPxC8nv03/+9//QhfsJWKX04zaqa7J3bp1swMOOCDo5iUNdU0vV66cvfvuuwnzXtXrruOnus/LSy+9ZFu2bEnYz35YhOl4Kv7nX8MS9PrreKpundj/wE4XsDdv3pzQrz+QULKZCURI/PTTT26qBX8ichU8UYVAFbTQ/FCqbpW+28mcOXPcJKaa0y79/HeJ2k0mDGWME73KZRgngQ1Ll9OwFL1JFol2PAjjZz/RhWmfxrZVVUXV1tq1a0eLjSG5X38gEdGlM8noKuLZZ59txYsXtwoVKrgM39SpU12Wr0OHDi67pyujDRo0iP7Ojh07XHcoH90j8k+Xw/TU5ahnz54uW5Jo3Y7C0uU0LBlI5I2wfvYTWZj2aZjaGhbsUyAXBB1xIm8KtejKlzIK77zzTnT9H3/84YqfKNO3ZMkSN4hcZY41ADoRStkni0SZEyxZsiVhKNARtgwk8kbYP/uJKEz7NExtDQv2KZA7CPiS8MA4fvx4N4ed5gHSZKWrV69OE/S1adPGTWCqeYHU1ZPKcbn/Gggn/Pmry2ls5U1e+/yHz37+3qdhamtYsE+B3MNI1yQQWw1MjjnmGPvyyy/t+uuvt7Vr19qAAQNcVUupXLmyLViwwM1Vdumll7ounyrUoq6e2P8uhxl15Uu0LofJUKDj448/Trh9GpaiN8h9fPbz9z4NU1vDgn0K5C7G8IVc7Lim3377zX3JaDxe2bJl3TpV4nzqqaesYsWKNn78eKtevfpez5Eo1TiThU74u3fvzhd+HlFltjJlylgiSsQpAhA/fPbz9z4NU1vDgn0K5A4yfCHP7PnB3rBhw9x0DC1btnT/Tpgwwa3v27ev9erVy9avX+8yfb/++utez0Owl7sn/PrC1wk/X/h5I1GDvUTPQCJv8dnP3/s0TG0NC/YpkHuYhy/EFav8LiR33HGHPfTQQ/bYY4+5wG7ZsmUuuNMcRYMHD3ZBn4K6++67z81fd/fddwf9JyStRJwTDPFz6qmn2sqVKxM6KEXe4LOfv/dpmNoaFuxTIPfQpTPkXTk1cbomclYp/Ysvvjja5W3KlCl2880329NPP21nnnmmWz9jxgzr3
LkzGb183OUQQN7hs5+/92mY2hoW7FMgd9ClM0T69etnbdu2dT/7XTlVbOWbb75xB0WfvnAuuugi69ixo5v7xy/q0qVLFxfsacwe8g5f+ED+xGc/f+/TMLU1LNinQO4g4AuRCy64wP744w8XuPk0ubq6kc2bN891JfOVL1/eHSh//vnnaNdPH2P2AAAAgPyBgC9EjjvuOJs6daotXbrUBXmiYE7TMHz99dc2adIkW7FihVu/detWNxVDvXr1Am41AAAAgKAwhi8EYqdNmDlzpi1evNiNzzv77LNdFSsZNWqUm35B5aBr167tMoEK+vRYrQMAAACQ/xDwhciNN95o06dPd9MuKJD79NNPrUOHDvbOO++4+/XvkiVL7KuvvrIDDzzQhgwZ4oI9jfMj6AMAAADyHwK+kNC8Phq79+KLL7qunTt37rQPPvjAVeZs1aqVvf322xn+HsEeAAAAkH8xhi8kNP2CsnSaWF2KFi1qnTp1sgcffNBl9nr06JHh75HZAwAAAPIvAr4E5E+jEKt58+buX3Xp9BUuXNgOP/xwq1Wrlj333HM2cODAuLYTAAAAQGIj4EvACdX9aRT++ecf2717t/tZUyyoS6eKtLz22mvRxxcvXtyOPPJIN55PhVsAAAAAwMcYvgR1991322effWZ//fWXDRs2zE488UT77rvvXOGWv//+2wV5hx12mE2YMMFV8fzwww/dZOyxFT0BAAAA5G9k+BIos+cbPXq0WzRer2zZstatWzc3Vq9p06Y2ZswYO+WUU+zll1+2++67zwV377//vgv29BwEewAAAAB8ZPgSzPLly238+PFuYvXjjz/erbv99ttt3Lhx7t+rrrrKFWzZtWuXbdmyxcqXL++6gFKNEwAAAEB6zMidQN544w07/fTTrXr16ta1a9fo+jvuuMP9O2LECJfBO//8861KlSpWoUIFt16ZPapxAgAAAEiPLp0JREFe//79bc2aNW7ydBVtiQ36rrnmGleJc/bs2Wl+T905AQAAACA9unQGRFm52EAt9rYmU582bZpNnjzZZfzUhdP3xBNPWO/evcnoAQAAAPhPBHwBiA3uJk6caJ9//rmrrtmoUSMbMmSIW9+3b18X9D355JOuaEts0CeM2QMAAADwXxjDFwA/2Bs8eLA9/fTT1qtXLytRooTdcsst9uOPP9qkSZNcoKfHXXrppbZ9+3a76KKL3ETr0ReuEC8dAAAAgKwx+CuOPM+L/jxnzhw3tcJLL71k99xzj7Vo0cJNoq659XzK/qlS53PPPZcm2AMAAACA7CDgiwNNiq4umJo+wZ9vT4VZDjjgAOvQoYNNnz7devToYQ888IBdfvnlbroFVeyUV155xd599914NBMAAABAkiHgy2PPPvus3XXXXS5Lp3F6fndOTb1QrVo1e/zxx61nz542atQou+yyy9x9CxYscJm/n3766d8X6f8mVQcAAACAnCDgy2Oqsqlumm+//bY988wzLuiTcuXKufF6yuhpQnU/2NNUDAr+1P3zwAMP/P8vFFMvAAAAAMghqnTGoRrntm3b7M4773QZu1NOOcUVYFHRFXXbPPPMM61Pnz6ua6cmUh8zZoz9+eeftnDhQvcYBX7qCgoAAAAAOUXAl0f8QM2fPmHr1q0u6Pv555/tpJNOcmP2VIjlhRdecN06Fy9ebAcffLBVrVrVnn/+eXefsoEFCxbMqyYCAAAASHIEfHlswIABLpC74oorXNCn8XzLly9PE/Rt3LjRdeVUlc6yZcumCRQBAAAAYF8xhi8PffXVV67CZo0aNdztUqVKubn2NDZv5syZrqCLAjuN51MBF/2rYE/ZQYI9AAAAAPuLgC8P+BU1582b56ZeaNasWXR9bNA3Y8YMF/T5hVz832PMHgAAAIDcQMCXB1SoRfPs3XHHHXbuueda/fr1o+vFD/oaNGjggj6/eqfuj52cHQAAAAD2BwFfLvOzdO+88461aNHCLrzwwuh9q1atcgHeo48+6oK+W
2+9Ndq9c8qUKS7oI7sHAAAAILcQ8OUyP4v38MMPW+vWra1KlSq2ZcsWGz16tPXu3duuvPJKW7Zsme3cudNKlizpMn01a9a0WbNm2WeffZbbzQEAAACQj1GlMw9MnjzZ7r33Xps9e7bL5i1YsMC+/PJLGzp0qLVr184OOeSQvebp08Tsxx9/vBvzBwAAAAC5gYAvl6lb5sUXX2yvv/66lS5d2mrXrm3du3e3s846yypXrrzXPH1+0McE6wAAAAByGxO95bJdu3a5f7t06eIydgr+fLETqftj9fwuoIzdAwAAAJDbyPDlAU2irkCuaNGi7rafxQMAAACAeCLgy2N01QQAAAAQFNJOeYyumgAAAACCQsAHAAAAAEmKgA8AAAAAkhQBHwAAAAAkKQI+AAAAAEhSBHwAAAAAkKQI+AAAAAAgSRHwAQAAAECSIuADAAAAgCRFwAcACEyPHj3s7rvvDt0rEIlE7NVXX3U/r1y50t1evHhxnm93zpw51rx5cytcuLB169bN4u3888+30aNHx327AIB9R8AHAPlc79699woeXnrpJStWrFientx/9dVXNmPGDBswYEC2f2f27NkuuNq4cWOa9R07drRrr73WglCrVi37/fffrVmzZnm+rUGDBlmrVq1sxYoVNmXKFIu3W2+91e666y7btGlT3LcNANg3BHwAgDSeeOIJ6969uz366KN23XXX5dneeeihh+ycc86xUqVKhfoVKFiwoFWtWtUKFSqU59tavny5HXfccVazZk0rV67cXvd7nmd79uzJs+0rqD3wwAPt2WefzbNtAAByFwEfACDqvvvus6uvvtqmTp1qffr0ia5/7bXX7JBDDnFZv/r169vw4cOjgcXFF19sp556apq9uHv3bqtcubI9+eSTGe7dlJQUl0Xs2rVrmvXPPPOMtWnTxkqXLu2CqAsvvNDWrVsX7Tp57LHHup8POOAAl+lTdlLLRx99ZGPHjnXrtOix8u2339rJJ5/sgsoqVaq4LqR//fVXmsygMow33nijlS9f3m1z2LBhadr0448/2tFHH+3+9iZNmth7772X5v70XTr9LOSsWbPc31KiRAlr3769LVu2LM3v3XnnnW4f6W+95JJL7KabbnLZu4z42/j777/d/tbPyvD523r77bft0EMPtaJFi9qnn35qO3fudH+Xnl/tPvLII23BggXR5/N/75133rHWrVtb8eLFXSCpfa3nOvjgg61MmTJu/2/fvj1NW/Sa6f0BAAgJDwCQr/Xq1cs7/fTTvRtvvNErVaqU9/7776e5/+OPP/bKlCnjTZkyxVu+fLn37rvvenXr1vWGDRvm7p8zZ45XsGBBb82aNdHfeeWVV7ySJUt6W7ZsyXCbX375paevoLVr16ZZ/+STT3ozZsxw25k3b553xBFHeCeffLK7b8+ePd7LL7/sfm/ZsmXe77//7m3cuNEtely/fv3cOi167IYNG7xKlSp5Q4YM8b7//nu3zRNOOME79thjo9s75phj3N+mv+WHH37wnnrqKS8Sibi/UVJSUrxmzZp5xx9/vLd48WLvo48+8lq3bu3aMH36dPeYFStWuNuLFi1ytz/88EN3u127dt7s2bO97777zjvqqKO89u3bR7f77LPPesWKFfMmTZrk/pbhw4e7drRs2TLD/aW/R3+XHjNmzBj38/bt26PbatGihWvzTz/95P3999/egAEDvOrVq7t9qe3rNT7ggAPcfbFtPPzww71PP/3U7ZsGDRq4/XHiiSe623rdK1So4N1zzz1p2vL22297RYoU8Xbs2JGNdxcAIGgEfACQzykY0Am8AoBZs2btdb+CnbvvvjvNumeeecarVq1a9HaTJk28e++9N3q7a9euXu/evTPdpoIlBYmpqalZtm3BggWuXX7g6AcqCuZiKVC55ppr0qwbMWKEC15irV69Ohow+r935JFHpnlM27ZtvcGDB7uf33nnHa9QoULeb7/9libgyU7AFxs4v/XWW27dP//84
24rGLzqqqvSbLdDhw6ZBny+smXLepMnT47e9rf16quvRtdt3brVK1y4sPfcc89F1+3atcsFgPfdd1+mbRw5cqRbp2Dbd9lll3mdO3dO04avvvrKPW7lypVZthUAkBjo0gkAsBYtWljdunVt6NChtnXr1r2Kq9xxxx2uW6S/9OvXzxUq8bv7qUvi5MmT3c9//PGH6xaoroeZ+eeff1z3Q3UrjLVw4ULXZbB27dquq+Mxxxzj1q9atSrHr5La/eGHH6Zp90EHHRQdCxf7t8eqVq1atBvp999/74qyVK9ePXr/EUccka3txz6vnlP851X3zsMOOyzN49Pfzgl1HfXpb1OX2g4dOkTXqaqnnl9/T2ZtVJdXdT9Vl93YdX6bfer+Kem7egIAElPejzAHACS8GjVquDF1GiN30kknuYBNAZcoANSYvTPPPHOv39P4MOnZs6cbgzZv3jybO3eu1atXz4466qhMt1exYkUXMOzatcuKFCni1m3bts06d+7slueee84qVarkAj3d1uNySu1W8HjvvffudZ8fgPnBUCwFoampqba/Yp/XD2xz43kzUrJkyVxpY3b2xfr1692/en3w/9q7X5bIwiiO48/OW/AFTBFNRsHun2iZZJpgEo0aRLBYDIrRqVpkMIyIQcRm0SQIikEsFsGmxSjfs1wZ7+jurBd2Z+5+P2DwygzXy8Dw45znHEnqfVb4JEmhWq3G8JPHx8cIfS8vL3GdYS1UpAYHBzt+KpWfXyMDAwOx2oEqH8NE2ge+fCYbTnJzc/N+7fb2NoaSrK+vR1ikGpevLmXhkKEv+ev5a9z39fV1VC7z991tQGJ4ycPDQ1QzM+fn54U/McPDwx+GqCD/+3cxRZPnwc6+DBU/3p+hM0UxCIcpoYR2SVLvM/BJkt7RvsgER4IWlbXn5+e0urqadnd3o8pHgKItkCmN7GRrR1vnzs5O/L1er//yqVIdIpAxUTJDGydBhXUN9/f36fDwMK2trXWEUqpOR0dH6enp6b39lFB3cXER0yyZwklVan5+PqpRMzMzEXZodWQqJWE0Hw6/MjExkYaGhuL/oUX07OwsraysFP7EMAmVCaY8L6aAMrHz6uqqo8X1Owizc3NzaWlpKR0fH0eopgWXiurs7Gzh9+cZTE1NFX4fSdLfYeCTJH1A9YbQR3Ai9HFmjYB1cnKSRkdH09jYWNra2orwlQ9HtErymvYzb18hINK62R4CqQ7u7+9HJYpK38bGRkfrKcGT9lHOly0sLMT1xcXF2IfH67JWUO6BKhfhjoAyMjISy9nZX5dVJn/7JVmppFarFWcOOQPHPbN4vCj2HC4vL8d9E3xZpM56iaxFtiieXa1WizUUvP/d3V2EXdZZFPH6+poODg4iQEqS+sMPJrf865uQJPU/qm0EMto6Pzvvl0eIorWx2Wx2PQilzCYnJ2MPILsIe9X29nYEYMK/JKk/OLRFklQI7ZNUAzc3N6N6Nj093dXrmPZIq2j7IvT/Be2VjUYjqqFUJvf29tLp6WnHUvdew1AXWm4lSf3DCp8kqRDOzTGVk1ZQWjLHx8d9ol1UN5kgenl5GW2SVDo5E9lNZVSSpD9h4JMkSZKkknJoiyRJkiSVlIFPkiRJkkrKwCdJkiRJJWXgkyRJkqSSMvBJkiRJUkkZ+CRJkiSppAx8kiRJklRSBj5JkiRJKikDnyRJkiSlcnoDbcu6RJdo4W0AAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "\n", + "# Analyze attention patterns for a specific example\n", + "text = \"When Mary and John went to the store, Mary gave a book to\"\n", + "outputs = list(lit_model.predict([{\"text\": text}]))[0]\n", + "\n", + "tokens = outputs['tokens']\n", + "print(f\"Tokens: {tokens}\")\n", + "\n", + "# Get attention for layer 5 (if available)\n", + "layer = min(5, model.cfg.n_layers - 1)\n", + "attn_key = f\"layer_{layer}/attention\"\n", + "\n", + "if attn_key in outputs:\n", + " attn = outputs[attn_key] # [heads, q, k]\n", + " \n", + " # Plot attention for head 0\n", + " fig, ax = plt.subplots(figsize=(10, 8))\n", + " im = ax.imshow(attn[0], cmap='viridis')\n", + " ax.set_xticks(range(len(tokens)))\n", + " ax.set_yticks(range(len(tokens)))\n", + " ax.set_xticklabels(tokens, rotation=45, ha='right')\n", + " ax.set_yticklabels(tokens)\n", + " ax.set_xlabel('Key (attending from)')\n", + " ax.set_ylabel('Query (attending to)')\n", + " ax.set_title(f'Attention Pattern - Layer {layer}, Head 0')\n", + " plt.colorbar(im)\n", + " plt.tight_layout()\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "397f772d", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABK4AAAEiCAYAAADQ0Jp7AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjgsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvwVt1zgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAYG1JREFUeJzt3QmcjfX7//HLOpayJ2vW9uyyRCtfKhWlorKEKN+yViIiS6EQlVIkKbKk9C1lSalE9CVLC0Uk2bPvxf1/vD/f/31+58ycYc6YMTPnvJ6Px2HmnjNn7nOfe73u67o+mTzP8wwAAAAAAABIZzKn9QwAAAAAAAAA4RC4AgAAAAAAQLpE4AoAAAAAAADpEoErAAAAAAAApEsErgAAAAAAAJAuEbgCAAAAAABAukTgCgAAAAAAAOkSgSsAAAAAAACkSwSuAAAAAAAAkC4RuAIAIAplypTJHnnkEYtGGzdudO9vwoQJgWlPP/20mxYt/v3vf9u//vUvS6+07LW89Vn4rrvuOvdA+vfXX39Z7ty57ZNPPknrWQEA4LQIXAEAkE4oEJCUx4IFCywjOXnypE2cONFq1qxpBQoUsHPPPdcuuugia9WqlX377bdpPXvpzoYNG2zcuHH25JNPJvjZ/v377ZlnnrHq1atb3rx5LS4uzkqVKmXNmjWzWbNmWbRbtGiRC1Lu3bs3Sc+///777Zxzzjnlc7777jsX5L388stdMOeCCy6wu+++23755Zck/Q0/aHr++efb4cOHE/y8dOnSdsstt1h6UrBgQXvggQfsqaeeSutZAQDgtLKe/ikAAOBsePvtt0O+V7Bn3rx5CaZfeumlGeoD6dy5s40ePdoaN25s9913n2XNmtXWrl1rn376qZUtW9Zq1ap1xn+jT58+1rNnT4sGo0aNsjJlytj1118fMn3dunXWsGFD+/333+322293gT8FZf744w+XOaPgiNaZli1bpsl8z50796wErvr37+8CUvny5UuR1xw6dKh98803dtddd1nFihVt27Zt9vLLL1vVqlVdYPWKK65I0uvs2LHDXn31VXv00UctI3jooYfsxRdftM8//9xuuOGGtJ4dAAASReAKAIB0okWLFiHf66JZgav40zOS7du32yuvvGLt27e3119/PeRnI0eOtJ07d6bI31EwTI+M7u+//7ZJkya5oEKwf/75xwWrtDy//PJLq1OnTsjP+/Xr5wJHJ06cOOXrHzp0yGUVpYbs2bNbRtS9e3ebPHlyyPwrg61ChQo2ZMgQe+edd5L0OpUrV7bnn3/elXnmzJkzVeZV2YvHjx+3HDlynPFrKQCuoJzKPglcAQDSM0oFAQDIQBR4UEZHyZIlXZnYxRdfbMOGDTPP8077u4MGDbLMmTPbSy+9FJimrKerr77aBTNUwteoUSP78ccfw5Zb/fnnn9akSRP39XnnnWePPfbYaQMlKnvTvMUPtIjKqwoXLhz4fvfu3e41FTDQ38iTJ4/ddNNNtnLlytO+t8R6XCnoUK1aNRdIUJli8+bNXYZSMPVl0gX8Tz/95LKccuXKZcWLF7fnnnsuwesdPXrU/S2VOip4ULRoUbvjjjts/fr1IcEFBeVUeqbnqITswQcftD179pz2fSxcuNB27dpl9evXD5k+ffp0++GHH1xpV7hlKQ0aNHDLK34fKgW6FEzRsi5RooT7mbK2NE3rj5aNSseUcRTcs8qn9UGBDT1Pv6/1SO8xvnA9ro4dO+aCauXLl3frq9bbHj16uOnherLNnDnTfRZ6rpbf7NmzA8/Rcn/88cfd18pI80tnw81zJK666qoEQbcLL7zQ/f2ff/45ya/Tt29fF1hU1lVKbcf+clEwU/Oj52qZ+J+t1hdlNGp7VAaa1jMFtlRKqYy8/Pnzu4eWebh9hPqoffTRR0nafwAAkFYy/q1JAABihC4ub7vtNvviiy+sXbt2LsNjzpw57mJeQaUXXnjhlKV0zz77rL322msu+0lUgti6dWtXfqZ
yKfXn0UV33bp17fvvv3e9eXwKUOl56lOlC+zPPvvMhg8fbuXKlbOOHTsm+nfVf8kPvCgwoqBQYn777TcXuNDzFJhQEEDze+2117qgUrFixSJaXuoFpUCP+hWpn4+yuxS0u+aaa9z7Cy41U1DpxhtvdEEoPf+9996zJ554wgXR/GCQloHK8ebPn+8CYF26dLEDBw64rDgFlbQsRMEDBRbatGnjggoK3qn0TH9TJWnZsmU7ZSmcAhJVqlQJma7ggiQn+04BKgU2FFhRwMTv66S/pfehYJSCP/rsFXjSsvY/J5XNKZinjC+VYirAqcy5pGQUKbil9VXBlQ4dOrgMn9WrV7v1VP2j9FkH0/Pef/99N78KoqqMrWnTprZp0yYXWNNno99799133WsUKlTI/Z7eW2psa1r/FCxKKgWAFeBTwFPbRGLLKNLtWKV806ZNcwEsvWdtlytWrHA/69SpkxUpUsSVTypDU5+N1mt9turVpW1eZaTKBFNAUMGsYArq6u8pOJnUkkgAAM46DwAApEsPP/yw0iAC38+cOdN9P2jQoJDn3XnnnV6mTJm8devWBabpefp9efTRR73MmTN7EyZMCPz8wIEDXr58+bz27duHvNa2bdu8vHnzhkxv3bq1e70BAwaEPLdKlSpetWrVTvs+WrVq5X4/f/783u233+4NGzbM+/nnnxM87+jRo96JEydCpm3YsMGLi4sL+duaptd78803A9P69esXsqw2btzoZcmSxXvmmWdCXm/16tVe1qxZQ6Zfe+217ncnTpwYmHbs2DGvSJEiXtOmTQPTxo8f7543YsSIBPN+8uRJ9//XX3/tnjNp0qSQn8+ePTvs9PhatGjhFSxYMMF0LWt9XvEdPHjQ27lzZ+Cxb9++wM+0fPQ369at6/3zzz8hv3f48OEEr7V48eIEy6Fr165u2pIlSwLTduzY4dYRTddnEbwc9fC9/fbbbr3TMgk2ZswY97vffPNNYJq+z549e8g6vHLlSjf9pZdeCkx7/vnnE/zdU9G6mzt3bi9Smnf9nTfeeOO0z/XXPS3/L7/8MsE6UqpUKa9Ro0bJ3o61DH/88ceQ5/qfbcOGDQPrntSuXdu9xkMPPRSYps++RIkSIZ+Nb9GiRe51pk6dmqTlAgBAWqBUEACADEKZE1myZHFZPMFUcqRrXJX9BdM0ZWmo2bdK5pRd5VOWkMqJ7rnnHlea5j/0+sqqUjZIfPH7LinDRFlSp/Pmm2+6jCNlUX3wwQeuHFDZN/Xq1XMZJj6VQamU0c9u+uuvv1zJoMqoli9fHsGSMpe5o4wfZU8Fvz9lp6gMLP77098JzmZS6ViNGjVC3t+MGTNcxouyXOLzyxSVWabR/lSCFfx3ldmivxFuuQbTe1ZpV7jRBMONjte7d2+XceQ/7r333gTPUYadPtdgwdlA6qulv6tyPmXrBC9rrXNqnq9l4dPfUZP909Gy0Od8ySWXhCwLv59S/GWh8kg/a03UKF3loklZx1LSmjVr7OGHH7batWuHbDNJoWw+Zagp6+rIkSMpsh0r4/Cyyy4L+1rK2AoukdW2q9fQdJ/+lkahDLcc/XVNnwsAAOkVpYIAAGQQ6kukcjmVUYUbZVA/D6YR5g4ePOhKwBSgCvbrr7+6/xNryqyAQTD1aopfkqWL3qT0bVIwSoEAPRQgUbncmDFj3AW6StW+/vpr9zwFmhRkUzN3ldcF989SqVgk9P50Aa8gVTjxy/VULhe/R5be36pVqwLfq4+VgminagKvv7tv376Q3l3xR547nXD9hvSZa9nFp7I6lS+eqoxQAcP4FFQZPHiwCyoqeBj8NzX/Pq1TCobEp+VwOloW6hGVWClf/GWh0rb4krqOpRSVRqrPm4KPKheNH/BLCvXiUrBJ63i3bt3OeDsO9/kltsw036LeWfGnh1uO/ucerj8cAADpBYErAACilJp4qxeOsp2UeaTm5D6/ubb6XCkLKb74wZnkXMCHowCU+vvooX5KahyuC3X
1wlI/HvWkatu2rQ0cONDNr4JeXbt2DdsM/FT0fF2MKzgWbt7jZy8l9v4ibVqtv6uglZpph3O6fkxaPuECDMpa0mepIJMax/vUJF4PSWykuXC9lpQ1pqCVlq0yixTY0PJSIDHSZZ0YvY56hI0YMSLsz+MHV1LqM0guBezUz0yZiAqmRtpTLTjrSuu2sq7iZykmx6n6iSW2zMJND7cc/XXN7xcGAEB6ROAKAIAMQsEdNUVXQ/DgbA2VNvk/D6bSL1086yJajcfVVNz/Pb8kS0GW+CPYnS0qX1LgauvWrW7eleGiMqs33ngj5HkKJER6Ya33pwt1Zav4gZ0zpddcsmSJK61LrMG6nqPPSEHDpDQwDxegUtBLQRQ/e0aUVTVlyhT3M40Qd6a0rFUGpwb7wSMmalkH0+fiZ+cFW7t27Wn/hpaFRoRUSWhKZfSkVmaQ3vutt97qmr/r80usNC+SrCttdxpc4Ey349SkzMbgbC8AANIjelwBAJBB3Hzzza58ThlUwTQqmC7o/dHvgqlPkHrqqGRLF+Z+3x2NEKhyQGU5KRATn0bgS6nSK41SF9/x48ddIE0ZVQqw+Vki8bNC1CcpuA9WUmkEOr2eRluL/5r6PlzZ3elohDv1Aoq//P3XFGW26TNSxlh8GpkvfmAoPmU/6bWWLVsWMl2vq2CKXlejx51pZlK4Za0RF4PLM/11Tn9v6dKlIetGYhll8edZn93YsWMT/EzroT/CYSQ0qqGcbjlGQu+5WbNmtnjxYre+6TM4UyoVVOBKo3UqKHam23Fq0XqmAGkkoycCAHC2kXEFAEAGocCTMpLUkHvjxo1WqVIlmzt3rn344Yeu5Cu4sXUwNdfWc3TBfOedd9rMmTNd0Eq9r1q2bGlVq1Z1JWIqY9u0aZPNmjXLZQyFC9BEavPmza6xt3ppKfNGZYnqbfTuu++6bBzNt59NpayiAQMGWJs2beyqq66y1atXuwBJ2bJlI/67WhaDBg2yXr16uWXVpEkTl92iDBM1iO/QoYNrEh+JVq1aub5h3bt3d4EcNadX8EXZM+o11bhxYxewePDBB13/KJX2NWjQwGVnKWtJQRH18NJnkJi6deu6ckG9ZnD/Mb2G5lsBRz1HgTn9fQVyFBz6z3/+4z479WdKCi1rlYkqaKGAmII2+pvxe4kpu0vPU8Zely5d3N97/fXXXVZQcP+vcLRuTZs2zZXLqRG71ikFbJRZpOlz5sxxWXeRUJN70TagdVbLRduFH9AKR4FZrQvxqRRVn5uaomv56XV2797tBjIIlljvsNPp16+f215TajtODRqkQfNDjysAQLqWJmMZAgCA03r44YfdUPXBDhw44HXr1s0rVqyYly1bNu/CCy/0nn/+ee/kyZMhz9Pv6feDffjhh17WrFm9Zs2aeSdOnHDTvvjiC69hw4Ze3rx5vRw5cnjlypXz7r//fu+///1v4Pdat27t5c6dO8H89evXL8H8xbd//35v1KhR7m+UKFHCzfO5557r1a5d2xs7dmzIfB89etR79NFHvaJFi3o5c+b06tSp4y1evNi79tpr3cO3YcMG93fffPPN087LjBkzvLp167r51+OSSy5xy2Xt2rWB5+i1L7/88gS/q/ddqlSpkGmHDx/2evfu7ZUpU8a9lyJFinh33nmnt379+pDnvf766161atXc+9D7rVChgtejRw9vy5Yt3ul07tzZK1++fNif7d271xswYIBXpUoV75xzzvGyZ8/ulSxZ0s3DRx99FPJcLR8tk++++y7B6+zZs8dr06aNV6hQIfc6+nzWrFnj3q/ed7BVq1a5ZaT1o3jx4t7AgQO9N954w722Povg5Rj8Ocnx48e9oUOHuuUbFxfn5c+f3y2X/v37e/v27Tvl+irh5kd/X/OROXPmBPMQn35Xzwn30Lruz3diz0nKqbK/7u3cuTPBz/zXbtSoUYptx6f6bBObl3Db8M8//+ye+9lnn532PQIAkJYy6Z+0Dp4BAADgf3777Tf
X60qN5ZWlBqQGZXd99dVXrlyQjCsAQHpG4AoAACCd6dixo61bt86VcgEpTT3eVO6pkk2VEAMAkJ4RuAIAAAAAAEC6xKiCAAAAAAAASJcIXAEAAAAAACBdInAFAAAAAACAdInAFQAAAAAAANKlrGk9A+nRyZMnbcuWLXbuuecyPDAAAAAAAEAK8zzPDhw4YMWKFbPMmRPPqyJwFYaCViVLlkzpzwQAAAAAAABB/vjjDytRooQlhsBVGMq08hdenjx5El14AAAAAAAAiNz+/ftd0pAfg0kMgaswMmXK5P5X0IrAFQAAAAAAQOrGYNJlc/avvvrKbr31VlfPqBmdOXPmaX9nwYIFVrVqVYuLi7Py5cvbhAkTEjxn9OjRVrp0acuRI4fVrFnTli5dmkrvAAAAAAAAAKklTQNXhw4dskqVKrlAU1Js2LDBGjVqZNdff72tWLHCunbtag888IDNmTMn8JypU6da9+7drV+/frZ8+XL3+g0bNrQdO3ak4jsBAAAAAABASsvkqY17OqCMqw8++MCaNGmS6HOeeOIJmzVrlv3www+Bac2bN7e9e/fa7Nmz3ffKsLryyivt5ZdfDowQqJrJTp06Wc+ePZNcZ5k3b17bt28fpYIAAAAAAAApLKmxl4gyrn7++WeXyXTDDTdYuXLlrGjRolaxYkVr3bq1TZ482Y4dO2apafHixVa/fv2Qacqm0nQ5fvy4LVu2LOQ5GlJR3/vPAQAAAAAAQMaQpMCVSu4U/KlSpYotXLjQZTWpTG/gwIHWokULU9JW7969Xa+qoUOHploAa9u2bXb++eeHTNP3itIdOXLEdu3aZSdOnAj7HP1uYjS/eo3gBwAAAAAAANJWkkYVbNq0qT3++OP23nvvWb58+RJ9nrKaRo0aZcOHD7cnn3zSMorBgwdb//7903o2AKQjpXvOsmi1cUijtJ4FAAAAAEi5wNUvv/xi2bJlO+3zateu7R5///13qiz+IkWK2Pbt20Om6XvVQubMmdOyZMniHuGeo99NTK9evVxDd58yrtQXCwAAAAAAAOm8VDApQaszeX5SKSg2f/78kGnz5s1z0yV79uxWrVq1kOeoObu+958TTlxcnAt+BT8AAAAAAACQATKuXnzxxSS/YOfOnZP83IMHD9q6desC32/YsMFWrFhhBQoUsAsuuMBlQv355582ceJE9/OHHnrIjRbYo0cPa9u2rX3++ec2bdo0N9KgT5lTahZfvXp1q1Gjho0cOdIOHTpkbdq0SfJ8AQAAAAAAIIMErl544YWQ73fu3GmHDx8O9Lvau3ev5cqVywoXLhxR4Oq///2vXX/99YHv/XI9BZ4mTJhgW7dutU2bNgV+XqZMGRek6tatm+ulVaJECRs3bpwbWdDXrFkzN399+/Z1DdkrV65ss2fPTtCwHQAAAAAAAOlbJk9DAkZg8uTJ9sorr9gbb7xhF198sZu2du1aa9++vT344IN23333WUanHld58+a1ffv2UTYIxCiaswMAAABA2sdektTjKthTTz1lL730UiBoJfpaWVl9+vRJ/hwDAAAAAAAAZxK4UvneP//8k2D6iRMnEozmBwAAAAAAAJy1wFW9evVcSeDy5csD05YtW2YdO3a0+vXrJ3tGAAAAAAAAgDMKXI0fP96KFCniRu2Li4tzD43ep+bnapQOAAAAAAAAnLVRBYOdd9559sknn9gvv/xia9ascdMuueQSu+iii1JkhgAAAAAAAIBkBa58pUuXNg1IWK5cOcuaNdkvAwAAAAAAAKRMqeDhw4etXbt2litXLrv88stt06ZNbnqnTp1syJAhkb4cAAAAAAAAkDKBq169etnKlSttwYIFliNHjsB0NWafOnVqpC8HAAAAAAAAhBVxjd/MmTNdgKpWrVqWKVOmwHRlX61fvz7SlwMAAAAAAABSJuNq586dVrhw4QTTDx06FBLIAgAAAAAAAM5q4Kp69eo2a9aswPd+sGrcuHFWu3btM5oZAAAAAAAAINm
lgs8++6zddNNN9tNPP9k///xjo0aNcl8vWrTIvvzyy0hfDgAAAAAAAEiZjKu6devaihUrXNCqQoUKNnfuXFc6uHjxYqtWrVqkLwcAAAAAAACkTMaVlCtXzsaOHZucXwUAAAAAAABSJ+PqhhtusP79+yeYvmfPHvczAAAAAAAAIE0yrhYsWGCrV6+277//3iZNmmS5c+d2048fP06PKwAAAAAAAKRdxpV89tlntm3bNqtVq5Zt3Lgx5eYGAAAAAAAAOJPAVdGiRV12lZqzX3nllS4LCwAAAAAAAEjTwFWmTJnc/3FxcTZ58mTr0qWL3XjjjfbKK6+k6IwBAAAAAAAgtkXc48rzvJDv+/TpY5deeqm1bt06JecLAAAAAAAAMS7iwNWGDRusUKFCIdOaNm1qF198sS1btiwl5w0AAAAAAAAxLOLAValSpcJOv+KKK9wDAAAAAAAAOGuBqzvuuMMmTJhgefLkcV+fyvvvv58iMwYAAAAAAIDYlqTAVd68eQNN2fU1AAAAAAAAkC4CV2+++WbYrwEAAAAAAIDUkjnVXhkAAAAAAABI7YyrKlWqBEoFT2f58uVnMj8AAAAAAABA0gNXTZo0sdQ0evRoe/75523btm1WqVIle+mll6xGjRphn3vdddfZl19+mWD6zTffbLNmzXJf33///fbWW2+F/Lxhw4Y2e/bsVHoHAAAAAAAASJPAVb9+/Sy1TJ061bp3725jxoyxmjVr2siRI12Qae3atVa4cOGwoxYeP3488P1ff/3lgl133XVXyPNuvPHGkH5ccXFxqfYeAAAAAAAAEIU9rkaMGGHt27e3Nm3a2GWXXeYCWLly5bLx48eHfX6BAgWsSJEigce8efPc8+MHrhSoCn5e/vz5z9I7AgAAAAAAQJoErk6cOGHDhg1zpXwKCCmQFPyIhDKnli1bZvXr1/+/Gcqc2X2/ePHiJL3GG2+8Yc2bN7fcuXOHTF+wYIHL2Lr44outY8eOLjMrMceOHbP9+/eHPAAAAAAAAJDBAlf9+/d3WVLNmjWzffv2uTK/O+64wwWcnn766Yhea9euXS4Qdv7554dM1/fqd3U6S5cutR9++MEeeOCBBGWCEydOtPnz59vQoUNdT6ybbrrJ/a1wBg8ebHnz5g08SpYsGdH7AAAAAAAAQBr1uAo2adIkGzt2rDVq1MgFqu655x4rV66cVaxY0b799lvr3LmznS3KtqpQoUKCRu7KwPLp55o3zaOysOrVq5fgdXr16uUCcD5lXBG8AgAAAAAAyGAZV8qEUjBIzjnnHJd1JbfccktgVL+kKlSokGXJksW2b98eMl3fqwzxVA4dOmRTpkyxdu3anfbvlC1b1v2tdevWhf25+mHlyZMn5AEAAAAAAIAMFrgqUaKEbd261X2tLKa5c+e6r7/77ruIR+7Lnj27VatWzZX0+U6ePOm+r1279il/d/r06a43VYsWLU77dzZv3ux6XBUtWjSi+QMAAAAAAEAGClzdfvvtgUBTp06d7KmnnrILL7zQWrVqZW3bto14BlSip9LDt956y37++WfXSF3ZVBplUPS6KuULVybYpEkTK1iwYMj0gwcP2uOPP+7KFjdu3OjmtXHjxla+fHlr2LBhxPMHAAAAAACAtBFxj6shQ4YEvlaD9gsuuMCNAKjg1a233hrxDOg1du7caX379nVliJUrV7bZs2cHGrZv2rTJNX4PtnbtWlu4cGEg2yuYSg9XrVrlAmF79+61YsWKWYMGDWzgwIERZ4QBAAAAAAAg7WTyPM9Lw7+fLqk5u0YXVP8u+l0Bsal0z8h69mUkG4c0SutZAAAAABDj9icx9hJxxpVs2bLFZTzt2LHD9aQKdjZHFQQAAAAAAED0ijhwNWHCBHvwwQddY3X1l8qUKVPgZ/qawBUAAAAAAADSJHClZuzqR6WG6fF7TwEAAAAAAAApJeLI0+HDh6158+YErQAAAAAAAJC+Alft2rWz6dOnp87cAAAAAAAAAMktFRw
8eLDdcsstNnv2bKtQoYJly5Yt5OcjRoyI9CUBAAAAAACAlAlczZkzxy6++GL3ffzm7AAAAAAAAECaBK6GDx9u48ePt/vvvz9FZgAAAAAAAABIkR5XcXFxVqdOnUh/DQAAAAAAAEjdwFWXLl3spZdeivTXAAAAAAAAgNQtFVy6dKl9/vnn9vHHH9vll1+eoDn7+++/H+lLAgAAAAAAAGceuMqXL5/dcccdkf4aAAAAAAAAkHqBq3/++ceuv/56a9CggRUpUiSyvwQAAAAAAACkVo+rrFmz2kMPPWTHjh2L5NcAAAAAAACA1G/OXqNGDfv+++8j/0sAAAAAAABAava4+ve//22PPvqobd682apVq2a5c+cO+XnFihUjfUkAAAAAAADgzANXzZs3d/937tw5MC1TpkzmeZ77/8SJE5G+JAAAAAAAAHDmgasNGzZE+isAAAAAAABA6geuSpUqFflfQZoq3XNW1H4CG4c0SutZAAAAAAAA6SVwJevXr7eRI0fazz//7L6/7LLLrEuXLlauXLmUnj8AAAAAAADEqIhHFZwzZ44LVC1dutQ1YtdjyZIldvnll9u8efNSZy4BAAAAAAAQcyLOuOrZs6d169bNhgwZkmD6E088Yf/6179Scv4AAAAAAAAQoyLOuFJ5YLt27RJMb9u2rf30008pNV8AAAAAAACIcREHrs477zxbsWJFgumaVrhw4ZSaLwAAAAAAAMS4iEsF27dvbx06dLDffvvNrrrqKjftm2++saFDh1r37t1TYx4BAAAAAAAQgyIOXD311FN27rnn2vDhw61Xr15uWrFixezpp5+2zp07p8Y8AgAAAACARJTuOStql83GIY3SehaQ0UoFM2XK5Jqzb9682fbt2+ce+rpLly7uZ8kxevRoK126tOXIkcNq1qzpRixMzIQJE9zfCX7o94J5nmd9+/a1okWLWs6cOa1+/fr266+/JmveAAAAAAAAkEECV8GUeaXHmZg6daorMezXr58tX77cKlWqZA0bNrQdO3Yk+jt58uSxrVu3Bh6///57yM+fe+45e/HFF23MmDG2ZMkSy507t3vNo0ePntG8AgAAAAAAIB0HrrZv324tW7Z05YFZs2a1LFmyhDwiNWLECNc3q02bNnbZZZe5YFOuXLls/Pjxif6OsqyKFCkSeJx//vkh2VYjR460Pn36WOPGja1ixYo2ceJE27Jli82cOTPi+QMAAAAAAEAG6XF1//3326ZNm1yvK5XiJbc8UI4fP27Lli0L9MqSzJkzu9K+xYsXJ/p7Bw8etFKlStnJkyetatWq9uyzz9rll1/ufrZhwwbbtm2bew1f3rx5XQmiXrN58+bJnl8AAJA4+msAAAAgzQNXCxcutK+//toqV658xn98165dduLEiZCMKdH3a9asCfs7F198scvGUiaV+msNGzbMjW74448/WokSJVzQyn+N+K/p/yy+Y8eOuYdv//79Z/zegIyEi00AAAAAQFSUCpYsWdKV46WV2rVrW6tWrVzg7Nprr7X333/fzjvvPHvttdeS/ZqDBw92WVn+Q+8RAAAAAAAAGSxwpf5RPXv2tI0bN57xHy9UqJDri6W+WcH0vXpXJUW2bNmsSpUqtm7dOve9/3uRvKZKFf0REvX4448/kvmOAAAAAAAAkGaBq2bNmtmCBQusXLlybkTBAgUKhDwikT17dqtWrZrNnz8/ME19q/S9MquSQqWGq1evdv22pEyZMi5AFfyaKv3T6IKJvWZcXJwbqTD4AQAAAAAAgAzW40oZVympe/fu1rp1a6tevbrVqFHDvf6hQ4fcKIOissDixYu7cj4ZMGCA1apVy8qXL2979+61559/3n7//Xd74IEH3M/VLL5r1642aNAgu/DCC10gS43kNQpikyZNUnTeAQAAAAAAkI4CVwoypSRlcO3cudP69u3rmqerd9Xs2bMDzdU1gqFGGvTt2bPH2rdv756bP39+l7G1aNEiu+yyywLP6dGjhwt+dej
QwQW36tat614zR44cKTrvAAAAAAAASOPAlYJAuXPnTvKLRvr8Rx55xD3CUVlisBdeeME9TkVZV8rM0gMAAAAAAABR3ONKZXlDhgyxrVu3JvocjTQ4b948u+mmm+zFF19MyXkEAAAAAABADEpSxpWynp588kl7+umnrVKlSq4flXpGqfROpXs//fSTLV682LJmzepG6HvwwQdTf84BAAAAAAAQ1ZIUuLr44ottxowZrt/U9OnT7euvv3Z9pY4cOWKFChWyKlWq2NixY122VZYsWVJ/rgEAADKA0j1nWbTaOKRRWs8CAACIARE1Z7/gggvs0UcfdQ8AAAAAAAAgXY0qCAAAACBlkJUHAEAKNGcHAAAAAAAAzjYCVwAAAAAAAEiXCFwBAAAAAAAgXSJwBQAAAAAAgOgJXH399dfWokULq127tv35559u2ttvv20LFy5M6fkDAAAAAABAjIo4cDVjxgxr2LCh5cyZ077//ns7duyYm75v3z579tlnU2MeAQAAAAAAEIMiDlwNGjTIxowZY2PHjrVs2bIFptepU8eWL1+e0vMHAAAAAACAGBVx4Grt2rV2zTXXJJieN29e27t3b0rNFwAAAAAAAGJc1kh/oUiRIrZu3TorXbp0yHT1typbtmxKzhsAAACiTOmesyxabRzSKK1nAQCAqBNxxlX79u2tS5cutmTJEsuUKZNt2bLFJk2aZI899ph17NgxdeYSAAAAAAAAMSfijKuePXvayZMnrV69enb48GFXNhgXF+cCV506dUqduQQAAAAAAEDMiThwpSyr3r172+OPP+5KBg8ePGiXXXaZnXPOOakzh0AqoEwBAAAAGQXnrgBiWcSBq3379tmJEyesQIECLmDl2717t2XNmtXy5MmT0vMIAAAAAACAGBRxj6vmzZvblClTEkyfNm2a+xkAAAAAAACQJoErNWW//vrrE0y/7rrr3M8AAAAAAACANCkVPHbsmP3zzz8Jpv/999925MiRFJkpAED6Q38NAAAAAOk+46pGjRr2+uuvJ5g+ZswYq1atWkrNFwAAAAAAAGJcxBlXgwYNsvr169vKlSutXr16btr8+fPtu+++s7lz56bGPAIAAAAAACAGRZxxVadOHVu8eLGVLFnSNWT/6KOPrHz58rZq1Sq7+uqrU2cuAQAAAAAAEHMizriSypUr26RJk1J+bgAAAAAAAIAzCVydPHnS1q1bZzt27HBfB7vmmmuS85IAAAAAAADAmQWuvv32W7v33nvt999/N8/zQn6WKVMmO3HiRKQvCQAAAAAAAJx5j6uHHnrIqlevbj/88IPt3r3b9uzZE3jo++QYPXq0lS5d2nLkyGE1a9a0pUuXJvrcsWPHul5a+fPndw81io///Pvvv98F0YIfN954Y7LmDQAAAAAAABkk4+rXX3+19957zzVkTwlTp0617t2725gxY1zQauTIkdawYUNbu3atFS5cOMHzFyxYYPfcc49dddVVLtA1dOhQa9Cggf34449WvHjxwPMUqHrzzTcD38fFxaXI/AIAAAAAACCdBq4UXFJ/q5QKXI0YMcLat29vbdq0cd8rgDVr1iwbP3689ezZM8Hz4zeFHzdunM2YMcPmz59vrVq1CglUFSlSJEXmEQCA+Er3nBW1C2XjkEZpPQsAACSKYzAQWyIOXHXq1MkeffRR27Ztm1WoUMGyZcsW8vOKFSsm+bWOHz9uy5Yts169egWmZc6c2ZX/LV68OEmvcfjwYfv777+tQIECCTKzlLGlcsIbbrjBBg0aZAULFgz7GseOHXMP3/79+5P8HgAAAAAAAJBOAldNmzZ1/7dt2zYwTT2k1Kg90ubsu3btcs8///zzQ6br+zVr1iTpNZ544gkrVqyYC3YFlwnecccdVqZMGVu/fr09+eSTdtNNN7lgWJYsWRK8xuDBg61///5Jnm8AAAAAAACkw8DVhg0bLL0YMmSITZkyxWVXqd+Vr3nz5oGvlRWmLLBy5cq559WrVy/B6yj
jS322gjOuSpYseRbeAQAAAAAAAFIscFWqVClLKYUKFXIZUNu3bw+Zru9P159q2LBhLnD12WefnbY8sWzZsu5vqTdXuMCV+mHRvB0AAAAAACB9yZycX3r77betTp06rkTv999/d9M0GuCHH34Y0etkz57dqlWr5hqr+06ePOm+r127dqK/99xzz9nAgQNt9uzZVr169dP+nc2bN9tff/1lRYsWjWj+AAAAAAAAkIEyrl599VXr27evde3a1Z555plAT6t8+fK54FXjxo0jej2V6LVu3doFoGrUqOFe49ChQ4FRBjVSYPHixV0fKhk6dKj7+5MnT7bSpUu7JvFyzjnnuMfBgwddvyr14lLWlnpc9ejRw42C2LBhw0jfLgAAAICziBHjAABnlHH10ksv2dixY613794hjc4VeFq9enWkL2fNmjVzZX8KRlWuXNlWrFjhMqn8hu2bNm2yrVu3hgTONBrhnXfe6TKo/IdeQzRPq1atsttuu80uuugia9euncvq+vrrrykHBAAAAAAAiPbm7FWqVEkwXT2ilCmVHI888oh7hKOG6sE2btx4ytfKmTOnzZkzJ1nzAQAAAAAAgAyccVWmTBmXFRWfsqQuvfTSlJovAAAAAAAAxLiIM67Uk+rhhx+2o0ePmud5tnTpUnv33XddD6px48alzlwCAAAAAAAg5kQcuHrggQdcOV6fPn3s8OHDdu+997rRBUeNGmXNmzdPnbkEAAAAAABAzIk4cCX33XefeyhwpVH8ChcunPJzBgAAAAAAgJiWrMCVL1euXO4BAAAAAAAApEngqmrVqjZ//nzLnz+/G1EwU6ZMiT53+fLlKTl/AAAAAAAAiFFJClw1btzY4uLi3NdNmjRJ7XkCAAAAAAAAkha46tevX9ivAQAAAAAAgNSSOdVeGQAAAAAAAEjtjCv1tjpVX6tgu3fvPpP5AQAAAAAAAJIeuBo5cmRSngYAAAAAAACc3cBV69atU+4vAgAAAAAAACkVuErM0aNH7fjx4yHT8uTJcyYvCQAAAAAAACSvOfuhQ4fskUcescKFC1vu3Lld/6vgBwAAAAAAAJAmgasePXrY559/bq+++qrFxcXZuHHjrH///lasWDGbOHFiiswUAAAAAAAAEHGp4EcffeQCVNddd521adPGrr76aitfvryVKlXKJk2aZPfddx9LFQAAAAAAAGc/42r37t1WtmzZQD8rfS9169a1r7766sznCAAAAAAAAEhO4EpBqw0bNrivL7nkEps2bVogEytfvnwsVAAAAAAAAKRN4ErlgStXrnRf9+zZ00aPHm05cuSwbt262eOPP54ycwUAAAAAAICYF3GPKwWofPXr17eff/7Zli9f7vpcVaxYMeYXKAAAAAAAANIocBVf6dKl3QMAAAAAAABIk1LBxYsX28cffxwyTaMLlilTxgoXLmwdOnSwY8eOpejMAQAAAAAAIHYlOXA1YMAA+/HHHwPfr1692tq1a+fKBdXrSs3ZBw8enFrzCQAAAAAAgBiT5MDVihUrrF69eoHvp0yZYjVr1rSxY8da9+7d7cUXXwyMMAgAAAAAAACctcDVnj177Pzzzw98/+WXX9pNN90U+P7KK6+0P/7444xnCAAAAAAAAIgocKWg1YYNG9zXx48fdyMJ1qpVK/DzAwcOWLZs2ViqAAAAAAAAOLuBq5tvvtn1svr666+tV69elitXLrv66qsDP1+1apWVK1cuWTMxevRoNzJhjhw5XPnh0qVLT/n86dOn2yWXXOKeX6FCBfvkk09Cfu55nvXt29eKFi1qOXPmdH24fv3112TNGwAAAAAAANJ54GrgwIGWNWtWu/baa11fKz2yZ88e+Pn48eOtQYMGEc/A1KlTXY+sfv36uSyuSpUqWcOGDW3Hjh1hn79o0SK75557XGP477//3po0aeIeP/zwQ+A5zz33nOu5NWbMGFuyZInlzp3bvebRo0cjnj8AAAAAAACk88BVoUKF7KuvvnK9rvS4/fb
bE2RBKfgUqREjRlj79u2tTZs2dtlll7lgk7K5FAgLZ9SoUXbjjTfa448/bpdeeqkLqFWtWtVefvnlQLbVyJEjrU+fPta4cWOrWLGiTZw40bZs2WIzZ86MeP4AAAAAAACQzgNXvrx581qWLFkSTC9QoEBIBlZSqFfWsmXLXClfYIYyZ3bfL168OOzvaHrw80XZVP7z1Ydr27ZtIc/RPKsEMbHXBAAAAAAAQPqTNS3/+K5du+zEiRMhoxWKvl+zZk3Y31FQKtzzNd3/uT8tsefEd+zYMffw7du3z/2/f/9+iwYnjx22aJXcz4hlwvJgHWG7OdN9CfsRlgnrCcfg+NiXnPnyYP/KMmEdYbtJiX0rMtZnq8q5dBu4Si8GDx5s/fv3TzC9ZMmSaTI/SLq8I1laLBPWkUix3bBMWEcix3bDMmE9YbtJCexLWCasI2w3SOjAgQOuUi5dBq7UN0tlh9u3bw+Zru+LFCkS9nc0/VTP9//XNI0qGPycypUrh31NjZKoBvG+kydP2u7du61gwYKWKVOmM3iHsRctVbDvjz/+sDx58qT17KQLLBOWCesJ2w37EvavHG84BqcXnJewTFhH2G7Yl7BvTU+UaaWgVbFixU75vDQNXKknVrVq1Wz+/PluZEA/aKTvH3nkkbC/U7t2bffzrl27BqbNmzfPTZcyZcq44JWe4weqdJDW6IIdO3YM+5pxcXHuESxfvnwp9j5jjYJWBK5YJqwnbDvsS9i/cszhGJwecF7CMmE9YbthX8L+leNN+nWqTKt0UyqoTKfWrVtb9erVrUaNGm5EwEOHDrlRBqVVq1ZWvHhxV84nXbp0sWuvvdaGDx9ujRo1silTpth///tfe/31193PlSGloNagQYPswgsvdIGsp556ykXw/OAYAAAAAAAA0r80D1w1a9bMdu7caX379nXN05UlNXv27EBz9U2bNrmRBn1XXXWVTZ482fr06WNPPvmkC07NnDnTrrjiisBzevTo4YJfHTp0sL1791rdunXda+bIkSNN3iMAAAAAAAAyYOBKVBaYWGngggULEky766673CMxyroaMGCAe+DsUbllv379EpRdxjKWCcuE9YTthn0J+1eONxyD0wvOS1gmrCNsN+xL2LdmRJm80407CAAAAAAAAKSB/6vBAwAAAAAAANIRAlcAAAAAAABIlwhcAQAAAAAAIF0icAUgRdAuDwAApAeckwBAdCFwBSBFaDRP2bt3b8wv0VGjRtnixYtjfjkg6f7++28WFwCkEM5JACC6ELgCzgB39EK9/fbb1qNHD/vnn39idtkoYPXCCy/Yq6++asuWLUvr2UE69c0339jx48fd10OGDLGPPvooZrcZX6y/f0S2nrC+4HQ4Jzm1kydPxvxKFLwfYZ/COpHY8QbpA4ErIBk7sP3797uv/Tt6+J81a9bY3LlzA8smFnf4tWvXtuHDh7tl8eKLL9p3331nsY6T41C//fabPfzww3bvvfdap06drE+fPnbppZfG9P5E64j//o8ePWqHDx9O61lKF9h2El9Pdu7c6Y7F+/btY1khLM5JQn3++ef2n//8x6ZNm+a+z5yZy0DtQ5TxrBuu2rfE6j7XP1/314mZM2e6Y3Es47wk/cnkxeKVJXAGlBkxevRod7LcqlUru/HGG61MmTIxt0yDA3cnTpywLFmyuAN/pUqV7Pbbb7dBgwZZrNHJT7Zs2dzXb7zxhr3++ut20UUX2eOPP24VK1a0WD3w+ydCixYtcqWkCu7lyZPHrTOxSNvJ+++/b4888ogdOnTIXUzUrFkzZP2J1XVE2WfKRvvxxx+tWbNmdv3111uDBg0sFgUvl59++sl2795tF154oZ1zzjmWO3dui+VjzrPPPmvz5s2zXbt2WZEiRaxv37529dVXp/UsIg1xTnJqTzzxhAtG5MyZ052zaVtSEKt06dIWq3STcc6cOe5Gic7RdN5aoECBkH1vLPHf98cff2xdu3a1devWWazivCR9ir2tEsl
CfPN/vv32W3cxVbVqVStevLi99tprNnDgQHdXL9YEZ4f4AQjt6G+77Tb7/vvv7dixYxZr24gfdNDJj8oEt23bZpMnT7ZnnnnGli9fbrHIP/l77LHH7I477nBZRgpcvfXWW3bgwAGLNdpGsmbNaoUKFXIXELpoUGnpwYMH3fqjC4pYXUd69+5tzz//vF133XV21113ubJbZaNNmTLFYo32J/5yefLJJ61p06bWvHlza9SokXXp0sU2btxosXrMeeqpp2zEiBHWuXNnl9Wqi86bb77ZduzYkdaziDTEOUniXnnlFRs/frw7H1mxYoV169bNfvjhh5DARKyd52u/+txzz1mTJk3s7rvvdsebW265xQXDte+Nlcyr9u3b25VXXum+9o85OqfPly9fTPff5LwknVLGFXAqJ0+eDHx94MABb+PGjTG5wDZs2OD179/fe+655wLTxo0b59WpU8dr1aqV9/PPP3ux4MSJEyHv//rrr/cWL17s/fXXX27aDz/84OXMmdMbP368F4uGDx/u5cmTx/vss8+8VatWeWPGjPEuuugi77777vO+//57Lxb3G/PmzfOqVavmffHFF94ff/zhtWzZ0qtYsaI3cuRIb9++fV6sbTeyZ88eb/Pmzd4777zjXXnllV7Tpk29gwcPhjzn6NGjXqz45ZdfvAoVKniffvppYNry5cu9Dh06uH3sihUrvFg0YsQI7/zzz/c+//xz9/3999/vFSxY0Pv666+9WKRtpnbt2t7cuXPd9x999JGXL18+75VXXgnZzoL3P4hunJOcXpcuXbznn3/eff3ee++5c5TXXnvNfR//uBMLZs6c6V1xxRXet99+677/z3/+451zzjneBRdc4F1++eXezp073fR//vnHi3Y6tpQsWdK76aabAtPeeustr27dul6s70s5L0l/yLhCovy7Df5dGKXm667vZZddFnN3e9evX+/uyIwdO9ZlS/jatWtnbdq0cXetdOdGd7Bi5S6EMiNUsqKyJ5U8KRNg1qxZVqJECevfv79LQd++fbvFCm0n2ma++uora9GihdWrV88qVKhgDz74oPXr18+VtagMSnc7Y+nu97vvvmuffvqpK/dSJo3Wj4kTJ1qtWrXcHeAJEya4HhOxknI+f/58VxqofaiyNrVP7dixo23atMnatm1rR44ccc9TDyw9N1rFv5ut9WXLli0hmZpVqlRxd4M1/ZdffrFYov2JlsWCBQtcJppKJj/55BObMWOGOxbXrVvX/VylptEseD3Re9VyUYZz5cqV3X7lnnvuscGDB7ttSNuORnTdunVrTPeLiy/aM2k4Jzm91atXu+1HxxSds+pcpEOHDm770jajjKxYkj17dpelqRJ9nbf6y2TcuHG2efNmVzmg89dYaGegY4uymletWmUNGzZ003Rs8SsIYmlfynlJBpDWkTOk76yAX3/91RswYIBXuXJld3dCdzqVYeNn18QSZVsVLVrUu+WWW7w///wz5GfKLrrsssu8hx56yDt27JgX7evGSy+95GXKlMn78ccf3ffKLurUqZNXrFgxr1GjRl6NGjXcXauVK1cm+N1o5d+Vat68ude6desEd+sef/xxL2/evN6tt97q/fTTT14s0DLRXTutK3rf8e/cPfjgg16VKlW8Z555xjt06JAX7fx1oHTp0i4rURmLon3Gm2++6TKvtB+pX7++25b+/vtvLxpt37498LXu+v/222/eli1bXMaVMha13QSvKzruPProo16s0ef/r3/9y1u2bJnLMFJGgDI4/XXm9ddfD2RiRbu+ffu6Y/DWrVu9Jk2aeD169PDOPffcQNaI6HjUuHFjb/78+Wk6r+nNtm3bvGjEOcmpaf8wY8YM9/Wrr77qXXXVVe644+9DZPfu3e6cTcfgWKN9yeHDh71rrrnG7VtEGeA6J4mLi3PncrFk4cKFXvHixb277rrLXdNce+213gsvvODWow8//NCbMmWKN3jwYHesjkacl2QMBK4Q1pw5c9w
OKnfu3N4dd9zhDRs2zO3gy5YtGxMHuOCLpuCvVSaoAJ5OmlWyEOztt9925YTRTuVeOsgrtTq+RYsWuRMkXXwrWKEg3/Hjx71olFgwbsiQIW67iV8WOHToUK9WrVreI488ErWBvHDvS0GIu+++2wVrJk2alKD8TT9TqW00pqMHvyeVEmu7+O6771zZ27PPPutlzpzZlYL5gQgFgLt27eo9/PDDgaBVtJUq6ORY24eCDHqv5513XmC/2a9fPy9Hjhze+++/H9hv6EKiatWq3osvvuhFs8T2CdqHXnjhha60J7j8WhcPuomki4poXx461pQoUcIF8KR9+/bu+KL1J7iNgUpdGjZsGLX71+RQa4csWbK485NoxTlJQjt27PBuuOEG77HHHnPfq22Bzj8UlFEQXMem9evXezfffLO7YRKtN0lOd1zWzXndJNI65AezmjVr5oLf0bwfSexcTcfnUqVKuf3r1Vdf7dWrV89d89SsWdMlMOimWrSdkwjnJRkHowoiOPvOlX1pdJ7p06e74dkbN27sUmiVLquR9CZNmmQLFy50qdnBI7hEE/99qZxn6tSpbiSnsmXLuibsSi9WWYKWT/369d2oG8WKFbNYoTK4++67z6Wca1Q0lX5pndH6EbwuqOm2Us81go3WGS2/aC39Upq5lkHevHnd8pBbb73VNWfXelK+fHn3MzVWVvq5tid/yOVoGrVGy8Avo9UIaHrP2l7OO+889zO9d5Xw9OrVyzVD1c98/rKI1n2KShD++uuvQImt6L2OHDnSHn30UddoWvuSUy3TaKH3rfKuuXPnuvenkSavuOKKwM/VcFsl2dpe8ufP78oXVLKhAR+ibVn4gvcFKiX2Rw0sWrSoK1NXOama9qvcR8OTqyRO+2HtZ1VKGM3lLO+8845t2LDB7RfUqN+nkhaV6GskQe1jli5danv27HH7XZW4RNv+Nbm0jqgRt9Yp7W+iDeckidM5WM+ePd0+Redg2ka0f9XIvhoVu2TJkm6f+uWXXwYGBonmfUk4alOgNgY6X+nevbsNGzbMTddIg9p/ROMyCd43an+pdeGCCy5wrQs0YIxG9VUZqUZX/Prrr0PORfxztGjbv3JekoGkdeQMaUsNgePfydbdTd3ZDE6blNtvv93r1auXuzMTjdkRwT744ANXhqDGwKNGjXJf606D0qpl0KBB7i7Vv//976hNmw1HmRFaB3T3P/hud/DdG/9ujO6Aq7QyuJl9NAhe97t37+4VLlzYK1KkiLsr5S8TZSdq29J6o2wJNWfXw7+rGU3bjzIwv/nmm8D3TzzxhMvM1HJRJpUas4veuzIidMd32rRpCTKvovXupt5nx44dAxmIwbQeKOMqW7ZsMZHJ6hs4cKBbHgUKFHAN2ONTo+0WLVq4MjktOz/7Ktru9D799NPeJ598ElJKqrvdajau/Yef1aoMNG1P5cuXd3e+VTqp7Shal0vwPkFNg7WuaH2IT5m/KmtRVviTTz4Z2L/GUvZIUqxevdq78cYbozL7mXOShPz9gbaDBg0auPNUnZPIpk2bXGa8MjUXLFgQ8txYpH2Mzke0Xy1XrpzLUvO3k2g8Jwk+91TliDJZtY9VCanO1/xzOWUgaVAQZeSd6jWiCeclGQOBqxg3duxYL2vWrKcdAU59JNRfIxZK4RSI0ohnqu32S1UUmFAPp+ADmS7QVRsfP8AXLRI7aOvERxcJGn0l+GI7XPBKvUhU/hONBzqNNqLRztTHSz2rtL7owlJ9m3y64JwwYYIrn4zG0i+NyKP0cfWv0nL48ssv3UmQSo21PG677TbXV+Pjjz92z9cyUD8NpeZHax+acNuNAt7aZlSyo5NkCd4mdAGuXmDRuJ2Eox6Jupi+9957vUKFCgVGyIt/8RS8rUTbhZW2F5XuKDj31VdfuZFZFfDVxaT2GSqhrV69utuHiEa5Ur9JlRzr59F4wRlu/df7035WF1FaNqe7mIym/WtKioYegpyTnJrOM7Rf2bt3b2B
7Ukm6jtH+KHnhxPo2o/VK24fKBv11LBqDvPHXFZXpq0ei1o2pU6e6skCds/kl2Qpi6fpQN2hjAeclGQOBK3iTJ092vVbeeOONsCeQ2ph1Eu0HKaLxLkSw33//3R3o1XNGQRpdZCvzyqc+NL5TnQxkZMGfsQIMaso4a9aswLDJCmD27t3bu+SSS9yJUbj1Rs8PbuAeTbStKHtIQ9P7J306WdTJgO7aPfDAAzFzgqgLa118K0OkW7dugYCvKJB15513ugt0rQ/+CaEabUfjsgjebtauXeuCEWqM7AcXFPxW8MoPRoTrpRftwavgz137WK0fCl5pWfkU7FZvHl+0LhNlIyoLTw+tG8H7Ul086A54tWrV3P43nGjahoK3Hd0s0rHGz8rUPkODfSirdenSpTGzreD/cE5yajrO6OZH/vz53Q0Bf5+hY4/O07R/QWSi8VrH32cqg1W9AoPNnj3bXfs89dRTgeOLeqNF03EmMZyXZBwErmJU/B3RW2+9lWjwSqUcGmni008/9WKBmlqqrEtZaLoDrqCVf/dFWTbXXXeduyCPVsEXA8oqUxaRloNOihSg8O/maSSwPn36uAsKlQ+GE3zxGS1UAqmSHmUWaZkE07LRiD0XX3yxuyCPlZO66dOnu1IUBSCUbh1M24rKeZQ14Y9w5IumE6Lg7UbbQ6VKldxFhO5iapRJlWocOXLEBfdUGjhz5sxTvkas0L5V64dGW3z++efdSEbap0TTuhFf8N18ZZspC7FgwYIJRk5U8ErrjkZpPV1WdEYWvN6rDF/lTRrMQdmrKtv3g5wa3EAjT2qAg1jcVmIV5yRJp4CVBoDRMUbnIBMnTnQVE9qmNEAITr1+6RwlFva1LVu29O655x73dfCxVtnfqjDReW6waD4ehxOL5yUZBYGrGLRr167A19pBaxQN0YhfwcErfwenPhvKronGC+9wd261w1KwSv2J4vek0QWpelv9+eefXrTTzlo9qvxMCL/+WxkAfq8vBa86d+7sDoDByzCadu7hLpD++OMPd4BXLxoF74IpW2D48OEuSzEa79gFL5Pgz1lZRLrA1sWlsiKC6eJcvSP8zMVovuhUTzcF8DRKkfYlbdq0caPoqWeE7N+/3wWvtC1FcwA8Ug899JALbqq8OJp7jASXlqtUX/tSlQpq+1C2pt8TzqcbRyrFbdu2rRfttC9V3zMF/3XOod5VukGgG2ui9UJl/Or3xUV47OGcJCFtBxrBWJUCPmVZKbir0fF0TFaWr4438W8cIfRc5J133nHLSfsc/xw3o0vsGKqSc43gG7/HpAKdOg5HQ2lxSoiV85KMhMBVjNHFlOqadeGtRtLqGxGcFaMdd/zMK1GmQLRccKrMLfiOt4YG1pDBurur8h7Rhbca4Gq48XHjxrnsCN3FUlNyDWUfjYJ7pegkSBdLfkmTGgirx5kyAlSuoZMhP/NKPcGitXQj+ACl96u7/nqItiGVNF166aXu/2Aqc/GXRbQd5ILfj7aj4G1J24kuwLXu/Pe//w35PW030bYsgunzVlBKwW7/jq2yVLXdKEAhKn1SsE8nhRr0IZr6EyVX8D5D2a7+99HYYyT+kNvKsvKPvzo2qxGuslrj93/TcSmatx3RcqhatWogw8p/3+qvogEu/D5oWi+ULRBNN0cQHuckp6bgrjJ7dZNVrQuCS41FxyOd76p1gTJ/Od6cPmilnk7hBgzJiIKPGepHqhskwQPpNG7c2GVX6Qba5s2b3fqiQag0PdrO5SMVS+clGQ2BqxijAJRShhW80sFODXLD7cBP1fMqo/fziouLc82jRXe3lVKtHbXK4XQhocwz0U5ed7k1TXXf2qGr8WU0UsPoIUOGeHv27AlM0wWEenwpiKc7UOrf5J8s6QCv0UiUXRRt60i4g/6wYcPcBaXuvOgOjN/bTAE+P3ilDKz4onmZaDQ8lThpu9CoRf4dOgWvNE0BnPjBq/ivkdHFfy+6mFZKuU4OP/roIxe08rcbBTw1klP
8oAQXE//bTsJtK9G0rojeo7IgVEIafPwNvomibUr7GjXNjeblEf/zVmNkLRM1CQ6mZaQs33DlOwSvohfnJKemgRp0Hq99xvr1613Wu7IVg9s2BO8v/O2N403o8ggOWulaQNlr0UatLXR9o/VF5+06P9O5ux6qDNB+V6PZ6qa0Mlr94Ey0nb9GKlbOSzIaAlcxInhD8wMPKgPTyWI48csGM/pOLPj9qxeP3rsuIHXXWxeTPjXbVhaa0mWDo+2q9/Ybk0cTfaZqvqj1QQ+lDwcHr2Tw4MHu4OYPp6zlpe91JzwWLhx0IqjSlNGjR7ssGQUxlY3nl/woeKWglU4a42cqRvMyUTmcSnuUrageeLrz65fvvPfee17Dhg3dcoqFkh4N2KD9g24M6M62ApwKTvhBKz+jRAEJv+wJsdlj5HRDbutmikZ20n7GH90p2ihb1f/MNZiDSu91t1/ZmtqnBN8Qkauvvtp7+OGH02hucTZxTnJ6KgNUdqKyNP3jT65cudwN2DJlynh9+/YNPDc4QyQjn8OfjaBVtGRaBXvppZfcsWbRokXuJoCue9TDV+dm/nLQqM/vvvuuu7EfjaPVJlcsnZdkJASuYozuzKjHlQ58KktQJo1/hyH+Qc1v2B4tG6v6MWlnrVES1XRPowVqtDONpBFMPWkUvNIBzS+Hi3bqJaLGg/q8n3zyyZDGjCqR1MmQDmg6mN1+++2uga4vmoNXyh7SXSilWfv93pRFc8EFF7heTgpq+uuWglbRvCyCy3d0984fJVC0nehCW/3ffNp+unTpEtV3p7TP1D5VJ756v6KsTO1bFLzyywPVL0P7W12Ax8I6Eolo7zGS1CG3g9eLBQsWuBsD0bjtqFwyZ86cLqNZ/RH1tX8DTTcAdJNgwoQJgeCVjkU6TivrFbGDc5LE6TxMfTR13FUgQuerammhmyfK+FU5srKgkVDwPjXag1aivqLazwbT+arOYTXwRzico8TeeUlGQuAqhigApRNAn+5wKgNAG6Oybnyqk/cvyBWB1wb75ptvehmdyt70Xv07t0qt1nt75ZVXEuyo1RMge/bsboSWaL5LpfetA7k+czWMVnq+lomCV1o/RBdWyqbRslNwQmVx/t2YaF42fm+vHj16BO5KqWz05Zdfdun5Gm1E25M/uEG0HvSVUeb3uBOd4CnlfM2aNSF3dNUjQRfiaqwcXzRdgId7L7rDrbuYfv87jbKoxqcK5NWsWdONPqltx19W0baOJFe09xhJzpDbCt4EnxxH47qijF31i9QFdvxyYl1kKSNa2SO6+NaFuG4ekAEQGzgnSZxukgQvJ/9Gq87d/GOLthkdb3QOG03H3ZTmX9tEc9BKlPmuR/z1RoPI6NwkfnYrYu+8JKMhcBVDVP6mTAD1ovEPaLpDo55XatCnshY1I9dJYvDJslJIVULnl4plFPGDKnpPKoVT4EV3e/2yQd2tUnlG/AsEBbh++eUXLxasW7fO1bkri+bDDz8MBK90caULBgWv1MtJmVb+BUQ0XlCFo8CU1v1rrrkm0MdKB/sqVaq4fmnNmzePyiCe3o9KlfzAtR+80nvXxbZKSH1aFxToVHBGpT+xQD2stG7ovSu4p95ECmD5y0l3NdWHRNtNcDYeF+Cx12MkkiG3dXyKxn2rPm//fansWp+33rfKnJSVGEzbizJ9tU0pazPWjjngnCQ+3SzTNqP/gylI1aJFi8D+REFhZSxG6+AwKUF99LQsle0ZLcGIxPaNOrbqmKKb8MGUoadz2FipKkkqzkvSPwJXUSrcRbTubCpw5TeWDj4ZVMNYRd9VzhJuyM+MFrTy5z1+Wqd20mo+qOaEPvXV0HIJF7yK1jtNTz31lMsmCqagnj/kuk58goNX8cXCcgrehlTKonXE7ymhgIW2GaXpR/uJYbt27VxQ8+233w6UkKoPjXps6OTHp/2GpikjLdppX6HtQxmrunOpYJVuDJQuXTowMmk4sbDdJEUs9RhJimgfcjv4vSjwrXY
Fyur2G0qrBFvHmfjnLcHHHgK+0YtzklN77bXXXK8iv1zJH1xI25VuJuqGkQLgKkWvUKFC4DgTbTfTUvKmk3pQqmVKNAj+nNXLSu1PNJqk9pnqxar2HrpJ71fObNu2zX2vjF/WkfDLkfOS9IvAVZTTRWTPnj1ds2TdqVFvGu20w1GpTzSNPKIsImWGqORAO29/1LMlS5a4Mp7goYMVyNKoGgrmROvFpT7bH374IdCIXSc7aiStg5wOZMp0UC8rvwRMPc50MdmpU6eobEwfCV1sKbCr7EQtL60vevgXZNG4zgS/J5UfaJtR8MovWVDvhHLlyrlsTAU9/WzNaNh3xBf/5E49ivRer7vuOtecXtuSMqyuuuqqkKB4NK4XZyrWeozE+pDbwZ+3Rq5t1apVyAWjLp50EalsX3/fof5ewSXYXFxFJ85JTk83D/2+bzrO3nTTTa7M9tNPP3U/13FHwatbb73VlQ1Sjp400VAipx5Vylj1qa1Fvnz53E1WlWFrfdCNNGWEq6eiBtFRhYnOXYJbF0TTTZLk4rwk4yBwFcUnBBrF6r777nPpoNpRqRmfLhLUhFvlTSNHjnQZI/4BMPh3o4HK/LQT13tWwEHv1x9+XCfG1atXDzTGlRo1ariUWj/AFa00NK6WiTJFdMdbgT2tH8oiUc8mrTN+uZPu9CkTIFrWiTM5qKn/l4JXCtYoSy8WDvrxg1cqjfRH3NTIYMq40gmQeiioXCHaT5oVXPGzQDTyqvYvfhmt+sA1bdrU9cbT3XGcWqz0GDmdWBlyW8cd3UjS6EzqNxm/Ebf6ByqTUwFwZS5GYwAc4XFOEp5G21TvxOCRaJX13LJlSxe8ij+wkI9tJ/rpHEvnoQpqasALDeihxITPP//cDQCim4wqPdf5vbKv1MpBIzyrp68GHaJ1QXicl6R/BK6iyKlOdHUgU6mgmgUrbVSReu30lKavPhLREpjwl4F/4B41apRrXNm7d29XjqH3r0Dd0qVLvYsvvtj1LAq+yNadiWgV/D47duzoLiLURFoHM2Xmadno4qFatWqBxuzBomUdOZN1S0FNlQ3661k0ZUZEGrwK3t8Efx2tJ83KCNFNAF1k++XWuhuuHjz+z9WXR8EYZaEhtnqMJFcsDLn9wQcfuBtnwf3LdAGuspbgY9Itt9zizk+iPQCO/+Gc5NQUcNDNET9A5R9nlZ2pm40lSpQIZNz4P4v187RYonMt9TRTj2JVkCj7O5hKsHVD7emnnw77++xfQ3FekjEQuIoSwReOOsip0bo2Qn+UK5/KvtTHKrg8MBoOeP68+z14fLoLoVpulQCqT5cyIXQiMGLECDddX/tZWLEg+EClUi+lE/sZNFu2bHHlpP7yyMjrw9kUbZkRkZQNxi8hjeZ1RsFcZVmpFFCjKuouuLI4lY7vlz6pp54ysKI1eJdSoq3HSHLFypDbarauzExRKboussqXL+9unKk3T7jyHbah2MA5yakpA1E3l/0BDLTPUGBXN1AuvPBCl3mlckHEJu0nVUHj99yMP9CFslg1Smu0V5KkBM5LMgYCV1GYcq00e/VaUS28IvFKHfUpcKPMmmi8+FZPDJ30KwsiOHNq4MCBLrtIQTrRhaWakOtkQDt79QaIpTsP8e9y+hk0wY1wo2F9SGmxkBkRSfBK2TLK2Iv2gFW47UH7We1j1UxbWYpq1B8fF97R32PkTMTSkNu6IFCGiLK81UdRfa6GDx/upmfJksX78ssvQ54f7fsThOKcJCF/G9ANIrVwUDavT4Er9YbTuawasqtEneNN7FKwSoPo6JxMN+mD958aFV6D5uzZsydN5zGjiPXzkoyAwFUUUfBBgarFixe771988UV3Mqwdl2/lypUuiKP+T9FGO2aV/mmIbZ0gv/DCC4GfqTRSD3/oVzUjV0BPwatVq1Z5sSZcEEIXTxlt9MizJVYyIyK9wFDGhHrpxSKdIOpupjKHtD74jeuB04m1Ibd1XFGGovYZylr0byKpcbBupGn
QEMQ2zknCU79R9SNV8EH9aRXAUjsHlYApWKWbsCqxRWxTMFPZqzonmzJlimtpoZJSDcCkBzcDEC0y6R9DhqSPLlOmTHby5EnLnDmzPfHEE3b06FEbNWqUzZw501q2bGkjRoyw9u3b28GDB2337t22d+9e69Gjh33yySfud6LRTz/9ZP369bMVK1ZYiRIlbMyYMbZq1SqbNWuWtWjRwurXr59gGcaiEydOWJYsWdzXnTp1stGjR9unn35qDRs2TOtZS1eC15FJkya57UrLbenSpValShWL5fXmkksuscaNG9vQoUNjZlvy97dy5MgRW7x4sb3yyis2derUwHIBItmfZM2a1ZYsWRKV+5Pg7cV/79qH7N+/31q3bu3+//zzz6P2fARJxzlJKH97OXbsmDvfePvtt+2vv/6y8847z53nx8XF2b333mvnnHOOOwbp+BMLx2CE9/fff7vjybRp06xIkSLuXH7dunVu/5otW7aQfTGQURG4yqCCd0DHjx+37Nmzu8BVrly5rGrVqu5g9vzzz9tDDz3kDn46Qd66das9/vjjCQJe0UhBOl1QPvXUU7Zv3z67++67bd68eVatWjV77bXX0nr20uWJ4rBhw6xr167uIgqnvsjUSWTlypVjcjH59zq0/7jnnnusQoUKbjuLRfGDdTpx1AkiEE7wMTcWglbhHD582F1Y6f3rOP3tt99yUYUAzklCJXZDSDdN+vTpY2+99ZYtXLjQ3UQCdD3YrVs3e/XVV+2bb76xWrVqufXnn3/+4dweUSE6oxZRTlkxmzZtcl/37NnTHnvsMfd1sWLF7OWXX3YXk37QShS40Z0a/S/+QTBag1ZSoEABa9SokS1fvtxuu+02l321bds2Gzt2rI0bNy6tZy/dUNBKJ4qi9Sg4aKWLrFh2qsyIWA5aaZno8c4779h7771nEydOtD179lgsip+w7N/VBMLxj7nvvvtuTAat/GOOLq7q1q3r3ru2GQV8o/l8BEnHOUko/xzk/7d2cV/rJvQzzzxj8+fPt9mzZxO0QoCSGFRpo2vDGjVqBDJcuSGNaEHGVQbSpk0b69+/v918881uR1S7dm2bMWOGu9uirAdRKdz7779vH330kZUuXdpdRKkMbNeuXe7OZiztvIIDDwsWLHAHeKVTK1uGu1Phl5PWHQU4ta7FMjIjEqJkMvHlwXaDpFK2UfPmzS1HjhzujngsBa38bUaBKwWsgjNJojkDHJFh33rqZaKb1zqffe6551y5IHA67F8RLQhcZRBNmza177//3n777Tf3veqXleWgCyZlFvlpoKqF13PV00kBiEsvvdSdIPo1zsFp2LGYZq1+Gnny5EnTeUrvwQj1BVu5cqXlz5/fYp0yI+67776YzIwIRsnk6ZcH2w2S4uOPP7ZWrVrZ3LlzrXr16jG30ILPQZSxeeDAgZi/UYL/w7711MtE1RPqDVeyZEl3np83b15WH4QNUCmxQdc8sX4jGtGFwFUGsH79etdQXAcspdePHDnSBgwYYIULF7acOXPa9OnTrXz58iEHt0WLFrkmjgpwqa+TdmTUOCMYGTSnFsuZEbHcTPp02G5wpmL1BgpBCUS6fsTyQCjCMsGZrCvcUEO0IXCVAeiO5J133umCVAULFrQffvjBpkyZ4oJSderUcQEpZV4peOXT6IK64PaRJopgZNCcXqxnRgglk6HYboDk4QIcka4fDITCMgH7VyAYgasMQjXtDz74oGvKOGvWLGvQoIGbvmPHDrvxxhvdQX/y5Mmur5XSQsuWLWvPPvtszAxRj6QjgybpYjUzIj5KJtlugOQiKIHkrB+xmtUrLBOc6boSy6NfI3rRCTOd80cRUW8rXURXrFjRjYq3efNmN13lgmrSqJ2UygjVsF0j6KmJuxC0QvwMGk4Qk46g1f9KJtXnSxmcsXohwXYDJA+js4J9K8cbpA72r4g1ZFylU/EzpVQemCtXLnfh+Oqrr1qhQoXs5ZdftmLFigWeo95XGgq1Q4c
OLpBFTyskhgwaJBUlk2w3QHJQaoyk4pyEZQL2r8DpELhK5yd7f/75pwtCKdvBHz3kjTfesLfeeits8MoXa6MHIuloOo5IUTLJdgMkF0EJnArnJCwTJB/7V8QSSgXTYaaVH7R6+umnrWnTplapUiX3/5gxY9z0du3aueFwd+/ebZ07dw6UDQYjaIXEKHMvX7589tVXX8Vk2RciR8kk2w2QHJQa43Q4J2GZIHnYvyLWkHGVTksDBwwYYKNGjbLXXnvNBajWrl1rL730kg0cONCeeOIJ95wJEybYc889Z02aNHGN2IGkIoMGiBzbDRAZSo3BvjV5ON6A/SsQisBVOiwR3Lt3rwtGtWrVytq2bRs4gClQ9eSTT9rEiRPtjjvucNM/+eQTa9iwIRlWAAAg3eECHADYvwJnilLBNNa+fXu78sor//dh/P8SQTVVX716tTvZCy7VadGihV133XVuiFN/tMGbb77ZBa3U0woAACA9odQYANi/AmeKwFUau+eee2z79u0uAOUrWLCg3XLLLbZ48WLbuHFjYHqBAgXcCeBvv/0WUlYo9LQCAAAAAADRhsBVGrvhhhtsypQptmbNGhesEgWlrr32Wlu1apWNHz/eNmzY4KYfPHjQtmzZYmXKlEnjuQYAAAAAAEh99LhKIyrt87OkZs+ebStWrHD9q+688043SoQMGzbM3nrrLcuaNatdcMEFLjNLwSs9V9MAAAAAAACiGYGrNNajRw/74IMPrGnTpi4gtXDhQqtTp47NmTPH/Vz///TTT7Zy5UorV66c9erVywWt1AeL4BUAAAAAAIhmBK7S0JIlS1xvq+nTp7uSwWPHjtnnn3/uRhKsXLmyffrpp2F/j6AVAAAAAACIBfS4SkN79+51WVOVKlVy38fFxVn9+vXthRdecJlWLVu2DPt7ZFoBAAAAAIBYQODqLPE8L8G0ChUquP9VKujLli2b1apVy0qWLGmTJk2ybt26na1ZBAAAAAAASFcIXJ0FJ0+edCMFypEjR+zvv/92X+fJk8eVCqoZ+4cffhh4fs6cOa1u3bqu35UatAMAAAAAAMQielydRc8++6x9++23tmvXLnv66aetQYMG9uOPP7oG7X/99ZcLVtWoUcPGjBnjRh384osvLHPmzCEjEAIAAAAAAMQKMq5SOdPKN3z4cPdQP6u8efNakyZNXC+ryy+/3EaOHGmNGjWyGTNm2HPPPeeCVJ999pkLWuk1CFoBAAAAAIBYRMbVWbB+/Xp7+eWX7ZZbbrF69eq5aX379rUXX3zR/f/www+7xuzHjx+3AwcOWIECBVxpIaMHAgAAAACAWJY1rWcg2n300UfWuHFjK1asmN16662B6QMGDHD/Dxw40GVUNW/e3M4//3wrWLCgm65MK0YPBAAAAAAAsYxSwVSmYNUjjzxiW7ZssZUrV7rm7MHBqy5duriRAxcsWBD6wWTmowEAAAAAALGNUsEUpCyp4IBT8Pdt27a1qVOn2ptvvukysFQa6Bs3bpzdf//9ZFgBAAAAAAAEIXCVQoKDVGPHjrUlS5a40QAvuugi69Wrl5verl07F7x64403XHP24OCV0NMKAAAAAADg/9DjKoX4QasnnnjCJk6caK1bt7ZcuXJZ79697ddff7Xx48e7gJWe16FDBzt8+LC1aNHCsmXL9n8fRlY+DgAAAAAAAB+NlM6Q53mBr7/55hubMWOGvffeezZkyBCrWLGi5cyZ02rUqBF4jrKxNLLgpEmTQoJWAAAAAAAACEXgKpm++OILV9qXKVMmVyYoasCeP39+q1Onjn3wwQfWsmVLGzFihD300EN24MABN8KgvP/++zZ37tzk/mkAAAAAAICYQOAqGd555x175plnXNaU+lj5ZYLFihWzokWL2uuvv26tWrWyYcOG2YMPPuh+9t1337lMrHXr1v1vwWfOHAh4AQAAAAAAICECV8mgUQFV/vfpp5/a22+/7YJXki9fPtfPShlWffv2DQStjhw54oJYKis
sV67c/y38oBEIAQAAAAAAEIpRBZM5euChQ4ds0KBBLoOqUaNGrtG6mqurHPCOO+6wNm3auJLBggUL2siRI23nzp22bNky9xwFsFRiCAAAAAAAgMQRuIqAH3BSbysFoA4ePOiCV7/99pvdeOONrqeVGq5PmzbNlQuuWLHCLr30UitSpIhNnjzZ/UzZWVmyZInkzwIAAAAAAMQkAlfJ0LlzZxeQ6tixowteqd/V+vXrQ4JXe/fudSWCGlUwb968IQEvAAAAAAAAnB5NliK0cuVKNyJg8eLF3ffnnHOO9e7d2/Wumj17tmvcrgCV+l2pUbv+V9BK2VoErQAAAAAAAJKOwFUS+SMALl682PLnz29XXHFFYHpw8OqTTz5xwSu/Ybv/e/S0AgAAAAAAiAyBq6QuqMyZbcuWLTZgwAC7++67rWzZsoHp4gevypcv74JX/miD+rmyrQAAAAAAABAZAldJ4GdNzZkzxypWrGj33ntv4GebNm1ygapXX33VBa/69OkTKBucMGGCC16RbQUAAAAAABA5AldJWUj/P6tq9OjRVqVKFTv//PPtwIEDNnz4cLv//vvt3//+t61du9aOHTtmuXPndplXJUqUsPnz59u3336bjI8FAAAAAAAAjCqYRG+++aYNHTrUFixY4LKrvvvuO1u+fLn169fPatasaVWrVg1kZynQdejQIfv000+tXr16ricWAAAAAAAAIkPgKglU7te2bVv7z3/+Y+eee65dcMEFdt9991nTpk2tcOHCgeepl5XKAv3glf89AAAAAAAAIpc1Gb8Tc44fP+7+v/nmm10GlYJYwUGtLFmyuK/9IJVfWkjQCgAAAAAAIPnIuEqiI0eOuIBUXFyc+97PqgIAAAAAAEDqIHCVDJQAAgAAAAAApD5ShpKBEkAAAAAAAIDUR+AKAAAAAAAA6RKBKwAAAAAAAKRLBK4AAAAAAACQLhG4AgAAAAAAQLpE4AoAAAAAAADpEoErAAAAAAAApEsErgAAAAAAAJAuEbgCAAAAAABAukTgCgAAAAAAAOkSgSsAAAAAAACkSwSuAAAAAAAAkC4RuAIAAAAAAIClR/8PtKW8wSabmxsAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Visualize token salience (gradient-based)\n", + "import numpy as np\n", + "\n", + "if 'grad_l2' in outputs:\n", + " # grad_l2 is now [seq_len, emb_dim] - compute L2 norm per token\n", + " grad_raw = outputs['grad_l2']\n", + " grad_l2 = np.linalg.norm(grad_raw, axis=1)\n", + " \n", + " # Normalize for visualization\n", + " grad_normalized = grad_l2 / grad_l2.max()\n", + " \n", + " # Plot\n", + " fig, ax = plt.subplots(figsize=(12, 3))\n", + " bars = ax.bar(range(len(tokens)), grad_normalized)\n", + " ax.set_xticks(range(len(tokens)))\n", + " ax.set_xticklabels(tokens, rotation=45, ha='right')\n", + " ax.set_ylabel('Salience (normalized)')\n", + " ax.set_title('Token Salience (Gradient L2 Norm)')\n", + " plt.tight_layout()\n", + " plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "9776ce82", + "metadata": {}, + "source": [ + "## Tips for Using LIT with TransformerLens\n", + "\n", + "### Attention Visualization\n", + "- Use the **Attention** module to see which tokens the model attends to\n", + "- Compare attention patterns across layers to see how information flows\n", + "- Look for \"induction heads\" that copy patterns\n", + "\n", + "### Embedding Projector\n", + "- Use UMAP/t-SNE to visualize token embeddings\n", + "- Compare embeddings from different layers\n", + "- Look for semantic clustering\n", + "\n", + "### Salience Maps\n", + "- Gradient-based salience shows which tokens matter most\n", + "- Compare `grad_l2` (magnitude) vs `grad_dot_input` (signed)\n", + "- Higher values = more important for the prediction\n", + "\n", + "### IOI Task\n", + "- A great benchmark for understanding how models do name-binding\n", + "- Look for attention to the indirect object name\n", + "- Compare behavior with swapped names\n", + "\n", + "### Performance Tips\n", + "- Use smaller models (gpt2-small, pythia-70m) for faster iteration\n", + "- Disable gradients 
(`compute_gradients=False`) if you don't need salience\n", + "- Reduce `output_all_layers=False` to output fewer embeddings" + ] + }, + { + "cell_type": "markdown", + "id": "4b5a0fe9", + "metadata": {}, + "source": [ + "## References\n", + "\n", + "- **TransformerLens**: https://github.com/TransformerLensOrg/TransformerLens\n", + "- **LIT**: https://pair-code.github.io/lit/\n", + "- **LIT Paper**: Tenney et al., \"The Language Interpretability Tool\" (EMNLP 2020)\n", + "- **IOI Paper**: Wang et al., \"Interpretability in the Wild\" (2022)\n", + "- **Induction Heads Paper**: Olsson et al., \"In-context Learning and Induction Heads\" (2022)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/poetry.lock b/poetry.lock index ad4b24007..bab9cb46a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +[[package]] +name = "absl-py" +version = "2.3.1" +description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." 
+optional = true +python-versions = ">=3.8" +files = [ + {file = "absl_py-2.3.1-py3-none-any.whl", hash = "sha256:eeecf07f0c2a93ace0772c92e596ace6d3d3996c042b2128459aaae2a76de11d"}, + {file = "absl_py-2.3.1.tar.gz", hash = "sha256:a97820526f7fbfd2ec1bce83f3f25e3a14840dac0d8e02a0b71cd75db3f77fc9"}, +] + [[package]] name = "accelerate" version = "1.0.1" @@ -215,6 +226,17 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} +[[package]] +name = "annoy" +version = "1.17.3" +description = "Approximate Nearest Neighbors in C++/Python optimized for memory usage and loading/saving to disk." +optional = true +python-versions = "*" +files = [ + {file = "annoy-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c33a5d4d344c136c84976bfb2825760142a8bb25335165e24e11c9afbfa8c2e9"}, + {file = "annoy-1.17.3.tar.gz", hash = "sha256:9cbfebefe0a5f843eba29c6be4c84d601f4f41ad4ded0486f1b88c3b07739c15"}, +] + [[package]] name = "anyio" version = "4.5.2" @@ -913,6 +935,17 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "cloudpickle" +version = "3.1.2" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = true +python-versions = ">=3.8" +files = [ + {file = "cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a"}, + {file = "cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -941,6 +974,101 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = "contextlib2" +version = "21.6.0" +description = "Backports and enhancements for the contextlib module" +optional = true +python-versions = ">=3.6" +files = [ + {file = "contextlib2-21.6.0-py2.py3-none-any.whl", hash = 
"sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f"}, + {file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"}, +] + +[[package]] +name = "contourpy" +version = "1.3.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = true +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, + {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, + {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, + {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, + {file = 
"contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, + {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, + {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, + {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, + {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, + {file = 
"contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, + {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, + {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, + {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, + {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, + {file = 
"contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, + {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, +] + +[package.dependencies] +numpy = ">=1.23" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] + [[package]] name = "coverage" version = "7.6.1" @@ -1028,6 +1156,70 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "cryptography" +version = "43.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = 
"cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = 
"cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = true +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + [[package]] name = "datasets" version = "3.1.0" @@ -1164,6 +1356,43 @@ files = [ {file = "einops-0.8.1.tar.gz", hash = "sha256:de5d960a7a761225532e0f1959e5315ebeafc0cd43394732f103ca44b9837e84"}, ] +[[package]] +name = "etils" +version = "1.5.2" +description = "Collection of common python utils" +optional = true +python-versions = ">=3.9" +files = [ + {file = "etils-1.5.2-py3-none-any.whl", hash = "sha256:6dc882d355e1e98a5d1a148d6323679dc47c9a5792939b9de72615aa4737eb0b"}, + {file = "etils-1.5.2.tar.gz", hash = "sha256:ba6a3e1aff95c769130776aa176c11540637f5dd881f3b79172a5149b6b1c446"}, +] + +[package.dependencies] +fsspec = {version = 
"*", optional = true, markers = "extra == \"epath\""} +importlib_resources = {version = "*", optional = true, markers = "extra == \"epath\""} +typing_extensions = {version = "*", optional = true, markers = "extra == \"epy\""} +zipp = {version = "*", optional = true, markers = "extra == \"epath\""} + +[package.extras] +all = ["etils[array-types]", "etils[eapp]", "etils[ecolab]", "etils[edc]", "etils[enp]", "etils[epath-gcs]", "etils[epath-s3]", "etils[epath]", "etils[epy]", "etils[etqdm]", "etils[etree-dm]", "etils[etree-jax]", "etils[etree-tf]", "etils[etree]"] +array-types = ["etils[enp]"] +dev = ["chex", "dataclass_array", "optree", "pyink", "pylint (>=2.6.0)", "pytest", "pytest-subtests", "pytest-xdist", "torch"] +docs = ["etils[all,dev]", "sphinx-apitree[ext]"] +eapp = ["absl-py", "etils[epy]", "simple_parsing"] +ecolab = ["etils[enp]", "etils[epy]", "jupyter", "mediapy", "numpy", "packaging"] +edc = ["etils[epy]"] +enp = ["etils[epy]", "numpy"] +epath = ["etils[epy]", "fsspec", "importlib_resources", "typing_extensions", "zipp"] +epath-gcs = ["etils[epath]", "gcsfs"] +epath-s3 = ["etils[epath]", "s3fs"] +epy = ["typing_extensions"] +etqdm = ["absl-py", "etils[epy]", "tqdm"] +etree = ["etils[array-types]", "etils[enp]", "etils[epy]", "etils[etqdm]"] +etree-dm = ["dm-tree", "etils[etree]"] +etree-jax = ["etils[etree]", "jax[cpu]"] +etree-tf = ["etils[etree]", "tensorflow"] +lazy-imports = ["etils[ecolab]"] + [[package]] name = "eval-type-backport" version = "0.2.2" @@ -1286,6 +1515,86 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. 
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] +[[package]] +name = "fonttools" +version = "4.60.2" +description = "Tools to manipulate font files" +optional = true +python-versions = ">=3.9" +files = [ + {file = "fonttools-4.60.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e36fadcf7e8ca6e34d490eef86ed638d6fd9c55d2f514b05687622cfc4a7050"}, + {file = "fonttools-4.60.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6e500fc9c04bee749ceabfc20cb4903f6981c2139050d85720ea7ada61b75d5c"}, + {file = "fonttools-4.60.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22efea5e784e1d1cd8d7b856c198e360a979383ebc6dea4604743b56da1cbc34"}, + {file = "fonttools-4.60.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:677aa92d84d335e4d301d8ba04afca6f575316bc647b6782cb0921943fcb6343"}, + {file = "fonttools-4.60.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:edd49d3defbf35476e78b61ff737ff5efea811acff68d44233a95a5a48252334"}, + {file = "fonttools-4.60.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:126839492b69cecc5baf2bddcde60caab2ffafd867bbae2a88463fce6078ca3a"}, + {file = "fonttools-4.60.2-cp310-cp310-win32.whl", hash = "sha256:ffcab6f5537136046ca902ed2491ab081ba271b07591b916289b7c27ff845f96"}, + {file = "fonttools-4.60.2-cp310-cp310-win_amd64.whl", hash = "sha256:9c68b287c7ffcd29dd83b5f961004b2a54a862a88825d52ea219c6220309ba45"}, + {file = "fonttools-4.60.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a2aed0a7931401b3875265717a24c726f87ecfedbb7b3426c2ca4d2812e281ae"}, + {file = "fonttools-4.60.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dea6868e9d2b816c9076cfea77754686f3c19149873bdbc5acde437631c15df1"}, + {file = 
"fonttools-4.60.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2fa27f34950aa1fe0f0b1abe25eed04770a3b3b34ad94e5ace82cc341589678a"}, + {file = "fonttools-4.60.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:13a53d479d187b09bfaa4a35ffcbc334fc494ff355f0a587386099cb66674f1e"}, + {file = "fonttools-4.60.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fac5e921d3bd0ca3bb8517dced2784f0742bc8ca28579a68b139f04ea323a779"}, + {file = "fonttools-4.60.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:648f4f9186fd7f1f3cd57dbf00d67a583720d5011feca67a5e88b3a491952cfb"}, + {file = "fonttools-4.60.2-cp311-cp311-win32.whl", hash = "sha256:3274e15fad871bead5453d5ce02658f6d0c7bc7e7021e2a5b8b04e2f9e40da1a"}, + {file = "fonttools-4.60.2-cp311-cp311-win_amd64.whl", hash = "sha256:91d058d5a483a1525b367803abb69de0923fbd45e1f82ebd000f5c8aa65bc78e"}, + {file = "fonttools-4.60.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e0164b7609d2b5c5dd4e044b8085b7bd7ca7363ef8c269a4ab5b5d4885a426b2"}, + {file = "fonttools-4.60.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1dd3d9574fc595c1e97faccae0f264dc88784ddf7fbf54c939528378bacc0033"}, + {file = "fonttools-4.60.2-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:98d0719f1b11c2817307d2da2e94296a3b2a3503f8d6252a101dca3ee663b917"}, + {file = "fonttools-4.60.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d3ea26957dd07209f207b4fff64c702efe5496de153a54d3b91007ec28904dd"}, + {file = "fonttools-4.60.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ee301273b0850f3a515299f212898f37421f42ff9adfc341702582ca5073c13"}, + {file = "fonttools-4.60.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6eb4694cc3b9c03b7c01d65a9cf35b577f21aa6abdbeeb08d3114b842a58153"}, + {file = "fonttools-4.60.2-cp312-cp312-win32.whl", 
hash = "sha256:57f07b616c69c244cc1a5a51072eeef07dddda5ebef9ca5c6e9cf6d59ae65b70"}, + {file = "fonttools-4.60.2-cp312-cp312-win_amd64.whl", hash = "sha256:310035802392f1fe5a7cf43d76f6ff4a24c919e4c72c0352e7b8176e2584b8a0"}, + {file = "fonttools-4.60.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2bb5fd231e56ccd7403212636dcccffc96c5ae0d6f9e4721fa0a32cb2e3ca432"}, + {file = "fonttools-4.60.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:536b5fab7b6fec78ccf59b5c59489189d9d0a8b0d3a77ed1858be59afb096696"}, + {file = "fonttools-4.60.2-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6b9288fc38252ac86a9570f19313ecbc9ff678982e0f27c757a85f1f284d3400"}, + {file = "fonttools-4.60.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93fcb420791d839ef592eada2b69997c445d0ce9c969b5190f2e16828ec10607"}, + {file = "fonttools-4.60.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7916a381b094db4052ac284255186aebf74c5440248b78860cb41e300036f598"}, + {file = "fonttools-4.60.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58c8c393d5e16b15662cfc2d988491940458aa87894c662154f50c7b49440bef"}, + {file = "fonttools-4.60.2-cp313-cp313-win32.whl", hash = "sha256:19c6e0afd8b02008caa0aa08ab896dfce5d0bcb510c49b2c499541d5cb95a963"}, + {file = "fonttools-4.60.2-cp313-cp313-win_amd64.whl", hash = "sha256:6a500dc59e11b2338c2dba1f8cf11a4ae8be35ec24af8b2628b8759a61457b76"}, + {file = "fonttools-4.60.2-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9387c532acbe323bbf2a920f132bce3c408a609d5f9dcfc6532fbc7e37f8ccbb"}, + {file = "fonttools-4.60.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e6f1c824185b5b8fb681297f315f26ae55abb0d560c2579242feea8236b1cfef"}, + {file = "fonttools-4.60.2-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:55a3129d1e4030b1a30260f1b32fe76781b585fb2111d04a988e141c09eb6403"}, + {file = "fonttools-4.60.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b196e63753abc33b3b97a6fd6de4b7c4fef5552c0a5ba5e562be214d1e9668e0"}, + {file = "fonttools-4.60.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:de76c8d740fb55745f3b154f0470c56db92ae3be27af8ad6c2e88f1458260c9a"}, + {file = "fonttools-4.60.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6ba6303225c95998c9fda2d410aa792c3d2c1390a09df58d194b03e17583fa25"}, + {file = "fonttools-4.60.2-cp314-cp314-win32.whl", hash = "sha256:0a89728ce10d7c816fedaa5380c06d2793e7a8a634d7ce16810e536c22047384"}, + {file = "fonttools-4.60.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa8446e6ab8bd778b82cb1077058a2addba86f30de27ab9cc18ed32b34bc8667"}, + {file = "fonttools-4.60.2-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:4063bc81ac5a4137642865cb63dd270e37b3cd1f55a07c0d6e41d072699ccca2"}, + {file = "fonttools-4.60.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:ebfdb66fa69732ed604ab8e2a0431e6deff35e933a11d73418cbc7823d03b8e1"}, + {file = "fonttools-4.60.2-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50b10b3b1a72d1d54c61b0e59239e1a94c0958f4a06a1febf97ce75388dd91a4"}, + {file = "fonttools-4.60.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:beae16891a13b4a2ddec9b39b4de76092a3025e4d1c82362e3042b62295d5e4d"}, + {file = "fonttools-4.60.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:522f017fdb3766fd5d2d321774ef351cc6ce88ad4e6ac9efe643e4a2b9d528db"}, + {file = "fonttools-4.60.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:82cceceaf9c09a965a75b84a4b240dd3768e596ffb65ef53852681606fe7c9ba"}, + {file = "fonttools-4.60.2-cp314-cp314t-win32.whl", hash = 
"sha256:bbfbc918a75437fe7e6d64d1b1e1f713237df1cf00f3a36dedae910b2ba01cee"}, + {file = "fonttools-4.60.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0e5cd9b0830f6550d58c84f3ab151a9892b50c4f9d538c5603c0ce6fff2eb3f1"}, + {file = "fonttools-4.60.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a3c75b8b42f7f93906bdba9eb1197bb76aecbe9a0a7cf6feec75f7605b5e8008"}, + {file = "fonttools-4.60.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f86c8c37bc0ec0b9c141d5e90c717ff614e93c187f06d80f18c7057097f71bc"}, + {file = "fonttools-4.60.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe905403fe59683b0e9a45f234af2866834376b8821f34633b1c76fb731b6311"}, + {file = "fonttools-4.60.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38ce703b60a906e421e12d9e3a7f064883f5e61bb23e8961f4be33cfe578500b"}, + {file = "fonttools-4.60.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9e810c06f3e79185cecf120e58b343ea5a89b54dd695fd644446bcf8c026da5e"}, + {file = "fonttools-4.60.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:38faec8cc1d12122599814d15a402183f5123fb7608dac956121e7c6742aebc5"}, + {file = "fonttools-4.60.2-cp39-cp39-win32.whl", hash = "sha256:80a45cf7bf659acb7b36578f300231873daba67bd3ca8cce181c73f861f14a37"}, + {file = "fonttools-4.60.2-cp39-cp39-win_amd64.whl", hash = "sha256:c355d5972071938e1b1e0f5a1df001f68ecf1a62f34a3407dc8e0beccf052501"}, + {file = "fonttools-4.60.2-py3-none-any.whl", hash = "sha256:73cf92eeda67cf6ff10c8af56fc8f4f07c1647d989a979be9e388a49be26552a"}, + {file = "fonttools-4.60.2.tar.gz", hash = "sha256:d29552e6b155ebfc685b0aecf8d429cb76c14ab734c22ef5d3dea6fdf800c92c"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.45.0)", "unicodedata2 (>=17.0.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] 
+interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.45.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +unicode = ["unicodedata2 (>=17.0.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + [[package]] name = "fqdn" version = "1.5.1" @@ -1489,6 +1798,156 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +[[package]] +name = "google-api-core" +version = "2.25.2" +description = "Google API client core library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "google_api_core-2.25.2-py3-none-any.whl", hash = "sha256:e9a8f62d363dc8424a8497f4c2a47d6bcda6c16514c935629c257ab5d10210e7"}, + {file = "google_api_core-2.25.2.tar.gz", hash = "sha256:1c63aa6af0d0d5e37966f157a77f9396d820fba59f9e43e9415bc3dc5baff300"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.0" +googleapis-common-protos = ">=1.56.2,<2.0.0" +grpcio = {version = ">=1.49.1,<2.0.0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} +grpcio-status = {version = ">=1.49.1,<2.0.0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} +proto-plus = {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +requests = ">=2.18.0,<3.0.0" + +[package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.0)"] +grpc = ["grpcio (>=1.33.2,<2.0.0)", "grpcio (>=1.49.1,<2.0.0)", "grpcio-status (>=1.33.2,<2.0.0)", "grpcio-status (>=1.49.1,<2.0.0)"] +grpcgcp = ["grpcio-gcp 
(>=0.2.2,<1.0.0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] + +[[package]] +name = "google-api-core" +version = "2.29.0" +description = "Google API client core library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "google_api_core-2.29.0-py3-none-any.whl", hash = "sha256:d30bc60980daa36e314b5d5a3e5958b0200cb44ca8fa1be2b614e932b75a3ea9"}, + {file = "google_api_core-2.29.0.tar.gz", hash = "sha256:84181be0f8e6b04006df75ddfe728f24489f0af57c96a529ff7cf45bc28797f7"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.0" +googleapis-common-protos = ">=1.56.2,<2.0.0" +grpcio = [ + {version = ">=1.33.2,<2.0.0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\" and python_version < \"3.14\""}, +] +grpcio-status = [ + {version = ">=1.33.2,<2.0.0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0.0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\" and python_version < \"3.14\""}, +] +proto-plus = [ + {version = ">=1.22.3,<2.0.0", markers = "python_version < \"3.13\""}, + {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +requests = ">=2.18.0,<3.0.0" + +[package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.0)"] +grpc = ["grpcio (>=1.33.2,<2.0.0)", "grpcio (>=1.49.1,<2.0.0)", "grpcio (>=1.75.1,<2.0.0)", "grpcio-status (>=1.33.2,<2.0.0)", "grpcio-status (>=1.49.1,<2.0.0)", "grpcio-status (>=1.75.1,<2.0.0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] + +[[package]] +name = "google-auth" +version = "2.48.0" +description = "Google Authentication Library" +optional = 
true +python-versions = ">=3.8" +files = [ + {file = "google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f"}, + {file = "google_auth-2.48.0.tar.gz", hash = "sha256:4f7e706b0cd3208a3d940a19a822c37a476ddba5450156c3e6624a71f7c841ce"}, +] + +[package.dependencies] +cryptography = ">=38.0.3" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +cryptography = ["cryptography (>=38.0.3)"] +enterprise-cert = ["pyopenssl"] +pyjwt = ["pyjwt (>=2.0)"] +pyopenssl = ["pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "flask", "freezegun", "grpcio", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] + +[[package]] +name = "google-cloud-core" +version = "2.5.0" +description = "Google Cloud API client core library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc"}, + {file = "google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0" +google-auth = ">=1.25.0,<3.0.0" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0.0)", "grpcio (>=1.75.1,<2.0.0)", "grpcio-status (>=1.38.0,<2.0.0)"] + +[[package]] +name = "google-cloud-translate" +version = "3.24.0" +description = "Google Cloud Translate API client library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "google_cloud_translate-3.24.0-py3-none-any.whl", 
hash = "sha256:a4000f01ab51ff790913c3f40425e118e2632e7cd1589ae0401d19e6b355aedb"}, + {file = "google_cloud_translate-3.24.0.tar.gz", hash = "sha256:2f3b8b90f8cdaf63a435d18e63b21c3650de31fc4f858623f2d0d69be0cd3e9a"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +google-cloud-core = ">=1.4.4,<3.0.0" +grpc-google-iam-v1 = ">=0.14.0,<1.0.0" +grpcio = [ + {version = ">=1.33.2,<2.0.0", markers = "python_version < \"3.14\""}, + {version = ">=1.75.1,<2.0.0", markers = "python_version >= \"3.14\""}, +] +proto-plus = [ + {version = ">=1.22.3,<2.0.0", markers = "python_version < \"3.13\""}, + {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, +] +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[[package]] +name = "googleapis-common-protos" +version = "1.72.0" +description = "Common protobufs used in Google APIs" +optional = true +python-versions = ">=3.7" +files = [ + {file = "googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038"}, + {file = "googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] + [[package]] name = "gradio" version = "5.38.2" @@ -1569,6 +2028,114 @@ files = [ [package.extras] dev = ["pytest", "ruff (==0.9.3)"] +[[package]] +name = "grpc-google-iam-v1" +version = "0.14.3" +description = "IAM API client library" +optional = true +python-versions = ">=3.7" 
+files = [ + {file = "grpc_google_iam_v1-0.14.3-py3-none-any.whl", hash = "sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6"}, + {file = "grpc_google_iam_v1-0.14.3.tar.gz", hash = "sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[[package]] +name = "grpcio" +version = "1.78.0" +description = "HTTP/2-based RPC framework" +optional = true +python-versions = ">=3.9" +files = [ + {file = "grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5"}, + {file = "grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813"}, + {file = "grpcio-1.78.0-cp310-cp310-win32.whl", hash = 
"sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de"}, + {file = "grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf"}, + {file = "grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6"}, + {file = "grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074"}, + {file = "grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856"}, + {file = "grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558"}, + {file = "grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97"}, + {file = "grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = 
"sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce"}, + {file = "grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68"}, + {file = "grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e"}, + {file = "grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b"}, + {file = "grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", 
hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20"}, + {file = "grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670"}, + {file = "grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4"}, + {file = "grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e"}, + {file = "grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65"}, + {file = "grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c"}, + {file = "grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb"}, + {file = "grpcio-1.78.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:86f85dd7c947baa707078a236288a289044836d4b640962018ceb9cd1f899af5"}, + {file = "grpcio-1.78.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:de8cb00d1483a412a06394b8303feec5dcb3b55f81d83aa216dbb6a0b86a94f5"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e888474dee2f59ff68130f8a397792d8cb8e17e6b3434339657ba4ee90845a8c"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:86ce2371bfd7f212cf60d8517e5e854475c2c43ce14aa910e136ace72c6db6c1"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b0c689c02947d636bc7fab3e30cc3a3445cca99c834dfb77cd4a6cabfc1c5597"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ce7599575eeb25c0f4dc1be59cada6219f3b56176f799627f44088b21381a28a"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:684083fd383e9dc04c794adb838d4faea08b291ce81f64ecd08e4577c7398adf"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ab399ef5e3cd2a721b1038a0f3021001f19c5ab279f145e1146bb0b9f1b2b12c"}, + {file = "grpcio-1.78.0-cp39-cp39-win32.whl", hash = "sha256:f3d6379493e18ad4d39537a82371c5281e153e963cecb13f953ebac155756525"}, + {file = "grpcio-1.78.0-cp39-cp39-win_amd64.whl", hash = "sha256:5361a0630a7fdb58a6a97638ab70e1dae2893c4d08d7aba64ded28bb9e7a29df"}, + {file = "grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5"}, +] + +[package.dependencies] +typing-extensions = 
">=4.12,<5.0" + +[package.extras] +protobuf = ["grpcio-tools (>=1.78.0)"] + +[[package]] +name = "grpcio-status" +version = "1.71.2" +description = "Status proto mapping for gRPC" +optional = true +python-versions = ">=3.9" +files = [ + {file = "grpcio_status-1.71.2-py3-none-any.whl", hash = "sha256:803c98cb6a8b7dc6dbb785b1111aed739f241ab5e9da0bba96888aa74704cfd3"}, + {file = "grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.71.2" +protobuf = ">=5.26.1,<6.0dev" + [[package]] name = "h11" version = "0.16.0" @@ -1697,6 +2264,39 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "imageio" +version = "2.37.2" +description = "Read and write images and video across all major formats. Supports scientific and volumetric data." +optional = true +python-versions = ">=3.9" +files = [ + {file = "imageio-2.37.2-py3-none-any.whl", hash = "sha256:ad9adfb20335d718c03de457358ed69f141021a333c40a53e57273d8a5bd0b9b"}, + {file = "imageio-2.37.2.tar.gz", hash = "sha256:0212ef2727ac9caa5ca4b2c75ae89454312f440a756fcfc8ef1993e718f50f8a"}, +] + +[package.dependencies] +numpy = "*" +pillow = ">=8.3.2" + +[package.extras] +all-plugins = ["astropy", "av", "fsspec[http]", "imageio-ffmpeg", "numpy (>2)", "pillow-heif", "psutil", "rawpy", "tifffile"] +all-plugins-pypy = ["fsspec[http]", "imageio-ffmpeg", "pillow-heif", "psutil", "tifffile"] +dev = ["black", "flake8", "fsspec[github]", "pytest", "pytest-cov"] +docs = ["numpydoc", "pydata-sphinx-theme", "sphinx (<6)"] +ffmpeg = ["imageio-ffmpeg", "psutil"] +fits = ["astropy"] +freeimage = ["fsspec[http]"] +full = ["astropy", "av", "black", "flake8", "fsspec[github,http]", "imageio-ffmpeg", "numpy (>2)", "numpydoc", "pillow-heif", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "rawpy", "sphinx (<6)", "tifffile"] 
+gdal = ["gdal"] +itk = ["itk"] +linting = ["black", "flake8"] +pillow-heif = ["pillow-heif"] +pyav = ["av"] +rawpy = ["numpy (>2)", "rawpy"] +test = ["fsspec[github]", "pytest", "pytest-cov"] +tifffile = ["tifffile"] + [[package]] name = "imagesize" version = "1.4.1" @@ -1939,6 +2539,17 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "joblib" +version = "1.5.3" +description = "Lightweight pipelining with Python functions" +optional = true +python-versions = ">=3.9" +files = [ + {file = "joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713"}, + {file = "joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3"}, +] + [[package]] name = "json5" version = "0.12.0" @@ -2275,6 +2886,254 @@ files = [ {file = "jupyterlab_widgets-3.0.15.tar.gz", hash = "sha256:2920888a0c2922351a9202817957a68c07d99673504d6cd37345299e971bb08b"}, ] +[[package]] +name = "kiwisolver" +version = "1.4.7" +description = "A fast implementation of the Cassowary constraint solver" +optional = true +python-versions = ">=3.8" +files = [ + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"}, + {file = 
"kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"}, + {file = 
"kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = 
"sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"}, + {file = 
"kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"}, + {file = 
"kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"}, + {file = 
"kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"}, + {file 
= "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"}, + 
{file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"}, + {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, +] + +[[package]] +name = "lazy-loader" +version = "0.4" +description = "Makes it easy to load subpackages and functions on demand." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "lazy_loader-0.4-py3-none-any.whl", hash = "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc"}, + {file = "lazy_loader-0.4.tar.gz", hash = "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +dev = ["changelist (==0.5)"] +lint = ["pre-commit (==3.7.0)"] +test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] + +[[package]] +name = "levenshtein" +version = "0.27.1" +description = "Python extension for computing string edit distances and similarities." +optional = true +python-versions = ">=3.9" +files = [ + {file = "levenshtein-0.27.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13d6f617cb6fe63714c4794861cfaacd398db58a292f930edb7f12aad931dace"}, + {file = "levenshtein-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca9d54d41075e130c390e61360bec80f116b62d6ae973aec502e77e921e95334"}, + {file = "levenshtein-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de1f822b5c9a20d10411f779dfd7181ce3407261436f8470008a98276a9d07f"}, + {file = "levenshtein-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81270392c2e45d1a7e1b3047c3a272d5e28bb4f1eff0137637980064948929b7"}, + {file = "levenshtein-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d30c3ea23a94dddd56dbe323e1fa8a29ceb24da18e2daa8d0abf78b269a5ad1"}, + {file = "levenshtein-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3e0bea76695b9045bbf9ad5f67ad4cc01c11f783368f34760e068f19b6a6bc"}, + {file = "levenshtein-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdd190e468a68c31a5943368a5eaf4e130256a8707886d23ab5906a0cb98a43c"}, + {file = "levenshtein-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:7c3121314bb4b676c011c33f6a0ebb462cfdcf378ff383e6f9e4cca5618d0ba7"}, + {file = "levenshtein-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f8ef378c873efcc5e978026b69b45342d841cd7a2f273447324f1c687cc4dc37"}, + {file = "levenshtein-0.27.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ff18d78c5c16bea20876425e1bf5af56c25918fb01bc0f2532db1317d4c0e157"}, + {file = "levenshtein-0.27.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:13412ff805afbfe619d070280d1a76eb4198c60c5445cd5478bd4c7055bb3d51"}, + {file = "levenshtein-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a2adb9f263557f7fb13e19eb2f34595d86929a44c250b2fca6e9b65971e51e20"}, + {file = "levenshtein-0.27.1-cp310-cp310-win32.whl", hash = "sha256:6278a33d2e0e909d8829b5a72191419c86dd3bb45b82399c7efc53dabe870c35"}, + {file = "levenshtein-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:5b602b8428ee5dc88432a55c5303a739ee2be7c15175bd67c29476a9d942f48e"}, + {file = "levenshtein-0.27.1-cp310-cp310-win_arm64.whl", hash = "sha256:48334081fddaa0c259ba01ee898640a2cf8ede62e5f7e25fefece1c64d34837f"}, + {file = "levenshtein-0.27.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e6f1760108319a108dceb2f02bc7cdb78807ad1f9c673c95eaa1d0fe5dfcaae"}, + {file = "levenshtein-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c4ed8400d94ab348099395e050b8ed9dd6a5d6b5b9e75e78b2b3d0b5f5b10f38"}, + {file = "levenshtein-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7826efe51be8ff58bc44a633e022fdd4b9fc07396375a6dbc4945a3bffc7bf8f"}, + {file = "levenshtein-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff5afb78719659d353055863c7cb31599fbea6865c0890b2d840ee40214b3ddb"}, + {file = "levenshtein-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:201dafd5c004cd52018560cf3213da799534d130cf0e4db839b51f3f06771de0"}, + {file = 
"levenshtein-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5ddd59f3cfaec216811ee67544779d9e2d6ed33f79337492a248245d6379e3d"}, + {file = "levenshtein-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6afc241d27ecf5b921063b796812c55b0115423ca6fa4827aa4b1581643d0a65"}, + {file = "levenshtein-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee2e766277cceb8ca9e584ea03b8dc064449ba588d3e24c1923e4b07576db574"}, + {file = "levenshtein-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:920b23d6109453913ce78ec451bc402ff19d020ee8be4722e9d11192ec2fac6f"}, + {file = "levenshtein-0.27.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:560d7edba126e2eea3ac3f2f12e7bd8bc9c6904089d12b5b23b6dfa98810b209"}, + {file = "levenshtein-0.27.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8d5362b6c7aa4896dc0cb1e7470a4ad3c06124e0af055dda30d81d3c5549346b"}, + {file = "levenshtein-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:65ba880815b0f80a80a293aeebac0fab8069d03ad2d6f967a886063458f9d7a1"}, + {file = "levenshtein-0.27.1-cp311-cp311-win32.whl", hash = "sha256:fcc08effe77fec0bc5b0f6f10ff20b9802b961c4a69047b5499f383119ddbe24"}, + {file = "levenshtein-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:0ed402d8902be7df212ac598fc189f9b2d520817fdbc6a05e2ce44f7f3ef6857"}, + {file = "levenshtein-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:7fdaab29af81a8eb981043737f42450efca64b9761ca29385487b29c506da5b5"}, + {file = "levenshtein-0.27.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:25fb540d8c55d1dc7bdc59b7de518ea5ed9df92eb2077e74bcb9bb6de7b06f69"}, + {file = "levenshtein-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f09cfab6387e9c908c7b37961c045e8e10eb9b7ec4a700367f8e080ee803a562"}, + {file = "levenshtein-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dafa29c0e616f322b574e0b2aeb5b1ff2f8d9a1a6550f22321f3bd9bb81036e3"}, + {file = "levenshtein-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be7a7642ea64392fa1e6ef7968c2e50ef2152c60948f95d0793361ed97cf8a6f"}, + {file = "levenshtein-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:060b48c45ed54bcea9582ce79c6365b20a1a7473767e0b3d6be712fa3a22929c"}, + {file = "levenshtein-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:712f562c5e64dd0398d3570fe99f8fbb88acec7cc431f101cb66c9d22d74c542"}, + {file = "levenshtein-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6141ad65cab49aa4527a3342d76c30c48adb2393b6cdfeca65caae8d25cb4b8"}, + {file = "levenshtein-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:799b8d73cda3265331116f62932f553804eae16c706ceb35aaf16fc2a704791b"}, + {file = "levenshtein-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ec99871d98e517e1cc4a15659c62d6ea63ee5a2d72c5ddbebd7bae8b9e2670c8"}, + {file = "levenshtein-0.27.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8799164e1f83588dbdde07f728ea80796ea72196ea23484d78d891470241b222"}, + {file = "levenshtein-0.27.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:583943813898326516ab451a83f734c6f07488cda5c361676150d3e3e8b47927"}, + {file = "levenshtein-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bb22956af44bb4eade93546bf95be610c8939b9a9d4d28b2dfa94abf454fed7"}, + {file = "levenshtein-0.27.1-cp312-cp312-win32.whl", hash = "sha256:d9099ed1bcfa7ccc5540e8ad27b5dc6f23d16addcbe21fdd82af6440f4ed2b6d"}, + {file = "levenshtein-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:7f071ecdb50aa6c15fd8ae5bcb67e9da46ba1df7bba7c6bf6803a54c7a41fd96"}, + {file = "levenshtein-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:83b9033a984ccace7703f35b688f3907d55490182fd39b33a8e434d7b2e249e6"}, + {file = 
"levenshtein-0.27.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ab00c2cae2889166afb7e1af64af2d4e8c1b126f3902d13ef3740df00e54032d"}, + {file = "levenshtein-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c27e00bc7527e282f7c437817081df8da4eb7054e7ef9055b851fa3947896560"}, + {file = "levenshtein-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5b07de42bfc051136cc8e7f1e7ba2cb73666aa0429930f4218efabfdc5837ad"}, + {file = "levenshtein-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb11ad3c9dae3063405aa50d9c96923722ab17bb606c776b6817d70b51fd7e07"}, + {file = "levenshtein-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c5986fb46cb0c063305fd45b0a79924abf2959a6d984bbac2b511d3ab259f3f"}, + {file = "levenshtein-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75191e469269ddef2859bc64c4a8cfd6c9e063302766b5cb7e1e67f38cc7051a"}, + {file = "levenshtein-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b3a7b2266933babc04e4d9821a495142eebd6ef709f90e24bc532b52b81385"}, + {file = "levenshtein-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbac509794afc3e2a9e73284c9e3d0aab5b1d928643f42b172969c3eefa1f2a3"}, + {file = "levenshtein-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8d68714785178347ecb272b94e85cbf7e638165895c4dd17ab57e7742d8872ec"}, + {file = "levenshtein-0.27.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8ee74ee31a5ab8f61cd6c6c6e9ade4488dde1285f3c12207afc018393c9b8d14"}, + {file = "levenshtein-0.27.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f2441b6365453ec89640b85344afd3d602b0d9972840b693508074c613486ce7"}, + {file = "levenshtein-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a9be39640a46d8a0f9be729e641651d16a62b2c07d3f4468c36e1cc66b0183b9"}, + {file = 
"levenshtein-0.27.1-cp313-cp313-win32.whl", hash = "sha256:a520af67d976761eb6580e7c026a07eb8f74f910f17ce60e98d6e492a1f126c7"}, + {file = "levenshtein-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:7dd60aa49c2d8d23e0ef6452c8329029f5d092f386a177e3385d315cabb78f2a"}, + {file = "levenshtein-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:149cd4f0baf5884ac5df625b7b0d281721b15de00f447080e38f5188106e1167"}, + {file = "levenshtein-0.27.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c9231ac7c705a689f12f4fc70286fa698b9c9f06091fcb0daddb245e9259cbe"}, + {file = "levenshtein-0.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cf9ba080b1a8659d35c11dcfffc7f8c001028c2a3a7b7e6832348cdd60c53329"}, + {file = "levenshtein-0.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:164e3184385caca94ef7da49d373edd7fb52d4253bcc5bd5b780213dae307dfb"}, + {file = "levenshtein-0.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6024d67de6efbd32aaaafd964864c7fee0569b960556de326c3619d1eeb2ba4"}, + {file = "levenshtein-0.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fbb234b3b04e04f7b3a2f678e24fd873c86c543d541e9df3ac9ec1cc809e732"}, + {file = "levenshtein-0.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffdd9056c7afb29aea00b85acdb93a3524e43852b934ebb9126c901506d7a1ed"}, + {file = "levenshtein-0.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1a0918243a313f481f4ba6a61f35767c1230395a187caeecf0be87a7c8f0624"}, + {file = "levenshtein-0.27.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c57655b20690ffa5168df7f4b7c6207c4ca917b700fb1b142a49749eb1cf37bb"}, + {file = "levenshtein-0.27.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:079cc78de05d3ded6cf1c5e2c3eadeb1232e12d49be7d5824d66c92b28c3555a"}, + {file = "levenshtein-0.27.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ac28c4ced134c0fe2941230ce4fd5c423aa66339e735321665fb9ae970f03a32"}, + {file = "levenshtein-0.27.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a2f7688355b22db27588f53c922b4583b8b627c83a8340191bbae1fbbc0f5f56"}, + {file = "levenshtein-0.27.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:654e8f016cb64ad27263d3364c6536e7644205f20d94748c8b94c586e3362a23"}, + {file = "levenshtein-0.27.1-cp39-cp39-win32.whl", hash = "sha256:145e6e8744643a3764fed9ab4ab9d3e2b8e5f05d2bcd0ad7df6f22f27a9fbcd4"}, + {file = "levenshtein-0.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:612f0c90201c318dd113e7e97bd677e6e3e27eb740f242b7ae1a83f13c892b7e"}, + {file = "levenshtein-0.27.1-cp39-cp39-win_arm64.whl", hash = "sha256:cde09ec5b3cc84a6737113b47e45392b331c136a9e8a8ead8626f3eacae936f8"}, + {file = "levenshtein-0.27.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c92a222ab95b8d903eae6d5e7d51fe6c999be021b647715c18d04d0b0880f463"}, + {file = "levenshtein-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:71afc36b4ee950fa1140aff22ffda9e5e23280285858e1303260dbb2eabf342d"}, + {file = "levenshtein-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b1daeebfc148a571f09cfe18c16911ea1eaaa9e51065c5f7e7acbc4b866afa"}, + {file = "levenshtein-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:105edcb14797d95c77f69bad23104314715a64cafbf4b0e79d354a33d7b54d8d"}, + {file = "levenshtein-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9c58fb1ef8bdc8773d705fbacf628e12c3bb63ee4d065dda18a76e86042444a"}, + {file = "levenshtein-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e52270591854af67217103955a36bd7436b57c801e3354e73ba44d689ed93697"}, + {file = "levenshtein-0.27.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:909b7b6bce27a4ec90576c9a9bd9af5a41308dfecf364b410e80b58038277bbe"}, + {file = 
"levenshtein-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d193a7f97b8c6a350e36ec58e41a627c06fa4157c3ce4b2b11d90cfc3c2ebb8f"}, + {file = "levenshtein-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:614be316e3c06118705fae1f717f9072d35108e5fd4e66a7dd0e80356135340b"}, + {file = "levenshtein-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31fc0a5bb070722bdabb6f7e14955a294a4a968c68202d294699817f21545d22"}, + {file = "levenshtein-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9415aa5257227af543be65768a80c7a75e266c3c818468ce6914812f88f9c3df"}, + {file = "levenshtein-0.27.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7987ef006a3cf56a4532bd4c90c2d3b7b4ca9ad3bf8ae1ee5713c4a3bdfda913"}, + {file = "levenshtein-0.27.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e67750653459a8567b5bb10e56e7069b83428d42ff5f306be821ef033b92d1a8"}, + {file = "levenshtein-0.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93344c2c3812f21fdc46bd9e57171684fc53dd107dae2f648d65ea6225d5ceaf"}, + {file = "levenshtein-0.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da4baef7e7460691006dd2ca6b9e371aecf135130f72fddfe1620ae740b68d94"}, + {file = "levenshtein-0.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8141c8e5bf2bd76ae214c348ba382045d7ed9d0e7ce060a36fc59c6af4b41d48"}, + {file = "levenshtein-0.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:773aa120be48c71e25c08d92a2108786e6537a24081049664463715926c76b86"}, + {file = "levenshtein-0.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f12a99138fb09eb5606ab9de61dd234dd82a7babba8f227b5dce0e3ae3a9eaf4"}, + {file = "levenshtein-0.27.1.tar.gz", hash = 
"sha256:3e18b73564cfc846eec94dd13fab6cb006b5d2e0cc56bad1fd7d5585881302e3"}, +] + +[package.dependencies] +rapidfuzz = ">=3.9.0,<4.0.0" + [[package]] name = "libcst" version = "1.1.0" @@ -2323,6 +3182,51 @@ typing-inspect = ">=0.4.0" [package.extras] dev = ["Sphinx (>=5.1.1)", "black (==23.9.1)", "build (>=0.10.0)", "coverage (>=4.5.4)", "fixit (==2.0.0.post1)", "flake8 (>=3.7.8,<5)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.2)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.16)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.2.0)", "usort (==1.0.7)"] +[[package]] +name = "lit-nlp" +version = "1.3.1" +description = "🔥LIT: The Learning Interpretability Tool" +optional = true +python-versions = ">=3.9" +files = [ + {file = "lit_nlp-1.3.1-py3-none-any.whl", hash = "sha256:a904081b1588c79a480acbc09fa64a30973821082e6d05c4c04c03409fffee29"}, + {file = "lit_nlp-1.3.1.tar.gz", hash = "sha256:7e9e5d7fcdf614106e96a3fb0c2ce1f2a34b4f225aeee589a267bbbb0fce84e2"}, +] + +[package.dependencies] +absl-py = ">=1.4.0" +annoy = ">=1.17.3" +attrs = ">=22.1.0" +etils = {version = ">=1.5.0", extras = ["epath"]} +filelock = ">=3.12.3" +google-cloud-translate = ">=3.11.1" +ipython = ">=7.34.0" +Levenshtein = ">=0.21.1" +matplotlib = ">=3.7.1" +ml-collections = ">=0.1.1" +numpy = ">=1.24.1,<2.0.0" +pandas = {version = ">=2.0.3", extras = ["output-formatting"]} +Pillow = ">=10.0.0" +portpicker = ">=1.5.2" +requests = ">=2.31.0" +rouge-score = ">=0.1.2" +sacrebleu = ">=2.3.1" +saliency = ">=0.1.3" +scikit-learn = ">=1.0.2" +scipy = ">=1.10.1" +shap = ">=0.42.0,<0.46.0" +six = ">=1.16.0" +termcolor = ">=2.3.0" +tqdm = ">=4.64.0" +werkzeug = ">=2.2.3" + +[package.extras] +examples-common = ["gunicorn (>=20.1.0)", "tensorflow (>=2.16.0)", "transformers (>=4.27.1)"] +examples-discriminative-ai = ["lit-nlp[examples-common]", 
"tensorflow-datasets (>=4.9.0)", "tf-keras (>=2.16)"] +examples-generative-ai = ["google-cloud-aiplatform (>=1.60.0)", "keras (>=3.0.0)", "keras-nlp (>=0.14.0)", "lit-nlp[examples-common]", "sentencepiece (==0.1.99)", "tensorflow-text (>=2.16.0)", "torch (>=2.0.0)", "vertexai (>=1.49.0)"] +test = ["lime (==0.2.0.1)", "lit-nlp[examples-discriminative-ai]", "lit-nlp[examples-generative-ai]", "lit-nlp[umap]", "pytest (>=7.4.0,<8.0.0)", "webtest (>=2.0)"] +umap = ["umap-learn (==0.5.6)"] + [[package]] name = "livereload" version = "2.7.1" @@ -2337,6 +3241,191 @@ files = [ [package.dependencies] tornado = "*" +[[package]] +name = "llvmlite" +version = "0.43.0" +description = "lightweight wrapper around basic LLVM functionality" +optional = true +python-versions = ">=3.9" +files = [ + {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, + {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, + {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, + {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, + {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, + {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, + {file = "llvmlite-0.43.0.tar.gz", hash = 
"sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, +] + +[[package]] +name = "lxml" +version = "6.0.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = true +python-versions = ">=3.8" +files = [ + {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"}, + {file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"}, + {file = "lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31"}, + {file = "lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9"}, + {file = "lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8"}, + {file = "lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba"}, + {file = "lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c"}, + {file = "lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c"}, + {file = "lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321"}, + {file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1"}, + {file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = 
"sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34"}, + {file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a"}, + {file = "lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c"}, + {file = "lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b"}, + {file = "lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0"}, + {file = "lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5"}, + {file = "lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607"}, + {file = "lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938"}, + {file = "lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d"}, + {file = "lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438"}, + {file = "lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964"}, + {file = "lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d"}, + {file = "lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7"}, + {file = "lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = 
"sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178"}, + {file = "lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553"}, + {file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb"}, + {file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a"}, + {file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c"}, + {file = "lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7"}, + {file = "lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46"}, + {file = "lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078"}, + {file = "lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285"}, + {file = "lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456"}, + {file = "lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0"}, + {file = "lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092"}, + {file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f"}, + {file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8"}, + {file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f"}, + {file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6"}, + {file = "lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322"}, + {file = "lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849"}, + {file = "lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f"}, + {file = "lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = 
"sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6"}, + {file = "lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77"}, + {file = "lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6"}, + {file = "lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a"}, + {file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679"}, + {file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659"}, + {file = 
"lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484"}, + {file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2"}, + {file = "lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314"}, + {file = "lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2"}, + {file = "lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7"}, + {file = "lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf"}, + {file = "lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe"}, + {file = "lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d"}, + {file = "lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d"}, + {file = "lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5"}, + {file = "lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0"}, + {file = "lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba"}, + {file = "lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0"}, + {file = 
"lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d"}, + {file = "lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37"}, + {file = "lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9"}, + {file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917"}, + {file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f"}, + {file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8"}, + {file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a"}, + {file = "lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c"}, + {file = "lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b"}, + {file = "lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed"}, + {file = "lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8"}, + {file = "lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d"}, + {file = "lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d"}, + {file = "lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9"}, + {file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e"}, + {file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d"}, + {file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec"}, + {file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272"}, + {file = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f"}, + {file = 
"lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312"}, + {file = "lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca"}, + {file = "lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c"}, + {file = "lxml-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a656ca105115f6b766bba324f23a67914d9c728dafec57638e2b92a9dcd76c62"}, + {file = "lxml-6.0.2-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c54d83a2188a10ebdba573f16bd97135d06c9ef60c3dc495315c7a28c80a263f"}, + {file = "lxml-6.0.2-cp38-cp38-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:1ea99340b3c729beea786f78c38f60f4795622f36e305d9c9be402201efdc3b7"}, + {file = "lxml-6.0.2-cp38-cp38-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:af85529ae8d2a453feee4c780d9406a5e3b17cee0dd75c18bd31adcd584debc3"}, + {file = "lxml-6.0.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fe659f6b5d10fb5a17f00a50eb903eb277a71ee35df4615db573c069bcf967ac"}, + {file = "lxml-6.0.2-cp38-cp38-win32.whl", hash = "sha256:5921d924aa5468c939d95c9814fa9f9b5935a6ff4e679e26aaf2951f74043512"}, + {file = "lxml-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:0aa7070978f893954008ab73bb9e3c24a7c56c054e00566a21b553dc18105fca"}, + {file = "lxml-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2c8458c2cdd29589a8367c09c8f030f1d202be673f0ca224ec18590b3b9fb694"}, + {file = "lxml-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3fee0851639d06276e6b387f1c190eb9d7f06f7f53514e966b26bae46481ec90"}, + {file = "lxml-6.0.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2142a376b40b6736dfc214fd2902409e9e3857eff554fed2d3c60f097e62a62"}, + {file = "lxml-6.0.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:a6b5b39cc7e2998f968f05309e666103b53e2edd01df8dc51b90d734c0825444"}, + {file = "lxml-6.0.2-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4aec24d6b72ee457ec665344a29acb2d35937d5192faebe429ea02633151aad"}, + {file = "lxml-6.0.2-cp39-cp39-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:b42f4d86b451c2f9d06ffb4f8bbc776e04df3ba070b9fe2657804b1b40277c48"}, + {file = "lxml-6.0.2-cp39-cp39-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cdaefac66e8b8f30e37a9b4768a391e1f8a16a7526d5bc77a7928408ef68e93"}, + {file = "lxml-6.0.2-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:b738f7e648735714bbb82bdfd030203360cfeab7f6e8a34772b3c8c8b820568c"}, + {file = "lxml-6.0.2-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:daf42de090d59db025af61ce6bdb2521f0f102ea0e6ea310f13c17610a97da4c"}, + {file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:66328dabea70b5ba7e53d94aa774b733cf66686535f3bc9250a7aab53a91caaf"}, + {file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:e237b807d68a61fc3b1e845407e27e5eb8ef69bc93fe8505337c1acb4ee300b6"}, + {file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:ac02dc29fd397608f8eb15ac1610ae2f2f0154b03f631e6d724d9e2ad4ee2c84"}, + {file = "lxml-6.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:817ef43a0c0b4a77bd166dc9a09a555394105ff3374777ad41f453526e37f9cb"}, + {file = "lxml-6.0.2-cp39-cp39-win32.whl", hash = "sha256:bc532422ff26b304cfb62b328826bd995c96154ffd2bac4544f37dbb95ecaa8f"}, + {file = "lxml-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:995e783eb0374c120f528f807443ad5a83a656a8624c467ea73781fc5f8a8304"}, + {file = "lxml-6.0.2-cp39-cp39-win_arm64.whl", hash = "sha256:08b9d5e803c2e4725ae9e8559ee880e5328ed61aa0935244e0515d7d9dbec0aa"}, + {file = "lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6"}, + {file = 
"lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba"}, + {file = "lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5"}, + {file = "lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4"}, + {file = "lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d"}, + {file = "lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d"}, + {file = "lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700"}, + {file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee"}, + {file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f"}, + {file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9"}, + {file = "lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a"}, + {file = "lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e"}, + {file = "lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean 
= ["lxml_html_clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -2430,6 +3519,71 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "matplotlib" +version = "3.9.4" +description = "Python plotting package" +optional = true +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.9.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c5fdd7abfb706dfa8d307af64a87f1a862879ec3cd8d0ec8637458f0885b9c50"}, + {file = "matplotlib-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d89bc4e85e40a71d1477780366c27fb7c6494d293e1617788986f74e2a03d7ff"}, + {file = "matplotlib-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddf9f3c26aae695c5daafbf6b94e4c1a30d6cd617ba594bbbded3b33a1fcfa26"}, + {file = "matplotlib-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18ebcf248030173b59a868fda1fe42397253f6698995b55e81e1f57431d85e50"}, + {file = "matplotlib-3.9.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:974896ec43c672ec23f3f8c648981e8bc880ee163146e0312a9b8def2fac66f5"}, + {file = "matplotlib-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:4598c394ae9711cec135639374e70871fa36b56afae17bdf032a345be552a88d"}, + {file = "matplotlib-3.9.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4dd29641d9fb8bc4492420c5480398dd40a09afd73aebe4eb9d0071a05fbe0c"}, + {file = "matplotlib-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30e5b22e8bcfb95442bf7d48b0d7f3bdf4a450cbf68986ea45fca3d11ae9d099"}, + {file = "matplotlib-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bb0030d1d447fd56dcc23b4c64a26e44e898f0416276cac1ebc25522e0ac249"}, + {file = "matplotlib-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aca90ed222ac3565d2752b83dbb27627480d27662671e4d39da72e97f657a423"}, + {file = "matplotlib-3.9.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a181b2aa2906c608fcae72f977a4a2d76e385578939891b91c2550c39ecf361e"}, + {file = "matplotlib-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:1f6882828231eca17f501c4dcd98a05abb3f03d157fbc0769c6911fe08b6cfd3"}, + {file = "matplotlib-3.9.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:dfc48d67e6661378a21c2983200a654b72b5c5cdbd5d2cf6e5e1ece860f0cc70"}, + {file = "matplotlib-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47aef0fab8332d02d68e786eba8113ffd6f862182ea2999379dec9e237b7e483"}, + {file = "matplotlib-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fba1f52c6b7dc764097f52fd9ab627b90db452c9feb653a59945de16752e965f"}, + {file = "matplotlib-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173ac3748acaac21afcc3fa1633924609ba1b87749006bc25051c52c422a5d00"}, + {file = "matplotlib-3.9.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320edea0cadc07007765e33f878b13b3738ffa9745c5f707705692df70ffe0e0"}, + {file = "matplotlib-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a4a4cfc82330b27042a7169533da7991e8789d180dd5b3daeaee57d75cd5a03b"}, + {file = "matplotlib-3.9.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37eeffeeca3c940985b80f5b9a7b95ea35671e0e7405001f249848d2b62351b6"}, + {file = "matplotlib-3.9.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3e7465ac859ee4abcb0d836137cd8414e7bb7ad330d905abced457217d4f0f45"}, + {file = "matplotlib-3.9.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4c12302c34afa0cf061bea23b331e747e5e554b0fa595c96e01c7b75bc3b858"}, + {file = "matplotlib-3.9.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b8c97917f21b75e72108b97707ba3d48f171541a74aa2a56df7a40626bafc64"}, + {file = 
"matplotlib-3.9.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0229803bd7e19271b03cb09f27db76c918c467aa4ce2ae168171bc67c3f508df"}, + {file = "matplotlib-3.9.4-cp313-cp313-win_amd64.whl", hash = "sha256:7c0d8ef442ebf56ff5e206f8083d08252ee738e04f3dc88ea882853a05488799"}, + {file = "matplotlib-3.9.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a04c3b00066a688834356d196136349cb32f5e1003c55ac419e91585168b88fb"}, + {file = "matplotlib-3.9.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04c519587f6c210626741a1e9a68eefc05966ede24205db8982841826af5871a"}, + {file = "matplotlib-3.9.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308afbf1a228b8b525fcd5cec17f246bbbb63b175a3ef6eb7b4d33287ca0cf0c"}, + {file = "matplotlib-3.9.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddb3b02246ddcffd3ce98e88fed5b238bc5faff10dbbaa42090ea13241d15764"}, + {file = "matplotlib-3.9.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8a75287e9cb9eee48cb79ec1d806f75b29c0fde978cb7223a1f4c5848d696041"}, + {file = "matplotlib-3.9.4-cp313-cp313t-win_amd64.whl", hash = "sha256:488deb7af140f0ba86da003e66e10d55ff915e152c78b4b66d231638400b1965"}, + {file = "matplotlib-3.9.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3c3724d89a387ddf78ff88d2a30ca78ac2b4c89cf37f2db4bd453c34799e933c"}, + {file = "matplotlib-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d5f0a8430ffe23d7e32cfd86445864ccad141797f7d25b7c41759a5b5d17cfd7"}, + {file = "matplotlib-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bb0141a21aef3b64b633dc4d16cbd5fc538b727e4958be82a0e1c92a234160e"}, + {file = "matplotlib-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57aa235109e9eed52e2c2949db17da185383fa71083c00c6c143a60e07e0888c"}, + {file = "matplotlib-3.9.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b18c600061477ccfdd1e6fd050c33d8be82431700f3452b297a56d9ed7037abb"}, + 
{file = "matplotlib-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:ef5f2d1b67d2d2145ff75e10f8c008bfbf71d45137c4b648c87193e7dd053eac"}, + {file = "matplotlib-3.9.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:44e0ed786d769d85bc787b0606a53f2d8d2d1d3c8a2608237365e9121c1a338c"}, + {file = "matplotlib-3.9.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:09debb9ce941eb23ecdbe7eab972b1c3e0276dcf01688073faff7b0f61d6c6ca"}, + {file = "matplotlib-3.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc53cf157a657bfd03afab14774d54ba73aa84d42cfe2480c91bd94873952db"}, + {file = "matplotlib-3.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ad45da51be7ad02387801fd154ef74d942f49fe3fcd26a64c94842ba7ec0d865"}, + {file = "matplotlib-3.9.4.tar.gz", hash = "sha256:1e00e8be7393cbdc6fedfa8a6fba02cf3e83814b285db1c60b906a023ba41bc3"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.3.1" +numpy = ">=1.23" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[package.extras] +dev = ["meson-python (>=0.13.1,<0.17.0)", "numpy (>=1.25)", "pybind11 (>=2.6,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -2488,6 +3642,22 @@ files = [ [package.dependencies] typing-extensions = {version = "*", markers = "python_version < \"3.11\""} +[[package]] +name = "ml-collections" +version = "0.1.1" +description = "ML Collections is a library of Python collections designed for ML usecases." 
+optional = true +python-versions = ">=2.6" +files = [ + {file = "ml_collections-0.1.1.tar.gz", hash = "sha256:3fefcc72ec433aa1e5d32307a3e474bbb67f405be814ea52a2166bfc9dbe68cc"}, +] + +[package.dependencies] +absl-py = "*" +contextlib2 = "*" +PyYAML = "*" +six = "*" + [[package]] name = "mpmath" version = "1.3.0" @@ -2900,24 +4070,6 @@ files = [ {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] -[[package]] -name = "networkx" -version = "3.1" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.8" -files = [ - {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, - {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, -] - -[package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] - [[package]] name = "networkx" version = "3.2.1" @@ -2936,6 +4088,31 @@ doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9. 
extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] +[[package]] +name = "nltk" +version = "3.9.2" +description = "Natural Language Toolkit" +optional = true +python-versions = ">=3.9" +files = [ + {file = "nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a"}, + {file = "nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + [[package]] name = "notebook" version = "7.3.3" @@ -2976,6 +4153,40 @@ jupyter-server = ">=1.8,<3" [package.extras] test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] +[[package]] +name = "numba" +version = "0.60.0" +description = "compiling Python code using LLVM" +optional = true +python-versions = ">=3.9" +files = [ + {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, + {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, + {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, + {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, + {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, + {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, + {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, + {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, + {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, + {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, + {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, + {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, + {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, +] + +[package.dependencies] +llvmlite = "==0.43.*" +numpy = ">=1.22,<2.1" + [[package]] name = "numpy" version = "1.24.4" @@ -3547,13 +4758,15 @@ files = [ ] [package.dependencies] +jinja2 = {version = ">=3.0.0", optional = true, markers = "extra == \"output_formatting\""} numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" +tabulate = {version = ">=0.8.9", optional = true, markers = "extra == \"output_formatting\""} tzdata = ">=2022.1" [package.extras] @@ -3608,9 +4821,11 @@ files = [ ] [package.dependencies] +jinja2 = {version = ">=3.1.2", optional = true, markers = "extra == \"output-formatting\""} numpy = {version = ">=1.23.2", markers = "python_version >= \"3.11\""} python-dateutil = ">=2.8.2" pytz = ">=2020.1" +tabulate = {version = ">=0.8.10", optional = true, markers = "extra == \"output-formatting\""} tzdata = ">=2022.1" [package.extras] @@ -3702,9 +4917,11 @@ files = [ ] [package.dependencies] +jinja2 = {version = ">=3.1.2", optional = true, markers = "extra == \"output-formatting\""} numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} python-dateutil = ">=2.8.2" pytz = ">=2020.1" +tabulate = {version = ">=0.9.0", optional = true, markers = "extra == 
\"output-formatting\""} tzdata = ">=2022.7" [package.extras] @@ -4040,6 +5257,39 @@ files = [ [package.dependencies] six = ">=1.5.2" +[[package]] +name = "portalocker" +version = "3.2.0" +description = "Wraps the portalocker recipe for easy usage" +optional = true +python-versions = ">=3.9" +files = [ + {file = "portalocker-3.2.0-py3-none-any.whl", hash = "sha256:3cdc5f565312224bc570c49337bd21428bba0ef363bbcf58b9ef4a9f11779968"}, + {file = "portalocker-3.2.0.tar.gz", hash = "sha256:1f3002956a54a8c3730586c5c77bf18fae4149e07eaf1c29fc3faf4d5a3f89ac"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["portalocker[tests]"] +redis = ["redis"] +tests = ["coverage-conditional-plugin (>=0.9.0)", "portalocker[redis]", "pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-rerunfailures (>=15.0)", "pytest-timeout (>=2.1.0)", "sphinx (>=6.0.0)", "types-pywin32 (>=310.0.0.20250429)", "types-redis"] + +[[package]] +name = "portpicker" +version = "1.6.0" +description = "A library to choose unique available network ports." 
+optional = true +python-versions = ">=3.6" +files = [ + {file = "portpicker-1.6.0-py3-none-any.whl", hash = "sha256:b2787a41404cf7edbe29b07b9e0ed863b09f2665dcc01c1eb0c2261c1e7d0755"}, + {file = "portpicker-1.6.0.tar.gz", hash = "sha256:bd507fd6f96f65ee02781f2e674e9dc6c99bbfa6e3c39992e3916204c9d431fa"}, +] + +[package.dependencies] +psutil = "*" + [[package]] name = "prometheus-client" version = "0.21.1" @@ -4175,6 +5425,23 @@ files = [ {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, ] +[[package]] +name = "proto-plus" +version = "1.27.1" +description = "Beautiful, Pythonic protocol buffers" +optional = true +python-versions = ">=3.7" +files = [ + {file = "proto_plus-1.27.1-py3-none-any.whl", hash = "sha256:e4643061f3a4d0de092d62aa4ad09fa4756b2cbb89d4627f3985018216f9fefc"}, + {file = "proto_plus-1.27.1.tar.gz", hash = "sha256:912a7460446625b792f6448bade9e55cd4e41e6ac10e27009ef71a7f317fa147"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<7.0.0" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + [[package]] name = "protobuf" version = "5.29.5" @@ -4294,6 +5561,31 @@ numpy = ">=1.16.6" [package.extras] test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] +[[package]] +name = "pyasn1" +version = "0.6.2" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf"}, + {file = "pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = 
"sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + [[package]] name = "pycln" version = "2.5.0" @@ -4480,6 +5772,20 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyparsing" +version = "3.3.2" +description = "pyparsing - Classes and methods to define and execute parsing grammars" +optional = true +python-versions = ">=3.9" +files = [ + {file = "pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d"}, + {file = "pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pytest" version = "8.3.5" @@ -4784,6 +6090,112 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} +[[package]] +name = "rapidfuzz" +version = "3.13.0" +description = "rapid fuzzy string matching" +optional = true +python-versions = ">=3.9" +files = [ + {file = "rapidfuzz-3.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aafc42a1dc5e1beeba52cd83baa41372228d6d8266f6d803c16dbabbcc156255"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:85c9a131a44a95f9cac2eb6e65531db014e09d89c4f18c7b1fa54979cb9ff1f3"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d7cec4242d30dd521ef91c0df872e14449d1dffc2a6990ede33943b0dae56c3"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e297c09972698c95649e89121e3550cee761ca3640cd005e24aaa2619175464e"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ef0f5f03f61b0e5a57b1df7beafd83df993fd5811a09871bad6038d08e526d0d"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8cf5f7cd6e4d5eb272baf6a54e182b2c237548d048e2882258336533f3f02b7"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9256218ac8f1a957806ec2fb9a6ddfc6c32ea937c0429e88cf16362a20ed8602"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1bdd2e6d0c5f9706ef7595773a81ca2b40f3b33fd7f9840b726fb00c6c4eb2e"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5280be8fd7e2bee5822e254fe0a5763aa0ad57054b85a32a3d9970e9b09bbcbf"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd742c03885db1fce798a1cd87a20f47f144ccf26d75d52feb6f2bae3d57af05"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5435fcac94c9ecf0504bf88a8a60c55482c32e18e108d6079a0089c47f3f8cf6"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:93a755266856599be4ab6346273f192acde3102d7aa0735e2f48b456397a041f"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-win32.whl", hash = "sha256:3abe6a4e8eb4cfc4cda04dd650a2dc6d2934cbdeda5def7e6fd1c20f6e7d2a0b"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8ddb58961401da7d6f55f185512c0d6bd24f529a637078d41dd8ffa5a49c107"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-win_arm64.whl", hash = "sha256:c523620d14ebd03a8d473c89e05fa1ae152821920c3ff78b839218ff69e19ca3"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d395a5cad0c09c7f096433e5fd4224d83b53298d53499945a9b0e5a971a84f3a"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7b3eda607a019169f7187328a8d1648fb9a90265087f6903d7ee3a8eee01805"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:98e0bfa602e1942d542de077baf15d658bd9d5dcfe9b762aff791724c1c38b70"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bef86df6d59667d9655905b02770a0c776d2853971c0773767d5ef8077acd624"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fedd316c165beed6307bf754dee54d3faca2c47e1f3bcbd67595001dfa11e969"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5158da7f2ec02a930be13bac53bb5903527c073c90ee37804090614cab83c29e"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b6f913ee4618ddb6d6f3e387b76e8ec2fc5efee313a128809fbd44e65c2bbb2"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d25fdbce6459ccbbbf23b4b044f56fbd1158b97ac50994eaae2a1c0baae78301"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25343ccc589a4579fbde832e6a1e27258bfdd7f2eb0f28cb836d6694ab8591fc"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a9ad1f37894e3ffb76bbab76256e8a8b789657183870be11aa64e306bb5228fd"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5dc71ef23845bb6b62d194c39a97bb30ff171389c9812d83030c1199f319098c"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b7f4c65facdb94f44be759bbd9b6dda1fa54d0d6169cdf1a209a5ab97d311a75"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-win32.whl", hash = "sha256:b5104b62711565e0ff6deab2a8f5dbf1fbe333c5155abe26d2cfd6f1849b6c87"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:9093cdeb926deb32a4887ebe6910f57fbcdbc9fbfa52252c10b56ef2efb0289f"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:f70f646751b6aa9d05be1fb40372f006cc89d6aad54e9d79ae97bd1f5fce5203"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:4a1a6a906ba62f2556372282b1ef37b26bca67e3d2ea957277cfcefc6275cca7"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fd0975e015b05c79a97f38883a11236f5a24cca83aa992bd2558ceaa5652b26"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d4e13593d298c50c4f94ce453f757b4b398af3fa0fd2fde693c3e51195b7f69"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed6f416bda1c9133000009d84d9409823eb2358df0950231cc936e4bf784eb97"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc82b6ed01acb536b94a43996a94471a218f4d89f3fdd9185ab496de4b2a981"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9d824de871daa6e443b39ff495a884931970d567eb0dfa213d234337343835f"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d18228a2390375cf45726ce1af9d36ff3dc1f11dce9775eae1f1b13ac6ec50f"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5fe634c9482ec5d4a6692afb8c45d370ae86755e5f57aa6c50bfe4ca2bdd87"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:694eb531889f71022b2be86f625a4209c4049e74be9ca836919b9e395d5e33b3"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:11b47b40650e06147dee5e51a9c9ad73bb7b86968b6f7d30e503b9f8dd1292db"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:98b8107ff14f5af0243f27d236bcc6e1ef8e7e3b3c25df114e91e3a99572da73"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b836f486dba0aceb2551e838ff3f514a38ee72b015364f739e526d720fdb823a"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-win32.whl", hash = "sha256:4671ee300d1818d7bdfd8fa0608580d7778ba701817216f0c17fb29e6b972514"}, + {file = 
"rapidfuzz-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e2065f68fb1d0bf65adc289c1bdc45ba7e464e406b319d67bb54441a1b9da9e"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:65cc97c2fc2c2fe23586599686f3b1ceeedeca8e598cfcc1b7e56dc8ca7e2aa7"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09e908064d3684c541d312bd4c7b05acb99a2c764f6231bd507d4b4b65226c23"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:57c390336cb50d5d3bfb0cfe1467478a15733703af61f6dffb14b1cd312a6fae"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0da54aa8547b3c2c188db3d1c7eb4d1bb6dd80baa8cdaeaec3d1da3346ec9caa"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df8e8c21e67afb9d7fbe18f42c6111fe155e801ab103c81109a61312927cc611"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:461fd13250a2adf8e90ca9a0e1e166515cbcaa5e9c3b1f37545cbbeff9e77f6b"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2b3dd5d206a12deca16870acc0d6e5036abeb70e3cad6549c294eff15591527"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1343d745fbf4688e412d8f398c6e6d6f269db99a54456873f232ba2e7aeb4939"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b1b065f370d54551dcc785c6f9eeb5bd517ae14c983d2784c064b3aa525896df"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:11b125d8edd67e767b2295eac6eb9afe0b1cdc82ea3d4b9257da4b8e06077798"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c33f9c841630b2bb7e69a3fb5c84a854075bb812c47620978bddc591f764da3d"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:ae4574cb66cf1e85d32bb7e9ec45af5409c5b3970b7ceb8dea90168024127566"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e05752418b24bbd411841b256344c26f57da1148c5509e34ea39c7eb5099ab72"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-win32.whl", hash = "sha256:0e1d08cb884805a543f2de1f6744069495ef527e279e05370dd7c83416af83f8"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9a7c6232be5f809cd39da30ee5d24e6cadd919831e6020ec6c2391f4c3bc9264"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:3f32f15bacd1838c929b35c84b43618481e1b3d7a61b5ed2db0291b70ae88b53"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cc64da907114d7a18b5e589057e3acaf2fec723d31c49e13fedf043592a3f6a7"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d9d7f84c8e992a8dbe5a3fdbea73d733da39bf464e62c912ac3ceba9c0cff93"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a79a2f07786a2070669b4b8e45bd96a01c788e7a3c218f531f3947878e0f956"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f338e71c45b69a482de8b11bf4a029993230760120c8c6e7c9b71760b6825a1"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adb40ca8ddfcd4edd07b0713a860be32bdf632687f656963bcbce84cea04b8d8"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48719f7dcf62dfb181063b60ee2d0a39d327fa8ad81b05e3e510680c44e1c078"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9327a4577f65fc3fb712e79f78233815b8a1c94433d0c2c9f6bc5953018b3565"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:200030dfc0a1d5d6ac18e993c5097c870c97c41574e67f227300a1fb74457b1d"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:cc269e74cad6043cb8a46d0ce580031ab642b5930562c2bb79aa7fbf9c858d26"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:e62779c6371bd2b21dbd1fdce89eaec2d93fd98179d36f61130b489f62294a92"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f4797f821dc5d7c2b6fc818b89f8a3f37bcc900dd9e4369e6ebf1e525efce5db"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d21f188f6fe4fbf422e647ae9d5a68671d00218e187f91859c963d0738ccd88c"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-win32.whl", hash = "sha256:45dd4628dd9c21acc5c97627dad0bb791764feea81436fb6e0a06eef4c6dceaa"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:624a108122039af89ddda1a2b7ab2a11abe60c1521956f142f5d11bcd42ef138"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-win_arm64.whl", hash = "sha256:435071fd07a085ecbf4d28702a66fd2e676a03369ee497cc38bcb69a46bc77e2"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe5790a36d33a5d0a6a1f802aa42ecae282bf29ac6f7506d8e12510847b82a45"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cdb33ee9f8a8e4742c6b268fa6bd739024f34651a06b26913381b1413ebe7590"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c99b76b93f7b495eee7dcb0d6a38fb3ce91e72e99d9f78faa5664a881cb2b7d"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af42f2ede8b596a6aaf6d49fdee3066ca578f4856b85ab5c1e2145de367a12d"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c0efa73afbc5b265aca0d8a467ae2a3f40d6854cbe1481cb442a62b7bf23c99"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7ac21489de962a4e2fc1e8f0b0da4aa1adc6ab9512fd845563fecb4b4c52093a"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:1ba007f4d35a45ee68656b2eb83b8715e11d0f90e5b9f02d615a8a321ff00c27"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d7a217310429b43be95b3b8ad7f8fc41aba341109dc91e978cd7c703f928c58f"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:558bf526bcd777de32b7885790a95a9548ffdcce68f704a81207be4a286c1095"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:202a87760f5145140d56153b193a797ae9338f7939eb16652dd7ff96f8faf64c"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcccc08f671646ccb1e413c773bb92e7bba789e3a1796fd49d23c12539fe2e4"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f219f1e3c3194d7a7de222f54450ce12bc907862ff9a8962d83061c1f923c86"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ccbd0e7ea1a216315f63ffdc7cd09c55f57851afc8fe59a74184cb7316c0598b"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a50856f49a4016ef56edd10caabdaf3608993f9faf1e05c3c7f4beeac46bd12a"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fd05336db4d0b8348d7eaaf6fa3c517b11a56abaa5e89470ce1714e73e4aca7"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:573ad267eb9b3f6e9b04febce5de55d8538a87c56c64bf8fd2599a48dc9d8b77"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30fd1451f87ccb6c2f9d18f6caa483116bbb57b5a55d04d3ddbd7b86f5b14998"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6dd36d4916cf57ddb05286ed40b09d034ca5d4bca85c17be0cb6a21290597d9"}, + {file = "rapidfuzz-3.13.0.tar.gz", hash = 
"sha256:d2eaf3839e52cbcc0accbe9817a67b4b0fcf70aaeb229cfddc1c28061f9ce5d8"}, +] + +[package.extras] +all = ["numpy"] + [[package]] name = "referencing" version = "0.35.1" @@ -4967,6 +6379,22 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.1 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rouge-score" +version = "0.1.2" +description = "Pure python implementation of ROUGE-1.5.5." +optional = true +python-versions = ">=3.7" +files = [ + {file = "rouge_score-0.1.2.tar.gz", hash = "sha256:c7d4da2683e68c9abf0135ef915d63a46643666f848e558a1b9f7ead17ff0f04"}, +] + +[package.dependencies] +absl-py = "*" +nltk = "*" +numpy = "*" +six = ">=1.14.0" + [[package]] name = "rpds-py" version = "0.20.1" @@ -5079,6 +6507,20 @@ files = [ {file = "rpds_py-0.20.1.tar.gz", hash = "sha256:e1791c4aabd117653530dccd24108fa03cc6baf21f58b950d0a73c3b3b29a350"}, ] +[[package]] +name = "rsa" +version = "4.9.1" +description = "Pure-Python RSA implementation" +optional = true +python-versions = "<4,>=3.6" +files = [ + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "ruff" version = "0.14.6" @@ -5107,6 +6549,30 @@ files = [ {file = "ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc"}, ] +[[package]] +name = "sacrebleu" +version = "2.6.0" +description = "Hassle-free computation of shareable, comparable, and reproducible BLEU, chrF, and TER scores" +optional = true +python-versions = ">=3.9" +files = [ + {file = "sacrebleu-2.6.0-py3-none-any.whl", hash = "sha256:3edc1531575cfe4ad04ce53491a9307e234af1c3f805a1f491cbec844229a8a8"}, + {file = "sacrebleu-2.6.0.tar.gz", hash = 
"sha256:91499b6cd46138d95154fff1e863c2f9be57e82f0c719d8dd718d0006cf6c566"}, +] + +[package.dependencies] +colorama = "*" +lxml = "*" +numpy = ">=1.17" +portalocker = "*" +regex = "*" +tabulate = ">=0.8.9" + +[package.extras] +dev = ["lxml-stubs", "mypy", "pytest", "setuptools", "types-tabulate", "wheel"] +ja = ["ipadic (>=1.0,<2.0)", "mecab-python3 (>=1.0.9,<2.0.0)"] +ko = ["mecab-ko (>=1.0.2,<2.0.0)", "mecab-ko-dic (>=1.0,<2.0)"] + [[package]] name = "safehttpx" version = "0.1.7" @@ -5161,6 +6627,169 @@ tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] torch = ["safetensors[numpy]", "torch (>=1.10)"] +[[package]] +name = "saliency" +version = "0.2.1" +description = "Framework-agnostic saliency methods" +optional = true +python-versions = "*" +files = [ + {file = "saliency-0.2.1-py2.py3-none-any.whl", hash = "sha256:f388286129c6bca459326fa334d2ab0c65a90607da796cb756296274f2b8f23d"}, + {file = "saliency-0.2.1.tar.gz", hash = "sha256:79a3f64393a3ce89620bf46629af120c36a061019eff51b32b173378c8b18c63"}, +] + +[package.dependencies] +numpy = "*" +scikit-image = "*" + +[package.extras] +full = ["tensorflow (>=1.15)"] +tf1 = ["tensorflow (>=1.15)"] + +[[package]] +name = "scikit-image" +version = "0.24.0" +description = "Image processing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scikit_image-0.24.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb3bc0264b6ab30b43c4179ee6156bc18b4861e78bb329dd8d16537b7bbf827a"}, + {file = "scikit_image-0.24.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:9c7a52e20cdd760738da38564ba1fed7942b623c0317489af1a598a8dedf088b"}, + {file = "scikit_image-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93f46e6ce42e5409f4d09ce1b0c7f80dd7e4373bcec635b6348b63e3c886eac8"}, + {file = 
"scikit_image-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39ee0af13435c57351a3397eb379e72164ff85161923eec0c38849fecf1b4764"}, + {file = "scikit_image-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:7ac7913b028b8aa780ffae85922894a69e33d1c0bf270ea1774f382fe8bf95e7"}, + {file = "scikit_image-0.24.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:272909e02a59cea3ed4aa03739bb88df2625daa809f633f40b5053cf09241831"}, + {file = "scikit_image-0.24.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:190ebde80b4470fe8838764b9b15f232a964f1a20391663e31008d76f0c696f7"}, + {file = "scikit_image-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59c98cc695005faf2b79904e4663796c977af22586ddf1b12d6af2fa22842dc2"}, + {file = "scikit_image-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa27b3a0dbad807b966b8db2d78da734cb812ca4787f7fbb143764800ce2fa9c"}, + {file = "scikit_image-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:dacf591ac0c272a111181afad4b788a27fe70d213cfddd631d151cbc34f8ca2c"}, + {file = "scikit_image-0.24.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6fccceb54c9574590abcddc8caf6cefa57c13b5b8b4260ab3ff88ad8f3c252b3"}, + {file = "scikit_image-0.24.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ccc01e4760d655aab7601c1ba7aa4ddd8b46f494ac46ec9c268df6f33ccddf4c"}, + {file = "scikit_image-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18836a18d3a7b6aca5376a2d805f0045826bc6c9fc85331659c33b4813e0b563"}, + {file = "scikit_image-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8579bda9c3f78cb3b3ed8b9425213c53a25fa7e994b7ac01f2440b395babf660"}, + {file = "scikit_image-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:82ab903afa60b2da1da2e6f0c8c65e7c8868c60a869464c41971da929b3e82bc"}, + {file = "scikit_image-0.24.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:ef04360eda372ee5cd60aebe9be91258639c86ae2ea24093fb9182118008d009"}, + {file = "scikit_image-0.24.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e9aadb442360a7e76f0c5c9d105f79a83d6df0e01e431bd1d5757e2c5871a1f3"}, + {file = "scikit_image-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e37de6f4c1abcf794e13c258dc9b7d385d5be868441de11c180363824192ff7"}, + {file = "scikit_image-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4688c18bd7ec33c08d7bf0fd19549be246d90d5f2c1d795a89986629af0a1e83"}, + {file = "scikit_image-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:56dab751d20b25d5d3985e95c9b4e975f55573554bd76b0aedf5875217c93e69"}, + {file = "scikit_image-0.24.0.tar.gz", hash = "sha256:5d16efe95da8edbeb363e0c4157b99becbd650a60b77f6e3af5768b66cf007ab"}, +] + +[package.dependencies] +imageio = ">=2.33" +lazy-loader = ">=0.4" +networkx = ">=2.8" +numpy = ">=1.23" +packaging = ">=21" +pillow = ">=9.1" +scipy = ">=1.9" +tifffile = ">=2022.8.12" + +[package.extras] +build = ["Cython (>=3.0.4)", "build", "meson-python (>=0.15)", "ninja", "numpy (>=2.0.0rc1)", "packaging (>=21)", "pythran", "setuptools (>=67)", "spin (==0.8)", "wheel"] +data = ["pooch (>=1.6.0)"] +developer = ["ipython", "pre-commit", "tomli"] +docs = ["PyWavelets (>=1.1.1)", "dask[array] (>=2022.9.2)", "ipykernel", "ipywidgets", "kaleido", "matplotlib (>=3.6)", "myst-parser", "numpydoc (>=1.7)", "pandas (>=1.5)", "plotly (>=5.10)", "pooch (>=1.6)", "pydata-sphinx-theme (>=0.15.2)", "pytest-doctestplus", "pytest-runner", "scikit-learn (>=1.1)", "seaborn (>=0.11)", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-gallery (>=0.14)", "sphinx_design (>=0.5)", "tifffile (>=2022.8.12)"] +optional = ["PyWavelets (>=1.1.1)", "SimpleITK", "astropy (>=5.0)", "cloudpickle (>=0.2.1)", "dask[array] (>=2021.1.0)", "matplotlib (>=3.6)", "pooch (>=1.6.0)", "pyamg", "scikit-learn (>=1.1)"] +test = ["asv", "numpydoc (>=1.7)", "pooch (>=1.6.0)", "pytest 
(>=7.0)", "pytest-cov (>=2.11.0)", "pytest-doctestplus", "pytest-faulthandler", "pytest-localserver"] + +[[package]] +name = "scikit-learn" +version = "1.6.1" +description = "A set of python modules for machine learning and data mining" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, + {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, + {file = "scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8634c4bd21a2a813e0a7e3900464e6d593162a29dd35d25bdf0103b3fce60ed5"}, + {file = "scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:775da975a471c4f6f467725dff0ced5c7ac7bda5e9316b260225b48475279a1b"}, + {file = "scikit_learn-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:8a600c31592bd7dab31e1c61b9bbd6dea1b3433e67d264d17ce1017dbdce8002"}, + {file = "scikit_learn-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72abc587c75234935e97d09aa4913a82f7b03ee0b74111dcc2881cba3c5a7b33"}, + {file = "scikit_learn-1.6.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b3b00cdc8f1317b5f33191df1386c0befd16625f49d979fe77a8d44cae82410d"}, + {file = "scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc4765af3386811c3ca21638f63b9cf5ecf66261cc4815c1db3f1e7dc7b79db2"}, + {file = "scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25fc636bdaf1cc2f4a124a116312d837148b5e10872147bdaf4887926b8c03d8"}, + {file = "scikit_learn-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fa909b1a36e000a03c382aade0bd2063fd5680ff8b8e501660c0f59f021a6415"}, + {file = "scikit_learn-1.6.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:926f207c804104677af4857b2c609940b743d04c4c35ce0ddc8ff4f053cddc1b"}, + {file = "scikit_learn-1.6.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c2cae262064e6a9b77eee1c8e768fc46aa0b8338c6a8297b9b6759720ec0ff2"}, + {file = "scikit_learn-1.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1061b7c028a8663fb9a1a1baf9317b64a257fcb036dae5c8752b2abef31d136f"}, + {file = "scikit_learn-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e69fab4ebfc9c9b580a7a80111b43d214ab06250f8a7ef590a4edf72464dd86"}, + {file = "scikit_learn-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:70b1d7e85b1c96383f872a519b3375f92f14731e279a7b4c6cfd650cf5dffc52"}, + {file = "scikit_learn-1.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ffa1e9e25b3d93990e74a4be2c2fc61ee5af85811562f1288d5d055880c4322"}, + {file = "scikit_learn-1.6.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:dc5cf3d68c5a20ad6d571584c0750ec641cc46aeef1c1507be51300e6003a7e1"}, + {file = "scikit_learn-1.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c06beb2e839ecc641366000ca84f3cf6fa9faa1777e29cf0c04be6e4d096a348"}, + {file = "scikit_learn-1.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8ca8cb270fee8f1f76fa9bfd5c3507d60c6438bbee5687f81042e2bb98e5a97"}, + {file = "scikit_learn-1.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:7a1c43c8ec9fde528d664d947dc4c0789be4077a3647f232869f41d9bf50e0fb"}, + {file = "scikit_learn-1.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a17c1dea1d56dcda2fac315712f3651a1fea86565b64b48fa1bc090249cbf236"}, + {file = "scikit_learn-1.6.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a7aa5f9908f0f28f4edaa6963c0a6183f1911e63a69aa03782f0d924c830a35"}, + {file = "scikit_learn-1.6.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0650e730afb87402baa88afbf31c07b84c98272622aaba002559b614600ca691"}, + {file = 
"scikit_learn-1.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:3f59fe08dc03ea158605170eb52b22a105f238a5d512c4470ddeca71feae8e5f"}, + {file = "scikit_learn-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6849dd3234e87f55dce1db34c89a810b489ead832aaf4d4550b7ea85628be6c1"}, + {file = "scikit_learn-1.6.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e7be3fa5d2eb9be7d77c3734ff1d599151bb523674be9b834e8da6abe132f44e"}, + {file = "scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44a17798172df1d3c1065e8fcf9019183f06c87609b49a124ebdf57ae6cb0107"}, + {file = "scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b7a3b86e411e4bce21186e1c180d792f3d99223dcfa3b4f597ecc92fa1a422"}, + {file = "scikit_learn-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7a73d457070e3318e32bdb3aa79a8d990474f19035464dfd8bede2883ab5dc3b"}, + {file = "scikit_learn-1.6.1.tar.gz", hash = "sha256:b4fc2525eca2c69a59260f583c56a7557c6ccdf8deafdba6e060f94c1c59738e"}, +] + +[package.dependencies] +joblib = ">=1.2.0" +numpy = ">=1.19.5" +scipy = ">=1.6.0" +threadpoolctl = ">=3.1.0" + +[package.extras] +benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.17.1)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)", "towncrier (>=24.8.0)"] +examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch 
(>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] +install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] +maintenance = ["conda-lock (==2.5.6)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.5.1)", "scikit-image (>=0.17.2)"] + +[[package]] +name = "scipy" +version = "1.13.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, + {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}, + {file = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}, + {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}, + {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}, + {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"}, + {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"}, + {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}, + {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}, + {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}, + {file = "scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}, +] + +[package.dependencies] +numpy = ">=1.22.4,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + [[package]] name = "semantic-version" version = "2.10.0" @@ -5439,6 +7068,59 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "ruff (<=0.7.1)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +[[package]] +name = "shap" +version = "0.45.1" +description = "A unified approach to explain the output of any machine learning model." 
+optional = true +python-versions = ">=3.9" +files = [ + {file = "shap-0.45.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:40559fa935d712a36eadd3d4b6ce5b9b891c9e99242b54291d97b789438d01e2"}, + {file = "shap-0.45.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:493e824e48704d40129113310c18abfc6a6e7693a61ac2407028df37036bd05b"}, + {file = "shap-0.45.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eacaf6a41de0e0ca52056f2d141f57897044279a44772e1484dcb4b251731eda"}, + {file = "shap-0.45.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e8ec0f7be8c22f2dc14e951cea552ade087446a5417a1c8113a8fc382be55b5"}, + {file = "shap-0.45.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3c9bcea3f5ba8bdbba653ea33912dd197a646189df93a1924a7549fdbf305e3a"}, + {file = "shap-0.45.1-cp310-cp310-win_amd64.whl", hash = "sha256:d48f8bf9db76c979a1f7a5601e8efaa6f814a8be65673ed9fa7bb4f963c0ab98"}, + {file = "shap-0.45.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a467e6753f01e6d8dc6a5251a4846cc5bc14f6126f04829bdf5d66f03ca02e8e"}, + {file = "shap-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:395052459542475d458afc6607fa37820374216ffa0739177b1105bcd551db9c"}, + {file = "shap-0.45.1-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4fe1ea1c0332ccd36ed24925cbd1ec56f787e5184ef19b682d866075261c7d"}, + {file = "shap-0.45.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc416553d44c0ab38f3ff964af2b1081384e1bd51952c9f58a5879a1a1f34d6d"}, + {file = "shap-0.45.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e76637e78ac475e1f711643c062b2f350b473b1baf59f5d8173df65b433bb8d"}, + {file = "shap-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:2fd753424a5ae8b3124da08e54ad9b092c2a184fd37ec43f1c4bcd50161c16bb"}, + {file = 
"shap-0.45.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b54c8f893a67564bcf726d39123782829c6bd174a4ff24538282f74502a18d75"}, + {file = "shap-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c80a4dbc810d64efe2e4a8d80d275eecf251297b53748e1700708bd7f0b25401"}, + {file = "shap-0.45.1-cp312-cp312-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14de63c54b16919d45558054ee60d8046aaa3cf901fa58f8db77a2575aaa735"}, + {file = "shap-0.45.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bce2d114836ad2de11b26484e74473e3131eccd6ba0f4833ce251f539f04097"}, + {file = "shap-0.45.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2322fff6744b8c895d925629fcd7a485f3e99daa4b88e12a76642192f8ad9951"}, + {file = "shap-0.45.1-cp312-cp312-win_amd64.whl", hash = "sha256:35e3ce132e833e8d53bac8f9b4a52b387bc2ad47c3383f3fc2a356d9864e36b4"}, + {file = "shap-0.45.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1abbbc3685c6d8439083e740277782cefee2792e96c82f63505ef251391f4a05"}, + {file = "shap-0.45.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73f4b22cac47096d02337a98dd97edae30241c1bcdaf8ef5e4e08dc1e4c17c80"}, + {file = "shap-0.45.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70f34b6fe6780db22851ef6d232f136213b7fd9bdbb41d6c3f28a05b27661340"}, + {file = "shap-0.45.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:510d5208b557dc28ca5dc3395bc2be997f0e874147b64b8ea9eeac37b1a8e121"}, + {file = "shap-0.45.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c7baf7f736b59f4c98ccd728776e0bc0a151f3726a07876329231744ae773ea"}, + {file = "shap-0.45.1-cp39-cp39-win_amd64.whl", hash = "sha256:cab7265cd283bce19906a0fe9399be98a741ba9a47116f105220bbcfb5ef339c"}, + {file = "shap-0.45.1.tar.gz", hash = "sha256:24e7d7e2c0d6b798701b83eacee063d64926426a150a0d261b4a135f60639f10"}, +] + 
+[package.dependencies] +cloudpickle = "*" +numba = "*" +numpy = "*" +packaging = ">20.9" +pandas = "*" +scikit-learn = "*" +scipy = "*" +slicer = "0.0.8" +tqdm = ">=4.27.0" + +[package.extras] +docs = ["ipython", "matplotlib", "myst-parser (==2.0.0)", "nbsphinx (==0.9.3)", "numpydoc", "requests", "sphinx (==7.2.6)", "sphinx-github-changelog (==1.2.1)", "sphinx-rtd-theme (==2.0.0)"] +others = ["lime"] +plots = ["ipython", "matplotlib"] +test = ["catboost", "gpboost", "lightgbm", "ngboost", "opencv-python", "protobuf (==3.20.3)", "pyod", "pyspark", "pytest", "pytest-cov", "pytest-mpl", "sentencepiece", "tensorflow (<2.16)", "torch", "torch (==2.2.0)", "torchvision", "transformers", "xgboost"] +test-core = ["pytest", "pytest-cov", "pytest-mpl"] +test-notebooks = ["datasets", "jupyter", "keras", "nbconvert", "nbformat", "nlp", "transformers"] + [[package]] name = "shellingham" version = "1.5.4" @@ -5461,6 +7143,17 @@ files = [ {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] +[[package]] +name = "slicer" +version = "0.0.8" +description = "A small package for big slicing." 
+optional = true +python-versions = ">=3.6" +files = [ + {file = "slicer-0.0.8-py3-none-any.whl", hash = "sha256:6c206258543aecd010d497dc2eca9d2805860a0b3758673903456b7df7934dc3"}, + {file = "slicer-0.0.8.tar.gz", hash = "sha256:2e7553af73f0c0c2d355f4afcc3ecf97c6f2156fcf4593955c3f56cf6c4d6eb7"}, +] + [[package]] name = "smmap" version = "5.0.2" @@ -5762,6 +7455,20 @@ files = [ [package.extras] widechars = ["wcwidth"] +[[package]] +name = "termcolor" +version = "3.1.0" +description = "ANSI color formatting for output in terminal" +optional = true +python-versions = ">=3.9" +files = [ + {file = "termcolor-3.1.0-py3-none-any.whl", hash = "sha256:591dd26b5c2ce03b9e43f391264626557873ce1d379019786f99b0c2bee140aa"}, + {file = "termcolor-3.1.0.tar.gz", hash = "sha256:6a6dd7fbee581909eeec6a756cff1d7f7c376063b14e4a298dc4980309e55970"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + [[package]] name = "terminado" version = "0.18.1" @@ -5783,6 +7490,39 @@ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] +[[package]] +name = "threadpoolctl" +version = "3.6.0" +description = "threadpoolctl" +optional = true +python-versions = ">=3.9" +files = [ + {file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"}, + {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}, +] + +[[package]] +name = "tifffile" +version = "2024.8.30" +description = "Read and write TIFF files" +optional = true +python-versions = ">=3.9" +files = [ + {file = "tifffile-2024.8.30-py3-none-any.whl", hash = "sha256:8bc59a8f02a2665cd50a910ec64961c5373bee0b8850ec89d3b7b485bf7be7ad"}, + {file = "tifffile-2024.8.30.tar.gz", hash = "sha256:2c9508fe768962e30f87def61819183fb07692c258cb175b3c114828368485a4"}, +] + +[package.dependencies] +numpy = "*" + 
+[package.extras] +all = ["defusedxml", "fsspec", "imagecodecs (>=2023.8.12)", "lxml", "matplotlib", "zarr"] +codecs = ["imagecodecs (>=2023.8.12)"] +plot = ["matplotlib"] +test = ["cmapfile", "czifile", "dask", "defusedxml", "fsspec", "imagecodecs", "lfdfiles", "lxml", "ndtiff", "oiffile", "psdtags", "pytest", "roifile", "xarray", "zarr"] +xml = ["defusedxml", "lxml"] +zarr = ["fsspec", "zarr"] + [[package]] name = "tiktoken" version = "0.7.0" @@ -6750,6 +8490,23 @@ files = [ {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] +[[package]] +name = "werkzeug" +version = "3.1.5" +description = "The comprehensive WSGI web application library." +optional = true +python-versions = ">=3.9" +files = [ + {file = "werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc"}, + {file = "werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67"}, +] + +[package.dependencies] +markupsafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + [[package]] name = "widgetsnbextension" version = "4.0.14" @@ -7024,7 +8781,10 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] +[extras] +lit = ["lit-nlp"] + [metadata] lock-version = "2.0" python-versions = ">=3.8,<4.0" -content-hash = "3e08d4fcc9f7430a9c157448ff531ef2a95dac7848aa49ea8a39291158006c32" +content-hash = "7392dfb6dfd463fddb12d357e8a951781adad3ee1d515bf6a3867ef24a0b8a24" diff --git a/pyproject.toml b/pyproject.toml index 75d5f3e1c..6b292168a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,11 @@ typeguard="^4.2" typing-extensions="*" wandb=">=0.13.5" + # Optional dependencies + lit-nlp = {version = "^1.3", optional = true, python = ">=3.9"} + + 
[tool.poetry.extras] + lit = ["lit-nlp"] [tool.poetry.group] [tool.poetry.group.dev.dependencies] diff --git a/tests/acceptance/test_hooked_encoder_decoder.py b/tests/acceptance/test_hooked_encoder_decoder.py index 77e926d6f..17cd2f192 100644 --- a/tests/acceptance/test_hooked_encoder_decoder.py +++ b/tests/acceptance/test_hooked_encoder_decoder.py @@ -173,7 +173,7 @@ def test_decoder_attention(our_model, huggingface_model, hello_world_tokens): input_len = hello_world_tokens.shape[1] cache_position = torch.arange(input_len) huggingface_attn_out = huggingface_attn(embed_out, cache_position=cache_position)[0] - assert_close(our_attn_out, huggingface_attn_out, rtol=3e-4, atol=1e-5) + assert_close(our_attn_out, huggingface_attn_out, rtol=5e-4, atol=1e-5) def test_attention_layer(our_model, huggingface_model, hello_world_tokens): diff --git a/tests/unit/test_lit.py b/tests/unit/test_lit.py new file mode 100644 index 000000000..89b3b7db1 --- /dev/null +++ b/tests/unit/test_lit.py @@ -0,0 +1,571 @@ +"""Tests for the LIT integration module. + +This module contains unit and integration tests for the TransformerLens +LIT integration. Tests are designed to work both with and without +the optional lit-nlp dependency. 
+ +To run tests: + pytest tests/unit/test_lit.py -v + +To run with LIT installed: + pip install lit-nlp + pytest tests/unit/test_lit.py -v +""" + +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +import numpy as np +import pytest +import torch + +# Checking if LIT is installed +try: + from lit_nlp.api import types as lit_types + + LIT_AVAILABLE = True +except ImportError: + LIT_AVAILABLE = False + lit_types = None + + +# Fixtures +@pytest.fixture +def mock_hooked_transformer(): + """Create a mock HookedTransformer for testing.""" + mock = MagicMock() + + # Mock config + mock.cfg = MagicMock() + mock.cfg.model_name = "test-model" + mock.cfg.n_layers = 4 + mock.cfg.n_heads = 4 + mock.cfg.d_model = 64 + mock.cfg.d_head = 16 + mock.cfg.d_mlp = 256 + mock.cfg.d_vocab = 100 + mock.cfg.n_ctx = 512 + mock.cfg.act_fn = "gelu" + mock.cfg.normalization_type = "LN" + mock.cfg.positional_embedding_type = "standard" + mock.cfg.device = "cpu" + + # Mock tokenizer + mock.tokenizer = MagicMock() + mock.tokenizer.encode.return_value = [1, 2, 3, 4, 5] + mock.tokenizer.decode.return_value = "test" + mock.tokenizer.convert_ids_to_tokens.return_value = ["", "test", "token", "s", ""] + mock.tokenizer.padding_side = "right" + mock.tokenizer.pad_token = "" + mock.tokenizer.eos_token = "" + mock.tokenizer.bos_token = "" + + # Mock to_tokens + mock.to_tokens.return_value = torch.tensor([[1, 2, 3, 4, 5]]) + + # Mock embed + mock.embed.return_value = torch.randn(1, 5, 64) + + # Mock pos_embed + mock.pos_embed.return_value = torch.randn(1, 5, 64) + + # Mock forward + mock.return_value = torch.randn(1, 5, 100) + + # Mock run_with_cache + def mock_run_with_cache(*args, **kwargs): + logits = torch.randn(1, 5, 100) + cache = MagicMock() + + # Mock cache access + def getitem(key): + if "hook_embed" in key: + return torch.randn(1, 5, 64) + elif "hook_resid_post" in key: + return torch.randn(1, 5, 64) + elif "hook_pattern" in key: + return torch.randn(1, 4, 5, 5) # 
[batch, heads, q, k] + return torch.randn(1, 5, 64) + + cache.__getitem__ = getitem + return logits, cache + + mock.run_with_cache = mock_run_with_cache + + return mock + + +@pytest.fixture +def sample_texts(): + """Sample texts for testing.""" + return [ + "The quick brown fox jumps over the lazy dog.", + "Hello, world!", + "Machine learning is fascinating.", + ] + + +@pytest.fixture +def sample_examples(sample_texts): + """Sample examples in LIT format.""" + return [{"text": text} for text in sample_texts] + + +# Tests for utils.py + + +class TestUtils: + """Tests for utility functions.""" + + def test_check_lit_installed(self): + """Test LIT installation check.""" + from transformer_lens.lit.utils import check_lit_installed + + # Should return a boolean + result = check_lit_installed() + assert isinstance(result, bool) + assert result == LIT_AVAILABLE + + def test_tensor_to_numpy_tensor(self): + """Test tensor to numpy conversion with tensor input.""" + from transformer_lens.lit.utils import tensor_to_numpy + + tensor = torch.randn(3, 4) + result = tensor_to_numpy(tensor) + + assert isinstance(result, np.ndarray) + assert result.shape == (3, 4) + np.testing.assert_array_almost_equal(result, tensor.numpy()) + + def test_tensor_to_numpy_array(self): + """Test tensor to numpy conversion with numpy input.""" + from transformer_lens.lit.utils import tensor_to_numpy + + array = np.random.randn(3, 4) + result = tensor_to_numpy(array) + + assert isinstance(result, np.ndarray) + assert result is array # Should return same object + + def test_tensor_to_numpy_none(self): + """Test tensor to numpy conversion with None input.""" + from transformer_lens.lit.utils import tensor_to_numpy + + result = tensor_to_numpy(None) + assert result is None + + def test_numpy_to_tensor(self): + """Test numpy to tensor conversion.""" + from transformer_lens.lit.utils import numpy_to_tensor + + array = np.random.randn(3, 4).astype(np.float32) + result = numpy_to_tensor(array) + + assert 
isinstance(result, torch.Tensor) + assert result.shape == (3, 4) + + def test_numpy_to_tensor_with_device(self): + """Test numpy to tensor conversion with device specification.""" + from transformer_lens.lit.utils import numpy_to_tensor + + array = np.random.randn(3, 4).astype(np.float32) + result = numpy_to_tensor(array, device="cpu") + + assert isinstance(result, torch.Tensor) + assert result.device.type == "cpu" + + def test_clean_token_string(self): + """Test token string cleaning.""" + from transformer_lens.lit.utils import clean_token_string + + # GPT-2 style + assert clean_token_string("Ġhello") == "▁hello" + + # SentencePiece style + assert clean_token_string("▁world") == "▁world" + + # BERT style + assert clean_token_string("##ing") == "ing" + + # Regular token + assert clean_token_string("test") == "test" + + def test_clean_token_strings(self): + """Test batch token string cleaning.""" + from transformer_lens.lit.utils import clean_token_strings + + tokens = ["Ġhello", "▁world", "##ing", "test"] + result = clean_token_strings(tokens) + + assert result == ["▁hello", "▁world", "ing", "test"] + + def test_batch_examples(self): + """Test example batching.""" + from transformer_lens.lit.utils import batch_examples + + examples = [{"text": f"example {i}"} for i in range(10)] + batches = batch_examples(examples, batch_size=3) + + assert len(batches) == 4 # 10 / 3 = 4 (rounded up) + assert len(batches[0]) == 3 + assert len(batches[1]) == 3 + assert len(batches[2]) == 3 + assert len(batches[3]) == 1 + + def test_unbatch_outputs(self): + """Test output unbatching.""" + from transformer_lens.lit.utils import unbatch_outputs + + batched = { + "logits": np.random.randn(3, 5, 10), + "tokens": [["a", "b"], ["c", "d"], ["e", "f"]], + } + result = unbatch_outputs(batched) + + assert len(result) == 3 + assert result[0]["logits"].shape == (5, 10) + assert result[0]["tokens"] == ["a", "b"] + + def test_get_model_info(self, mock_hooked_transformer): + """Test model info 
extraction.""" + from transformer_lens.lit.utils import get_model_info + + info = get_model_info(mock_hooked_transformer) + + assert info["model_name"] == "test-model" + assert info["n_layers"] == 4 + assert info["n_heads"] == 4 + assert info["d_model"] == 64 + + +# Tests for constants.py + + +class TestConstants: + """Tests for constants module.""" + + def test_input_field_names(self): + """Test input field names are defined.""" + from transformer_lens.lit.constants import INPUT_FIELDS + + assert INPUT_FIELDS.TEXT == "text" + assert INPUT_FIELDS.TOKENS == "tokens" + assert INPUT_FIELDS.TARGET_MASK == "target_mask" + + def test_output_field_names(self): + """Test output field names are defined.""" + from transformer_lens.lit.constants import OUTPUT_FIELDS + + assert OUTPUT_FIELDS.TOKENS == "tokens" + assert OUTPUT_FIELDS.TOP_K_TOKENS == "top_k_tokens" + assert OUTPUT_FIELDS.CLS_EMBEDDING == "cls_embedding" + + def test_default_config(self): + """Test default configuration values.""" + from transformer_lens.lit.constants import DEFAULTS + + assert DEFAULTS.MAX_SEQ_LENGTH == 512 + assert DEFAULTS.BATCH_SIZE == 8 + assert DEFAULTS.TOP_K == 10 + assert isinstance(DEFAULTS.COMPUTE_GRADIENTS, bool) + + def test_hook_point_names(self): + """Test hook point name templates.""" + from transformer_lens.lit.constants import HOOK_POINTS + + assert HOOK_POINTS.HOOK_EMBED == "hook_embed" + assert "{layer}" in HOOK_POINTS.RESID_PRE_TEMPLATE + assert "{layer}" in HOOK_POINTS.ATTN_PATTERN_TEMPLATE + + def test_error_messages(self): + """Test error messages are defined.""" + from transformer_lens.lit.constants import ERRORS + + assert "tokenizer" in ERRORS.NO_TOKENIZER.lower() + assert "lit" in ERRORS.LIT_NOT_INSTALLED.lower() + + +# Tests for dataset.py + + +class TestDatasets: + """Tests for dataset classes.""" + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_simple_text_dataset_init(self, sample_examples): + """Test SimpleTextDataset 
initialization.""" + from transformer_lens.lit.dataset import SimpleTextDataset + + dataset = SimpleTextDataset(sample_examples, name="TestDataset") + + assert len(dataset.examples) == 3 + assert dataset.examples[0]["text"] == sample_examples[0]["text"] + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_simple_text_dataset_from_strings(self, sample_texts): + """Test creating dataset from strings.""" + from transformer_lens.lit.dataset import SimpleTextDataset + + dataset = SimpleTextDataset.from_strings(sample_texts) + + assert len(dataset.examples) == 3 + assert dataset.examples[0]["text"] == sample_texts[0] + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_simple_text_dataset_spec(self, sample_examples): + """Test dataset spec method.""" + from transformer_lens.lit.dataset import SimpleTextDataset + + dataset = SimpleTextDataset(sample_examples) + spec = dataset.spec() + + assert "text" in spec + assert isinstance(spec["text"], lit_types.TextSegment) # type: ignore[union-attr] + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_simple_text_dataset_missing_text(self): + """Test dataset validation for missing text field.""" + from transformer_lens.lit.dataset import SimpleTextDataset + + with pytest.raises(ValueError, match="missing required field"): + SimpleTextDataset([{"other_field": "value"}]) + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_prompt_completion_dataset(self): + """Test PromptCompletionDataset.""" + from transformer_lens.lit.dataset import PromptCompletionDataset + + examples = [ + {"prompt": "Hello", "completion": " world"}, + {"prompt": "The answer is", "completion": " 42"}, + ] + dataset = PromptCompletionDataset(examples) + + assert len(dataset.examples) == 2 + assert dataset.examples[0]["text"] == "Hello world" + assert dataset.examples[0]["prompt"] == "Hello" + assert dataset.examples[0]["completion"] == " world" + + 
@pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_prompt_completion_from_pairs(self): + """Test creating PromptCompletionDataset from pairs.""" + from transformer_lens.lit.dataset import PromptCompletionDataset + + pairs = [("Hello", " world"), ("The answer is", " 42")] + dataset = PromptCompletionDataset.from_pairs(pairs) + + assert len(dataset.examples) == 2 + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_ioi_dataset_generate(self): + """Test IOI dataset generation.""" + from transformer_lens.lit.dataset import IOIDataset + + dataset = IOIDataset.generate(n_examples=10, seed=42) + + assert len(dataset.examples) == 10 + # Check structure + ex = dataset.examples[0] + assert "text" in ex + assert "name1" in ex + assert "name2" in ex + assert "answer" in ex + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_induction_dataset_generate(self): + """Test Induction dataset generation.""" + from transformer_lens.lit.dataset import InductionDataset + + dataset = InductionDataset.generate_simple(n_examples=10, seed=42) + + assert len(dataset.examples) == 10 + ex = dataset.examples[0] + assert "text" in ex + assert "pattern" in ex + + +# Tests for model.py + + +class TestModel: + """Tests for model wrapper classes.""" + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_config_defaults(self): + """Test HookedTransformerLITConfig defaults.""" + from transformer_lens.lit.model import HookedTransformerLITConfig + + config = HookedTransformerLITConfig() + + assert config.max_seq_length == 512 + assert config.batch_size == 8 + assert config.top_k == 10 + assert config.compute_gradients is True + assert config.output_attention is True + assert config.output_embeddings is True + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_config_custom(self): + """Test custom configuration.""" + from transformer_lens.lit.model import 
HookedTransformerLITConfig + + config = HookedTransformerLITConfig( + max_seq_length=256, + batch_size=4, + compute_gradients=False, + ) + + assert config.max_seq_length == 256 + assert config.batch_size == 4 + assert config.compute_gradients is False + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_model_wrapper_init(self, mock_hooked_transformer): + """Test HookedTransformerLIT initialization.""" + from transformer_lens.lit.model import HookedTransformerLIT + + # Need to mock the isinstance check - patch where it's imported + with patch("transformer_lens.HookedTransformer", type(mock_hooked_transformer)): + wrapper = HookedTransformerLIT(mock_hooked_transformer) + + assert wrapper.model is mock_hooked_transformer + assert wrapper.config is not None + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_model_wrapper_invalid_model(self): + """Test that invalid model type raises error.""" + from transformer_lens.lit.model import HookedTransformerLIT + + with pytest.raises(TypeError): + HookedTransformerLIT("not a model") # type: ignore[union-attr] + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_model_input_spec(self, mock_hooked_transformer): + """Test input_spec method.""" + from transformer_lens.lit.model import HookedTransformerLIT + + with patch("transformer_lens.HookedTransformer", type(mock_hooked_transformer)): + wrapper = HookedTransformerLIT(mock_hooked_transformer) + spec = wrapper.input_spec() + + assert "text" in spec + assert isinstance(spec["text"], lit_types.TextSegment) # type: ignore[union-attr] + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_model_output_spec(self, mock_hooked_transformer): + """Test output_spec method.""" + from transformer_lens.lit.model import HookedTransformerLIT + + with patch("transformer_lens.HookedTransformer", type(mock_hooked_transformer)): + wrapper = 
HookedTransformerLIT(mock_hooked_transformer) + spec = wrapper.output_spec() + + assert "tokens" in spec + assert "top_k_tokens" in spec + # With default config, should have embeddings + assert "cls_embedding" in spec + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_model_output_spec_no_embeddings(self, mock_hooked_transformer): + """Test output_spec without embeddings.""" + from transformer_lens.lit.model import ( + HookedTransformerLIT, + HookedTransformerLITConfig, + ) + + # Must also disable compute_gradients since gradients require embeddings + config = HookedTransformerLITConfig(output_embeddings=False, compute_gradients=False) + + with patch("transformer_lens.HookedTransformer", type(mock_hooked_transformer)): + wrapper = HookedTransformerLIT(mock_hooked_transformer, config=config) + spec = wrapper.output_spec() + + assert "tokens" in spec + assert "cls_embedding" not in spec + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_model_description(self, mock_hooked_transformer): + """Test model description.""" + from transformer_lens.lit.model import HookedTransformerLIT + + with patch("transformer_lens.HookedTransformer", type(mock_hooked_transformer)): + wrapper = HookedTransformerLIT(mock_hooked_transformer) + desc = wrapper.description() + + assert "test-model" in desc + assert "4L" in desc # n_layers + assert "4H" in desc # n_heads + + +# Tests for __init__.py + + +class TestInit: + """Tests for module initialization and exports.""" + + def test_exports_available(self): + """Test that expected exports are available.""" + from transformer_lens import lit + + # Check key exports exist + assert hasattr(lit, "HookedTransformerLIT") + assert hasattr(lit, "HookedTransformerLITConfig") + assert hasattr(lit, "SimpleTextDataset") + assert hasattr(lit, "serve") + assert hasattr(lit, "LITWidget") + assert hasattr(lit, "check_lit_installed") + + def test_constants_exported(self): + """Test that constants 
are exported.""" + from transformer_lens.lit import INPUT_FIELDS, OUTPUT_FIELDS + + assert INPUT_FIELDS.TEXT == "text" + assert OUTPUT_FIELDS.TOKENS == "tokens" + + @pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") + def test_all_exports(self): + """Test __all__ exports are importable.""" + from transformer_lens import lit + + for name in lit.__all__: + assert hasattr(lit, name), f"Missing export: {name}" + + +# Integration Tests + + +@pytest.mark.skipif(not LIT_AVAILABLE, reason="LIT not installed") +class TestIntegration: + """Integration tests that require both LIT and a model.""" + + def test_full_prediction_flow(self, mock_hooked_transformer): + """Test full prediction flow with mock model.""" + from transformer_lens.lit.model import HookedTransformerLIT + + with patch("transformer_lens.HookedTransformer", type(mock_hooked_transformer)): + wrapper = HookedTransformerLIT(mock_hooked_transformer) + + # This would fail with the mock, but we can at least check the structure + # In a real test with a real model, this would work + input_spec = wrapper.input_spec() + output_spec = wrapper.output_spec() + + assert "text" in input_spec + assert "tokens" in output_spec + + def test_dataset_model_compatibility(self): + """Test that datasets are compatible with model input spec.""" + from transformer_lens.lit.dataset import SimpleTextDataset + + dataset = SimpleTextDataset.from_strings(["test"]) + spec = dataset.spec() + + # Check that dataset spec matches expected model input + assert "text" in spec + + +# Run tests + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/transformer_lens/__init__.py b/transformer_lens/__init__.py index 1e2ff1e1a..02e5f2561 100644 --- a/transformer_lens/__init__.py +++ b/transformer_lens/__init__.py @@ -20,6 +20,13 @@ from . import patching from . import train +# LIT integration (optional, requires lit-nlp package) +try: + from . 
import lit +except ImportError: + # LIT is an optional dependency + lit = None # type: ignore + from .past_key_value_caching import ( HookedTransformerKeyValueCache as EasyTransformerKeyValueCache, HookedTransformerKeyValueCacheEntry as EasyTransformerKeyValueCacheEntry, diff --git a/transformer_lens/lit/README.md b/transformer_lens/lit/README.md new file mode 100644 index 000000000..e036700ea --- /dev/null +++ b/transformer_lens/lit/README.md @@ -0,0 +1,260 @@ +# TransformerLens LIT Integration + +This module provides integration between [TransformerLens](https://github.com/TransformerLensOrg/TransformerLens) and Google's [Learning Interpretability Tool (LIT)](https://pair-code.github.io/lit/). + +## Features + +- **Interactive Model Exploration**: Visualize attention patterns, embeddings, and predictions +- **Token Salience**: Gradient-based importance scores for input tokens +- **Embedding Projector**: UMAP/t-SNE visualization of token and sequence embeddings +- **Attention Visualization**: Multi-head attention patterns across all layers +- **Top-K Predictions**: Token predictions with probabilities at each position +- **Built-in Datasets**: IOI, Induction, and custom dataset support + +## Installation + +Install TransformerLens with LIT support: + +```bash +pip install transformer-lens[lit] +``` + +Or install LIT separately: + +```bash +pip install transformer-lens lit-nlp +``` + +## Quick Start + +### In a Jupyter/Colab Notebook + +```python +from transformer_lens import HookedTransformer +from transformer_lens.lit import ( + HookedTransformerLIT, + HookedTransformerLITConfig, + SimpleTextDataset, + LITWidget, +) + +# Load model +model = HookedTransformer.from_pretrained("gpt2-small") + +# Create LIT wrapper +config = HookedTransformerLITConfig( + max_seq_length=256, + compute_gradients=True, + output_attention=True, +) +lit_model = HookedTransformerLIT(model, config=config) + +# Create dataset +dataset = SimpleTextDataset.from_strings([ + "The capital of 
France is Paris.", + "The quick brown fox jumps over the lazy dog.", +]) + +# Launch widget +widget = LITWidget( + models={"gpt2": lit_model}, + datasets={"examples": dataset}, +) +widget.render() +``` + +### As a Standalone Server + +```python +from transformer_lens import HookedTransformer +from transformer_lens.lit import ( + HookedTransformerLIT, + SimpleTextDataset, + serve, +) + +model = HookedTransformer.from_pretrained("gpt2-small") +lit_model = HookedTransformerLIT(model) + +serve( + models={"gpt2": lit_model}, + datasets={"examples": SimpleTextDataset.from_strings(["Hello world!"])}, + port=5432, +) +# Navigate to http://localhost:5432 +``` + +## Configuration + +### HookedTransformerLITConfig + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `max_seq_length` | int | 512 | Maximum sequence length | +| `batch_size` | int | 8 | Batch size for inference | +| `top_k` | int | 10 | Number of top predictions per position | +| `compute_gradients` | bool | True | Enable gradient-based salience | +| `output_attention` | bool | True | Output attention patterns | +| `output_embeddings` | bool | True | Output embeddings | +| `output_all_layers` | bool | False | Output all layer embeddings | +| `prepend_bos` | bool | True | Prepend BOS token | +| `device` | str | None | Device (auto-detected if None) | + +## Datasets + +### SimpleTextDataset + +Basic dataset for text inputs: + +```python +dataset = SimpleTextDataset.from_strings([ + "Text 1", + "Text 2", +]) +``` + +### PromptCompletionDataset + +For prompt-completion pairs: + +```python +dataset = PromptCompletionDataset.from_pairs([ + ("The capital of France is", " Paris"), + ("2 + 2 =", " 4"), +]) +``` + +### IOIDataset + +Indirect Object Identification task: + +```python +dataset = IOIDataset.generate(n_examples=100, seed=42) +# Generates: "When Mary and John went to the store, John gave a book to" +# Answer: "Mary" +``` + +### InductionDataset + +For analyzing 
induction heads: + +```python +dataset = InductionDataset.generate_simple(n_examples=50) +# Generates patterns like: "A B C D A B" -> expects "C" +``` + +## Output Fields + +The wrapper produces these outputs for LIT: + +| Field | Type | Description | +|-------|------|-------------| +| `tokens` | Tokens | Tokenized input | +| `probabilities` | MulticlassPreds | Token probabilities | +| `top_k_tokens` | TokenTopKPreds | Top-K predictions per position | +| `cls_embedding` | Embeddings | First token embedding | +| `mean_embedding` | Embeddings | Mean-pooled embedding | +| `input_embeddings` | TokenEmbeddings | Per-token embeddings [seq, emb_dim] | +| `layer_N/attention` | AttentionHeads | Attention per layer | +| `layer_N/embeddings` | TokenEmbeddings | Embeddings per layer | +| `grad_l2` | TokenGradients | Per-token gradients [seq, emb_dim] | +| `grad_dot_input` | TokenGradients | Per-token gradients [seq, emb_dim] | + +Note: LIT internally computes L2 norms and dot products from the gradient arrays. + +## LIT Features Supported + +- **Attention Visualization**: See which tokens attend to which +- **Embedding Projector**: UMAP/t-SNE of embeddings +- **Token Salience**: Gradient-based importance +- **Prediction Analysis**: Top-K token predictions +- **Data Table**: Browse and filter examples +- **Counterfactual Generation**: Test modified inputs + +## Examples + +### Visualizing Attention Patterns + +```python +import matplotlib.pyplot as plt + +# Get outputs +outputs = list(lit_model.predict([{"text": "Hello world!"}]))[0] + +# Plot attention for layer 5, head 0 +attn = outputs["layer_5/attention"] # [heads, q, k] +plt.imshow(attn[0]) +plt.colorbar() +plt.show() +``` + +### Computing Token Salience + +LIT's built-in salience modules (GradientNorm, GradientDotInput) compute +token importance from the raw gradient arrays automatically. 
+ +```python +import numpy as np + +outputs = list(lit_model.predict([{"text": "The answer is 42"}]))[0] + +# Raw gradients [seq_len, emb_dim] +gradients = outputs["grad_l2"] + +# Compute L2 norm manually (LIT does this internally) +salience = np.linalg.norm(gradients, axis=1) +for token, score in zip(outputs["tokens"], salience): + print(f"{token}: {score:.4f}") +``` + +## API Reference + +### Classes + +- `HookedTransformerLIT`: Main LIT model wrapper +- `HookedTransformerLITBatched`: Batched inference wrapper +- `HookedTransformerLITConfig`: Configuration dataclass +- `SimpleTextDataset`: Basic text dataset +- `PromptCompletionDataset`: Prompt-completion pairs +- `IOIDataset`: IOI benchmark dataset +- `InductionDataset`: Induction head dataset +- `LITWidget`: Jupyter/Colab widget + +### Functions + +- `serve(models, datasets, ...)`: Start LIT server +- `check_lit_installed()`: Check if LIT is available +- `wrap_for_lit(examples)`: Wrap examples for LIT + +## Troubleshooting + +### LIT not found + +``` +ImportError: lit-nlp is not installed +``` + +Install LIT: `pip install lit-nlp` + +### CUDA out of memory + +Reduce batch size or use a smaller model: + +```python +config = HookedTransformerLITConfig(batch_size=1) +``` + +### Widget not rendering + +Make sure you're in a Jupyter/Colab environment with JavaScript enabled. + +## Contributing + +See the main TransformerLens [CONTRIBUTING.md](../../CONTRIBUTING.md) for guidelines. 
+ +## References + +- [LIT Paper](https://arxiv.org/abs/2008.05122): Tenney et al., "The Language Interpretability Tool" (EMNLP 2020) +- [TransformerLens](https://github.com/TransformerLensOrg/TransformerLens) +- [LIT Documentation](https://pair-code.github.io/lit/) +- [IOI Paper](https://arxiv.org/abs/2211.00593): Wang et al., "Interpretability in the Wild" (2022) diff --git a/transformer_lens/lit/__init__.py b/transformer_lens/lit/__init__.py new file mode 100644 index 000000000..448d8e4f3 --- /dev/null +++ b/transformer_lens/lit/__init__.py @@ -0,0 +1,302 @@ +"""LIT (Learning Interpretability Tool) integration for TransformerLens. + +This module provides integration between TransformerLens and Google's Learning +Interpretability Tool (LIT), enabling interactive visualization and analysis +of transformer models. + +Quick Start: + >>> from transformer_lens import HookedTransformer # doctest: +SKIP + >>> from transformer_lens.lit import HookedTransformerLIT, SimpleTextDataset, serve # doctest: +SKIP + >>> + >>> # Load model and create LIT wrapper + >>> model = HookedTransformer.from_pretrained("gpt2-small") # doctest: +SKIP + >>> lit_model = HookedTransformerLIT(model) # doctest: +SKIP + >>> + >>> # Create a dataset + >>> dataset = SimpleTextDataset.from_strings([ # doctest: +SKIP + ... "The capital of France is Paris.", + ... "Machine learning is a field of AI.", + ... 
from __future__ import annotations

import logging
from typing import Any, Dict, Union

# The availability check is importable even when lit-nlp itself is missing.
from .utils import check_lit_installed

# Public API of transformer_lens.lit.
# Fix: "HookedTransformerLITConfig" was previously listed twice (under both
# "Model wrappers" and a separate "Configuration" section); duplicates in
# __all__ are harmless at runtime but wrong as API documentation.
__all__ = [
    # Model wrappers
    "HookedTransformerLIT",
    "HookedTransformerLITBatched",
    "HookedTransformerLITConfig",
    # Datasets
    "SimpleTextDataset",
    "PromptCompletionDataset",
    "IOIDataset",
    "InductionDataset",
    "wrap_for_lit",
    # Server utilities
    "serve",
    "LITWidget",
    # Constants
    "INPUT_FIELDS",
    "OUTPUT_FIELDS",
    # Utilities
    "check_lit_installed",
]

logger = logging.getLogger(__name__)

# Import constants (always available)
from .constants import ERRORS, INPUT_FIELDS, OUTPUT_FIELDS, SERVER_CONFIG  # noqa: E402

# Import datasets (handles LIT availability internally)
from .dataset import (  # noqa: E402
    InductionDataset,
    IOIDataset,
    PromptCompletionDataset,
    SimpleTextDataset,
    wrap_for_lit,
)
# Import the model wrapper (handles LIT availability internally)
from .model import HookedTransformerLIT, HookedTransformerLITConfig  # noqa: E402

# Conditional imports that require LIT itself.
_LIT_AVAILABLE = check_lit_installed()

if _LIT_AVAILABLE:
    from .model import HookedTransformerLITBatched  # noqa: E402
else:
    HookedTransformerLITBatched = None  # type: ignore[misc, assignment]


def serve(
    models: Union[Dict[str, Any], Any],
    datasets: Union[Dict[str, Any], Any],
    port: int = SERVER_CONFIG.DEFAULT_PORT,
    host: str = SERVER_CONFIG.DEFAULT_HOST,
    page_title: str = SERVER_CONFIG.DEFAULT_TITLE,
    **kwargs,
) -> None:
    """Launch a blocking LIT server for interactive model exploration.

    Args:
        models: A single HookedTransformer / HookedTransformerLIT, or a
            dictionary mapping model names to wrappers. A bare
            HookedTransformer is wrapped automatically.
        datasets: A single dataset, or a dictionary mapping dataset names
            to datasets.
        port: Port number for the server.
        host: Host address for the server.
        page_title: Title shown in the browser tab.
        **kwargs: Extra arguments forwarded to the LIT dev server.

    Raises:
        ImportError: If the optional lit-nlp dependency is not installed.

    Note:
        This call blocks and runs the server; press Ctrl+C to stop.
    """
    if not _LIT_AVAILABLE:
        raise ImportError(ERRORS.LIT_NOT_INSTALLED)

    from lit_nlp import dev_server

    # Normalize a bare model into a named dict, wrapping raw
    # HookedTransformers (detected via their cfg / run_with_cache attrs).
    if not isinstance(models, dict):
        candidate = models
        if hasattr(candidate, "cfg") and hasattr(candidate, "run_with_cache"):
            candidate = HookedTransformerLIT(candidate)
        models = {"model": candidate}

    if not isinstance(datasets, dict):
        datasets = {"dataset": datasets}

    # Our dataset classes expose ``_examples``; anything else is assumed to
    # already be a LIT-native dataset.
    wrapped_datasets = {
        name: wrap_for_lit(ds) if hasattr(ds, "_examples") else ds
        for name, ds in datasets.items()
    }

    import os

    import lit_nlp
    from lit_nlp.api import layout as lit_layout

    # The LIT frontend assets ship inside the lit_nlp package.
    client_root = os.path.join(
        os.path.dirname(lit_nlp.__file__), "client", "build", "default"
    )

    # Fall back to LIT's stock layouts unless the caller supplied their own.
    kwargs.setdefault("layouts", lit_layout.DEFAULT_LAYOUTS)
    kwargs.setdefault("default_layout", "default")

    server = dev_server.Server(
        models,
        wrapped_datasets,
        port=port,
        host=host,
        page_title=page_title,
        client_root=client_root,
        **kwargs,
    )

    logger.info(f"Starting LIT server at http://{host}:{port}")
    server.serve()


class LITWidget:
    """LIT widget for Jupyter/Colab notebooks.

    Wraps :class:`lit_nlp.notebook.LitWidget` so LIT can be used inside a
    notebook without running a separate server process.

    Note:
        VSCode notebooks don't support iframe rendering. Use ``widget.url``
        to get the URL and open it manually in your browser.
    """

    def __init__(
        self,
        models: Dict[str, Any],
        datasets: Dict[str, Any],
        height: int = 800,
        **kwargs,
    ):
        """Create the underlying notebook widget (without rendering it).

        Args:
            models: Dictionary mapping model names to model wrappers.
            datasets: Dictionary mapping dataset names to datasets.
            height: Height of the widget in pixels.
            **kwargs: Additional arguments for the LIT widget.

        Raises:
            ImportError: If the optional lit-nlp dependency is not installed.
        """
        if not _LIT_AVAILABLE:
            raise ImportError(ERRORS.LIT_NOT_INSTALLED)

        from lit_nlp import notebook

        # Same wrapping rule as serve(): our datasets expose ``_examples``.
        wrapped_datasets = {
            name: wrap_for_lit(ds) if hasattr(ds, "_examples") else ds
            for name, ds in datasets.items()
        }

        # LitWidget manages its own layout selection.
        kwargs.pop("default_layout", None)

        self._widget = notebook.LitWidget(
            models,
            wrapped_datasets,
            height=height,
            render=False,  # Defer rendering until render() is called.
            **kwargs,
        )

    @property
    def url(self) -> str:
        """URL of the widget's LIT server.

        Useful when notebook rendering is unavailable (e.g. VSCode):
        print this URL and open it in a browser.
        """
        return f"http://localhost:{self._widget._server.port}"

    def render(self, open_in_new_tab: bool = False, **kwargs):
        """Render the LIT widget.

        Args:
            open_in_new_tab: If True, opens in a new browser tab.
            **kwargs: Additional render arguments.

        Note:
            If rendering fails in your environment, use ``print(widget.url)``
            and open that URL in your browser.
        """
        self._widget.render(open_in_new_tab=open_in_new_tab, **kwargs)

    def stop(self):
        """Stop the widget's server and free resources."""
        self._widget.stop()


# Version info
__version__ = "1.0.0"
from dataclasses import dataclass
from typing import Optional

# ---------------------------------------------------------------------------
# Field names used in input_spec / output_spec
# ---------------------------------------------------------------------------


@dataclass(frozen=True)
class InputFieldNames:
    """Field names for model inputs in LIT."""

    TEXT: str = "text"  # primary text input
    TOKENS: str = "tokens"  # optional pre-tokenized input
    TOKEN_EMBEDDINGS: str = "token_embeddings"  # for integrated gradients
    TARGET: str = "target"  # target for gradient computation
    TARGET_MASK: str = "target_mask"  # mask for sequence salience


@dataclass(frozen=True)
class OutputFieldNames:
    """Field names for model outputs in LIT.

    Names containing ``{layer}`` / ``{head}`` are str.format templates.
    """

    TOKENS: str = "tokens"  # tokenized input
    TOKEN_IDS: str = "token_ids"
    LOGITS: str = "logits"  # logits over vocabulary
    TOP_K_TOKENS: str = "top_k_tokens"  # top-k predicted tokens
    GENERATED_TEXT: str = "generated_text"  # autoregressive generation
    PROBAS: str = "probas"  # next-token probabilities
    LOSS: str = "loss"  # per-token loss
    LAYER_EMB_TEMPLATE: str = "layer_{layer}/embeddings"
    CLS_EMBEDDING: str = "cls_embedding"  # first token of final layer
    MEAN_EMBEDDING: str = "mean_embedding"  # mean-pooled embedding
    ATTENTION_TEMPLATE: str = "layer_{layer}/head_{head}/attention"
    LAYER_ATTENTION_TEMPLATE: str = "layer_{layer}/attention"
    TOKEN_GRADIENTS: str = "token_gradients"  # salience gradients
    GRAD_L2: str = "grad_l2"  # gradient L2 norm per token
    GRAD_DOT_INPUT: str = "grad_dot_input"  # gradient dot input per token
    INPUT_EMBEDDINGS: str = "input_embeddings"  # for integrated gradients


# Singletons for convenient access.
INPUT_FIELDS = InputFieldNames()
OUTPUT_FIELDS = OutputFieldNames()

# ---------------------------------------------------------------------------
# Default configuration values
# ---------------------------------------------------------------------------


@dataclass(frozen=True)
class DefaultConfig:
    """Default configuration values for the LIT wrapper."""

    MAX_SEQ_LENGTH: int = 512  # maximum tokenized sequence length
    BATCH_SIZE: int = 8  # batch size for inference
    TOP_K: int = 10  # number of top-k prediction candidates
    COMPUTE_GRADIENTS: bool = True  # compute and return gradients
    OUTPUT_ATTENTION: bool = True  # return attention patterns
    OUTPUT_EMBEDDINGS: bool = True  # return per-layer embeddings
    OUTPUT_ALL_LAYERS: bool = False  # all layers vs. final layer only
    EMBEDDING_LAYERS: Optional[tuple] = None  # None = all layers
    PREPEND_BOS: bool = True  # prepend BOS token
    DEVICE: Optional[str] = None  # None = auto-detect
    USE_FP16: bool = False  # FP16 for memory efficiency


DEFAULTS = DefaultConfig()

# ---------------------------------------------------------------------------
# TransformerLens hook-point names
# ---------------------------------------------------------------------------


@dataclass(frozen=True)
class HookPointNames:
    """Common hook-point names in HookedTransformer.

    ``*_TEMPLATE`` entries take a ``{layer}`` index via str.format; these are
    the points where intermediate activations can be intercepted.
    """

    # Embedding hooks
    HOOK_EMBED: str = "hook_embed"
    HOOK_POS_EMBED: str = "hook_pos_embed"
    HOOK_TOKENS: str = "hook_tokens"

    # Residual stream hooks
    RESID_PRE_TEMPLATE: str = "blocks.{layer}.hook_resid_pre"
    RESID_POST_TEMPLATE: str = "blocks.{layer}.hook_resid_post"
    RESID_MID_TEMPLATE: str = "blocks.{layer}.hook_resid_mid"

    # Attention hooks
    ATTN_OUT_TEMPLATE: str = "blocks.{layer}.hook_attn_out"
    ATTN_PATTERN_TEMPLATE: str = "blocks.{layer}.attn.hook_pattern"
    ATTN_SCORES_TEMPLATE: str = "blocks.{layer}.attn.hook_attn_scores"

    # QKV hooks
    Q_TEMPLATE: str = "blocks.{layer}.attn.hook_q"
    K_TEMPLATE: str = "blocks.{layer}.attn.hook_k"
    V_TEMPLATE: str = "blocks.{layer}.attn.hook_v"

    # MLP hooks
    MLP_OUT_TEMPLATE: str = "blocks.{layer}.hook_mlp_out"
    MLP_PRE_TEMPLATE: str = "blocks.{layer}.mlp.hook_pre"
    MLP_POST_TEMPLATE: str = "blocks.{layer}.mlp.hook_post"

    # Final layer norm
    LN_FINAL: str = "ln_final.hook_normalized"


HOOK_POINTS = HookPointNames()

# Mapping from TransformerLens output kinds to LIT type names; used for
# automatic spec generation.
LIT_TYPE_MAPPING = {
    "text": "TextSegment",
    "tokens": "Tokens",
    "embeddings": "Embeddings",
    "token_embeddings": "TokenEmbeddings",
    "attention": "AttentionHeads",
    "gradients": "TokenGradients",
    "multiclass": "MulticlassPreds",
    "regression": "RegressionScore",
    "generated_text": "GeneratedText",
    "top_k_tokens": "TokenTopKPreds",
}

# ---------------------------------------------------------------------------
# Error messages
# ---------------------------------------------------------------------------


@dataclass(frozen=True)
class ErrorMessages:
    """Standard error messages for the LIT integration."""

    NO_TOKENIZER: str = (
        "HookedTransformer has no tokenizer. "
        "Please load a model with a tokenizer or set one manually."
    )
    INVALID_MODEL: str = "Model must be an instance of HookedTransformer. Got: {model_type}"
    LIT_NOT_INSTALLED: str = (
        "LIT (lit-nlp) is not installed. Please install it with: pip install lit-nlp"
    )
    INCOMPATIBLE_INPUT: str = (
        "Input does not match the expected input_spec. "
        "Expected fields: {expected}, got: {actual}"
    )
    BATCH_SIZE_MISMATCH: str = "Batch size mismatch. Expected {expected}, got {actual}"


ERRORS = ErrorMessages()

# ---------------------------------------------------------------------------
# LIT server defaults
# ---------------------------------------------------------------------------


@dataclass(frozen=True)
class ServerConfig:
    """Default configuration for the LIT server."""

    DEFAULT_PORT: int = 5432  # default server port
    DEFAULT_HOST: str = "localhost"
    DEFAULT_TITLE: str = "TransformerLens + LIT"  # browser-tab title
    DEV_MODE: bool = False  # hot reload
    WARM_START: bool = True  # load examples on startup
    MAX_EXAMPLES: int = 1000  # cap on loaded examples


SERVER_CONFIG = ServerConfig()
+ +Example usage: + >>> from transformer_lens.lit import SimpleTextDataset # doctest: +SKIP + >>> + >>> # Create a dataset from examples + >>> examples = [ # doctest: +SKIP + ... {"text": "The capital of France is Paris."}, + ... {"text": "Machine learning is a subset of AI."}, + ... ] + >>> dataset = SimpleTextDataset(examples) # doctest: +SKIP + >>> + >>> # Use with LIT server + >>> from lit_nlp import dev_server # doctest: +SKIP + >>> server = dev_server.Server(models, {"my_data": dataset}) # doctest: +SKIP + +References: + - LIT Dataset API: https://pair-code.github.io/lit/documentation/api#datasets + - TransformerLens: https://github.com/TransformerLensOrg/TransformerLens +""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union + +from .constants import INPUT_FIELDS +from .utils import check_lit_installed + +if TYPE_CHECKING: + from lit_nlp.api import dataset as lit_dataset_types # noqa: F401 + from lit_nlp.api import types as lit_types_module # noqa: F401 + +# Check for LIT installation +if check_lit_installed(): + from lit_nlp.api import ( # type: ignore[import-not-found] # noqa: F401 + dataset as lit_dataset, + ) + from lit_nlp.api import ( # type: ignore[import-not-found] # noqa: F401 + types as lit_types, + ) + + _LIT_AVAILABLE = True + # Dynamic base class for proper LIT Dataset inheritance + _LITDatasetBase = lit_dataset.Dataset +else: + _LIT_AVAILABLE = False + lit_dataset = None # type: ignore[assignment] + lit_types = None # type: ignore[assignment] + _LITDatasetBase = object # type: ignore[assignment, misc] + +logger = logging.getLogger(__name__) + + +def _ensure_lit_available(): + """Raise ImportError if LIT is not available.""" + if not _LIT_AVAILABLE: + raise ImportError( + "LIT (lit-nlp) is not installed. 
" "Please install it with: pip install lit-nlp" + ) + + +@dataclass +class DatasetConfig: + """Configuration for LIT datasets. + + Attributes: + max_examples: Maximum number of examples to load. + shuffle: Whether to shuffle the examples. + seed: Random seed for shuffling. + """ + + max_examples: Optional[int] = None + shuffle: bool = False + seed: int = 42 + + +class SimpleTextDataset(_LITDatasetBase): # type: ignore[misc, valid-type] + """Simple text dataset for use with HookedTransformerLIT. + + This is a basic dataset class that holds text examples for analysis + with LIT. Each example is a dictionary with at least a "text" field. + + Attributes: + examples: List of example dictionaries. + name: Optional dataset name for display. + + Example: + >>> dataset = SimpleTextDataset([ # doctest: +SKIP + ... {"text": "Hello world"}, + ... {"text": "How are you?"}, + ... ]) + >>> len(dataset.examples) # doctest: +SKIP + 2 + """ + + def __init__( + self, + examples: Optional[List[Dict[str, Any]]] = None, + name: str = "SimpleTextDataset", + ): + """Initialize the dataset. + + Args: + examples: List of example dictionaries with "text" field. + name: Name for the dataset (shown in LIT UI). 
+ """ + _ensure_lit_available() + + self._examples = examples or [] + self._name = name + + # Validate examples + for i, ex in enumerate(self._examples): + if INPUT_FIELDS.TEXT not in ex: + raise ValueError(f"Example {i} missing required field '{INPUT_FIELDS.TEXT}'") + + @property + def examples(self) -> List[Dict[str, Any]]: + """Return all examples in the dataset.""" + return self._examples + + def __len__(self) -> int: + """Return the number of examples.""" + return len(self._examples) + + def __iter__(self): + """Iterate over examples.""" + return iter(self._examples) + + def description(self) -> str: + """Return a description of the dataset.""" + return f"{self._name}: {len(self._examples)} examples" + + def spec(self) -> Dict[str, Any]: + """Return the spec describing the dataset fields. + + This tells LIT what fields each example contains and their types. + + Returns: + Dictionary mapping field names to LIT type specs. + """ + return { + INPUT_FIELDS.TEXT: lit_types.TextSegment(), # type: ignore[union-attr] + } + + @classmethod + def from_strings( + cls, + texts: Sequence[str], + name: str = "TextDataset", + ) -> "SimpleTextDataset": + """Create a dataset from a list of strings. + + Args: + texts: Sequence of text strings. + name: Dataset name. + + Returns: + SimpleTextDataset instance. + + Example: + >>> dataset = SimpleTextDataset.from_strings([ # doctest: +SKIP + ... "First example", + ... "Second example", + ... ]) + """ + examples = [{INPUT_FIELDS.TEXT: text} for text in texts] + return cls(examples, name=name) + + @classmethod + def from_file( + cls, + filepath: Union[str, Path], + name: Optional[str] = None, + max_examples: Optional[int] = None, + ) -> "SimpleTextDataset": + """Load a dataset from a text file. + + Each line in the file becomes one example. + + Args: + filepath: Path to the text file. + name: Optional dataset name (defaults to filename). + max_examples: Maximum number of examples to load. + + Returns: + SimpleTextDataset instance. 
+ """ + filepath = Path(filepath) + + if name is None: + name = filepath.stem + + with open(filepath, "r", encoding="utf-8") as f: + lines = f.readlines() + + if max_examples is not None: + lines = lines[:max_examples] + + texts = [line.strip() for line in lines if line.strip()] + return cls.from_strings(texts, name=name) + + +class PromptCompletionDataset(_LITDatasetBase): # type: ignore[misc, valid-type] + """Dataset with prompt-completion pairs for generation analysis. + + This dataset type is useful for analyzing model generation behavior, + where each example has a prompt and an expected completion. + + Attributes: + examples: List of example dictionaries with prompt and completion. + + Example: + >>> dataset = PromptCompletionDataset([ # doctest: +SKIP + ... {"prompt": "The capital of France is", "completion": " Paris"}, + ... {"prompt": "2 + 2 =", "completion": " 4"}, + ... ]) + """ + + # Field names for this dataset type + PROMPT_FIELD = "prompt" + COMPLETION_FIELD = "completion" + FULL_TEXT_FIELD = "text" + + def __init__( + self, + examples: Optional[List[Dict[str, Any]]] = None, + name: str = "PromptCompletionDataset", + ): + """Initialize the dataset. + + Args: + examples: List of example dictionaries with prompt/completion. + name: Name for the dataset. + """ + _ensure_lit_available() + + self._name = name + self._examples: List[Dict[str, Any]] = [] + + if examples: + for ex in examples: + self._add_example(ex) + + def _add_example(self, example: Dict[str, Any]) -> None: + """Add and validate an example. + + Args: + example: Example dictionary. 
+ """ + if self.PROMPT_FIELD not in example: + raise ValueError(f"Example missing required field '{self.PROMPT_FIELD}'") + + # Ensure completion field exists (can be empty) + if self.COMPLETION_FIELD not in example: + example[self.COMPLETION_FIELD] = "" + + # Create full text field + example[self.FULL_TEXT_FIELD] = example[self.PROMPT_FIELD] + example[self.COMPLETION_FIELD] + + # Also set as "text" for compatibility with model wrapper + example[INPUT_FIELDS.TEXT] = example[self.FULL_TEXT_FIELD] + + self._examples.append(example) + + @property + def examples(self) -> List[Dict[str, Any]]: + """Return all examples.""" + return self._examples + + def __len__(self) -> int: + """Return the number of examples.""" + return len(self._examples) + + def __iter__(self): + """Iterate over examples.""" + return iter(self._examples) + + def description(self) -> str: + """Return a description of the dataset.""" + return f"{self._name}: {len(self._examples)} prompt-completion pairs" + + def spec(self) -> Dict[str, Any]: + """Return the spec describing the dataset fields.""" + return { + self.PROMPT_FIELD: lit_types.TextSegment(), # type: ignore[union-attr] + self.COMPLETION_FIELD: lit_types.TextSegment(), # type: ignore[union-attr] + self.FULL_TEXT_FIELD: lit_types.TextSegment(), # type: ignore[union-attr] + INPUT_FIELDS.TEXT: lit_types.TextSegment(), # type: ignore[union-attr] + } + + @classmethod + def from_pairs( + cls, + pairs: Sequence[tuple], + name: str = "PromptCompletionDataset", + ) -> "PromptCompletionDataset": + """Create a dataset from (prompt, completion) tuples. + + Args: + pairs: Sequence of (prompt, completion) tuples. + name: Dataset name. + + Returns: + PromptCompletionDataset instance. + + Example: + >>> dataset = PromptCompletionDataset.from_pairs([ # doctest: +SKIP + ... ("Hello, my name is", " Alice"), + ... ("The weather today is", " sunny"), + ... 
]) + """ + examples = [ + {cls.PROMPT_FIELD: prompt, cls.COMPLETION_FIELD: completion} + for prompt, completion in pairs + ] + return cls(examples, name=name) + + +class IOIDataset(_LITDatasetBase): # type: ignore[misc, valid-type] + """Indirect Object Identification (IOI) dataset. + + This dataset contains examples for the Indirect Object Identification + task, commonly used in mechanistic interpretability research. + + Each example has the format: + "When {name1} and {name2} went to the {place}, {name1} gave a {object} to" + + The model should complete with name2 (the indirect object). + + Reference: + Wang et al. "Interpretability in the Wild: a Circuit for Indirect + Object Identification in GPT-2 small" + https://arxiv.org/abs/2211.00593 + """ + + # Common names for IOI examples + NAMES = [ + "Mary", + "John", + "Alice", + "Bob", + "Charlie", + "Diana", + "Emma", + "Frank", + "Grace", + "Henry", + "Ivy", + "Jack", + ] + + # Common places + PLACES = [ + "store", + "park", + "beach", + "restaurant", + "library", + "museum", + "cafe", + "market", + "school", + "hospital", + ] + + # Common objects + OBJECTS = [ + "book", + "gift", + "letter", + "key", + "phone", + "drink", + "flower", + "card", + "ticket", + "bag", + ] + + TEMPLATE = "When {name1} and {name2} went to the {place}, {name1} gave a {object} to" + + def __init__( + self, + examples: Optional[List[Dict[str, Any]]] = None, + name: str = "IOI Dataset", + ): + """Initialize the IOI dataset. + + Args: + examples: Optional pre-defined examples. + name: Dataset name. 
+ """ + _ensure_lit_available() + + self._name = name + self._examples = examples or [] + + @property + def examples(self) -> List[Dict[str, Any]]: + """Return all examples.""" + return self._examples + + def __len__(self) -> int: + """Return the number of examples.""" + return len(self._examples) + + def __iter__(self): + """Iterate over examples.""" + return iter(self._examples) + + def description(self) -> str: + """Return a description of the dataset.""" + return f"{self._name}: {len(self._examples)} IOI examples" + + def spec(self) -> Dict[str, Any]: + """Return the spec describing the dataset fields.""" + return { + INPUT_FIELDS.TEXT: lit_types.TextSegment(), # type: ignore[union-attr] + "name1": lit_types.CategoryLabel(), # type: ignore[union-attr] + "name2": lit_types.CategoryLabel(), # type: ignore[union-attr] + "place": lit_types.CategoryLabel(), # type: ignore[union-attr] + "object": lit_types.CategoryLabel(), # type: ignore[union-attr] + "answer": lit_types.CategoryLabel(), # type: ignore[union-attr] + } + + def add_example( + self, + name1: str, + name2: str, + place: str, + obj: str, + ) -> None: + """Add a single IOI example. + + Args: + name1: Subject name (gives the object). + name2: Indirect object name (receives the object). + place: Location. + obj: Object being given. + """ + text = self.TEMPLATE.format( + name1=name1, + name2=name2, + place=place, + object=obj, + ) + self._examples.append( + { + INPUT_FIELDS.TEXT: text, + "name1": name1, + "name2": name2, + "place": place, + "object": obj, + "answer": name2, # The correct completion + } + ) + + @classmethod + def generate( + cls, + n_examples: int = 100, + seed: int = 42, + name: str = "IOI Dataset", + ) -> "IOIDataset": + """Generate random IOI examples. + + Args: + n_examples: Number of examples to generate. + seed: Random seed for reproducibility. + name: Dataset name. + + Returns: + IOIDataset with generated examples. 
+ """ + import random + + random.seed(seed) + + dataset = cls(name=name) + + for _ in range(n_examples): + # Select two different names + name1, name2 = random.sample(cls.NAMES, 2) + place = random.choice(cls.PLACES) + obj = random.choice(cls.OBJECTS) + + dataset.add_example(name1, name2, place, obj) + + return dataset + + +class InductionDataset(_LITDatasetBase): # type: ignore[misc, valid-type] + """Dataset for induction head analysis. + + Induction heads are attention heads that perform pattern matching + of the form [A][B] ... [A] -> [B]. This dataset provides examples + designed to trigger induction behavior. + + Example pattern: + "The cat sat on the mat. The cat sat on the" -> " mat" + + Reference: + Olsson et al. "In-context Learning and Induction Heads" + https://arxiv.org/abs/2209.11895 + """ + + def __init__( + self, + examples: Optional[List[Dict[str, Any]]] = None, + name: str = "Induction Dataset", + ): + """Initialize the induction dataset. + + Args: + examples: Optional pre-defined examples. + name: Dataset name. 
+ """ + _ensure_lit_available() + + self._name = name + self._examples = examples or [] + + @property + def examples(self) -> List[Dict[str, Any]]: + """Return all examples.""" + return self._examples + + def __len__(self) -> int: + """Return the number of examples.""" + return len(self._examples) + + def __iter__(self): + """Iterate over examples.""" + return iter(self._examples) + + def description(self) -> str: + """Return a description of the dataset.""" + return f"{self._name}: {len(self._examples)} induction examples" + + def spec(self) -> Dict[str, Any]: + """Return the spec describing the dataset fields.""" + return { + INPUT_FIELDS.TEXT: lit_types.TextSegment(), # type: ignore[union-attr] + "pattern": lit_types.TextSegment(), # type: ignore[union-attr] + "expected_completion": lit_types.TextSegment(), # type: ignore[union-attr] + } + + def add_example( + self, + pattern: str, + repeated_text: str, + completion: str, + ) -> None: + """Add an induction example. + + Args: + pattern: The pattern that is repeated. + repeated_text: The text before the second occurrence. + completion: The expected completion. + """ + # Create the full text: pattern + separator + repeated start + text = f"{pattern} {repeated_text} {pattern.split()[0]}" + + self._examples.append( + { + INPUT_FIELDS.TEXT: text, + "pattern": pattern, + "expected_completion": completion, + } + ) + + @classmethod + def generate_simple( + cls, + n_examples: int = 50, + seed: int = 42, + name: str = "Induction Dataset", + ) -> "InductionDataset": + """Generate simple induction examples. + + Args: + n_examples: Number of examples to generate. + seed: Random seed. + name: Dataset name. + + Returns: + InductionDataset with generated examples. 
+ """ + import random + + random.seed(seed) + + # Simple word pairs + patterns = [ + ("The cat sat", "on the mat"), + ("Hello my name", "is Alice"), + ("The quick brown", "fox jumps"), + ("Once upon a", "time there"), + ("In the beginning", "was the"), + ("To be or", "not to"), + ("The sun rises", "in the"), + ("Water flows down", "the hill"), + ] + + dataset = cls(name=name) + + for i in range(n_examples): + pattern_start, pattern_end = patterns[i % len(patterns)] + full_pattern = f"{pattern_start} {pattern_end}" + + # Add some random connecting text + connectors = ["Then, later,", "After that,", "Subsequently,", "Next,"] + connector = random.choice(connectors) + + dataset.add_example( + pattern=full_pattern, + repeated_text=connector, + completion=pattern_end, + ) + + return dataset + + +# Wrapper to make datasets LIT-compatible if LIT is available +if _LIT_AVAILABLE: + + class LITDatasetWrapper(lit_dataset.Dataset): # type: ignore[union-attr] + """Wrapper to make our datasets inherit from lit_dataset.Dataset. + + This wrapper takes TransformerLens dataset classes and makes them + compatible with LIT's Dataset interface. + """ + + def __init__(self, examples: List[Dict[str, Any]], spec_dict: Dict[str, Any], name: str): + """Create a LIT-compatible dataset. + + Args: + examples: List of example dictionaries. + spec_dict: The spec dictionary describing the fields. + name: Name/description of the dataset. 
+ """ + super().__init__() + self._examples = examples + self._spec_dict = spec_dict + self._name = name + + @classmethod + def init_spec(cls) -> None: + """Return None to indicate this dataset is not UI-configurable.""" + return None + + def spec(self) -> Dict[str, Any]: + return self._spec_dict + + def description(self) -> str: + return self._name + + @property + def examples(self) -> List[Dict[str, Any]]: + """Return the examples list.""" + return self._examples + + def __len__(self) -> int: + """Return the number of examples.""" + return len(self._examples) + + def __iter__(self): + """Iterate over examples.""" + return iter(self._examples) + + def wrap_for_lit(dataset: Any) -> LITDatasetWrapper: + """Wrap a dataset for use with LIT. + + Args: + dataset: One of our dataset classes (SimpleTextDataset, + PromptCompletionDataset, IOIDataset, or InductionDataset). + + Returns: + LIT-compatible dataset. + """ + return LITDatasetWrapper( + examples=list(dataset.examples), + spec_dict=dataset.spec(), + name=dataset.description(), + ) + +else: + # Define wrap_for_lit when LIT is not available + def wrap_for_lit(dataset: Any) -> Any: # type: ignore[misc] + """Placeholder when LIT is not available.""" + raise ImportError( + "LIT (lit-nlp) is not installed. " "Please install it with: pip install lit-nlp" + ) diff --git a/transformer_lens/lit/model.py b/transformer_lens/lit/model.py new file mode 100644 index 000000000..0545bc04b --- /dev/null +++ b/transformer_lens/lit/model.py @@ -0,0 +1,757 @@ +"""LIT Model wrapper for TransformerLens HookedTransformer. + +This module provides a LIT-compatible wrapper around TransformerLens's HookedTransformer, +enabling the use of Google's Learning Interpretability Tool (LIT) for model visualization +and analysis. 
+ +The wrapper exposes: +- Token predictions (logits, top-k tokens) +- Per-layer embeddings (residual stream) +- Attention patterns (all layers/heads) +- Token gradients for salience maps +- Loss computation + +Example usage: + >>> from transformer_lens import HookedTransformer # doctest: +SKIP + >>> from transformer_lens.lit import HookedTransformerLIT # doctest: +SKIP + >>> + >>> # Load model + >>> model = HookedTransformer.from_pretrained("gpt2-small") # doctest: +SKIP + >>> + >>> # Create LIT wrapper + >>> lit_model = HookedTransformerLIT(model) # doctest: +SKIP + >>> + >>> # Run prediction + >>> inputs = [{"text": "Hello, world!"}] # doctest: +SKIP + >>> outputs = list(lit_model.predict(inputs)) # doctest: +SKIP + +References: + - LIT Model API: https://pair-code.github.io/lit/documentation/api#models + - TransformerLens: https://github.com/TransformerLensOrg/TransformerLens +""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Dict, Iterable, Iterator, List, Optional + +import torch + +from .constants import DEFAULTS, ERRORS, INPUT_FIELDS, OUTPUT_FIELDS +from .utils import ( + check_lit_installed, + clean_token_strings, + extract_attention_from_cache, + get_model_info, + get_tokens_from_model, + tensor_to_numpy, +) + +if TYPE_CHECKING: + from lit_nlp.api import model as lit_model_types # noqa: F401 + from lit_nlp.api import types as lit_types_module # noqa: F401 + +# Check for LIT installation and import conditionally +if check_lit_installed(): + from lit_nlp.api import ( # type: ignore[import-not-found] # noqa: F401 + model as lit_model, + ) + from lit_nlp.api import ( # type: ignore[import-not-found] # noqa: F401 + types as lit_types, + ) + from lit_nlp.lib import utils as lit_utils # type: ignore[import-not-found] + + _LIT_AVAILABLE = True +else: + _LIT_AVAILABLE = False + # Create placeholder when LIT not installed + lit_model = None # type: ignore[assignment] + lit_types = 
logger = logging.getLogger(__name__)


@dataclass
class HookedTransformerLITConfig:
    """Configuration for the HookedTransformerLIT wrapper.

    Attributes:
        max_seq_length: Maximum sequence length for tokenization.
        batch_size: Batch size for inference.
        top_k: Number of top predictions to return.
        compute_gradients: Whether to compute token gradients.
        output_attention: Whether to output attention patterns.
        output_embeddings: Whether to output layer embeddings.
        output_all_layers: Whether to output embeddings from all layers.
        embedding_layers: Explicit layer indices to include (None = decided
            by output_all_layers).
        prepend_bos: Whether to prepend the BOS token.
        device: Device for computation (None = taken from the model).
    """

    max_seq_length: int = DEFAULTS.MAX_SEQ_LENGTH
    batch_size: int = DEFAULTS.BATCH_SIZE
    top_k: int = DEFAULTS.TOP_K
    compute_gradients: bool = DEFAULTS.COMPUTE_GRADIENTS
    output_attention: bool = DEFAULTS.OUTPUT_ATTENTION
    output_embeddings: bool = DEFAULTS.OUTPUT_EMBEDDINGS
    output_all_layers: bool = DEFAULTS.OUTPUT_ALL_LAYERS
    embedding_layers: Optional[List[int]] = None
    prepend_bos: bool = DEFAULTS.PREPEND_BOS
    device: Optional[str] = None


def _ensure_lit_available():
    """Raise ImportError (with install hint) when lit-nlp is missing."""
    if not _LIT_AVAILABLE:
        raise ImportError(ERRORS.LIT_NOT_INSTALLED)


# Base class is selected at import time: a real LIT Model when lit-nlp is
# installed, plain object otherwise (methods then guard via the helper above).
_LITModelBase = lit_model.Model if _LIT_AVAILABLE else object  # type: ignore[misc,assignment]
class HookedTransformerLIT(_LITModelBase):  # type: ignore[valid-type,misc]
    """LIT Model wrapper around a TransformerLens HookedTransformer.

    Implements the LIT Model API so LIT's tools work against a
    HookedTransformer: top-k token predictions, per-layer embeddings for the
    projector, attention patterns, and token gradients for salience maps.

    Attributes:
        model: The wrapped HookedTransformer model.
        config: Configuration options for the wrapper.

    Example:
        >>> model = HookedTransformer.from_pretrained("gpt2-small")  # doctest: +SKIP
        >>> lit_model = HookedTransformerLIT(model)  # doctest: +SKIP
        >>> lit_model.input_spec()  # doctest: +SKIP
        {'text': TextSegment(), ...}
    """

    def __init__(
        self,
        model: Any,
        config: Optional[HookedTransformerLITConfig] = None,
    ):
        """Initialize the LIT wrapper.

        Args:
            model: TransformerLens HookedTransformer model.
            config: Optional configuration. Uses defaults if not provided.

        Raises:
            ImportError: If lit-nlp is not installed.
            TypeError: If model is not a HookedTransformer.
        """
        _ensure_lit_available()

        # Imported here rather than at module scope to avoid import cycles.
        from transformer_lens import HookedTransformer

        if not isinstance(model, HookedTransformer):
            raise TypeError(ERRORS.INVALID_MODEL.format(model_type=type(model)))

        self.model = model
        self.config = config if config is not None else HookedTransformerLITConfig()

        # Salience needs input embeddings in the output spec for alignment,
        # so gradients force embeddings on.
        if self.config.compute_gradients and not self.config.output_embeddings:
            logger.info("Enabling output_embeddings (required for compute_gradients)")
            self.config.output_embeddings = True

        # Default the compute device to wherever the model already lives.
        if self.config.device is None:
            self.config.device = str(model.cfg.device)

        # Cache static model metadata (layers, heads, dims) for spec building.
        self._model_info = get_model_info(model)

        logger.info(f"Created HookedTransformerLIT wrapper for {self._model_info['model_name']}")
+ """ + return False + + def description(self) -> str: + """Return a human-readable description of the model. + + Returns: + Model description string. + """ + info = self._model_info + return ( + f"TransformerLens: {info['model_name']} " + f"({info['n_layers']}L, {info['n_heads']}H, d={info['d_model']})" + ) + + @classmethod + def init_spec(cls) -> Dict[str, Any]: + """Return spec for model initialization in LIT UI. + + This allows loading new models through the LIT interface. + + Returns: + Specification for initialization parameters. + """ + _ensure_lit_available() + return { + "model_name": lit_types.String( # type: ignore[union-attr] + default="gpt2-small", + required=True, + ), + "max_seq_length": lit_types.Integer( # type: ignore[union-attr] + default=DEFAULTS.MAX_SEQ_LENGTH, + min_val=1, + max_val=2048, + required=False, + ), + "compute_gradients": lit_types.Boolean( # type: ignore[union-attr] + default=DEFAULTS.COMPUTE_GRADIENTS, + required=False, + ), + "output_attention": lit_types.Boolean( # type: ignore[union-attr] + default=DEFAULTS.OUTPUT_ATTENTION, + required=False, + ), + "output_embeddings": lit_types.Boolean( # type: ignore[union-attr] + default=DEFAULTS.OUTPUT_EMBEDDINGS, + required=False, + ), + } + + def input_spec(self) -> Dict[str, Any]: + """Return spec describing the model inputs. + + Defines the expected input format for the model. LIT uses this + to validate inputs and generate appropriate UI controls. + + Returns: + Dictionary mapping field names to LIT type specs. 
+ """ + _ensure_lit_available() + + spec = { + # Primary text input + INPUT_FIELDS.TEXT: lit_types.TextSegment(), # type: ignore[union-attr] + # Optional pre-tokenized input (for Integrated Gradients) + INPUT_FIELDS.TOKENS: lit_types.Tokens( # type: ignore[union-attr] + parent=INPUT_FIELDS.TEXT, + required=False, + ), + } + + # Add optional embeddings input for Integrated Gradients + if self.config.output_embeddings: + spec[INPUT_FIELDS.TOKEN_EMBEDDINGS] = lit_types.TokenEmbeddings( # type: ignore[union-attr] + align=INPUT_FIELDS.TOKENS, + required=False, + ) + + # Add target mask for sequence salience + if self.config.compute_gradients: + spec[INPUT_FIELDS.TARGET_MASK] = lit_types.Tokens( # type: ignore[union-attr] + parent=INPUT_FIELDS.TEXT, + required=False, + ) + + return spec + + def output_spec(self) -> Dict[str, Any]: + """Return spec describing the model outputs. + + Defines all the outputs that the model produces. LIT uses this + to determine which visualizations to show. + + Returns: + Dictionary mapping field names to LIT type specs. 
+ """ + _ensure_lit_available() + + spec = {} + + # Tokens (always output) + spec[OUTPUT_FIELDS.TOKENS] = lit_types.Tokens( # type: ignore[union-attr] + parent=INPUT_FIELDS.TEXT, + ) + + # Top-K predictions for next token + spec[OUTPUT_FIELDS.TOP_K_TOKENS] = lit_types.TokenTopKPreds( # type: ignore[union-attr] + align=OUTPUT_FIELDS.TOKENS, + ) + + # Embeddings + if self.config.output_embeddings: + # Input embeddings (for Integrated Gradients) + spec[OUTPUT_FIELDS.INPUT_EMBEDDINGS] = lit_types.TokenEmbeddings( # type: ignore[union-attr] + align=OUTPUT_FIELDS.TOKENS, + ) + + # Final layer embedding (CLS-style) + spec[OUTPUT_FIELDS.CLS_EMBEDDING] = lit_types.Embeddings() # type: ignore[union-attr] + + # Mean pooled embedding + spec[OUTPUT_FIELDS.MEAN_EMBEDDING] = lit_types.Embeddings() # type: ignore[union-attr] + + # Per-layer embeddings + layers_to_output = self._get_embedding_layers() + for layer in layers_to_output: + field_name = OUTPUT_FIELDS.LAYER_EMB_TEMPLATE.format(layer=layer) + spec[field_name] = lit_types.Embeddings() # type: ignore[union-attr] + + # Attention patterns + if self.config.output_attention: + for layer in range(self._model_info["n_layers"]): + field_name = OUTPUT_FIELDS.LAYER_ATTENTION_TEMPLATE.format(layer=layer) + spec[field_name] = lit_types.AttentionHeads( # type: ignore[union-attr] + align_in=OUTPUT_FIELDS.TOKENS, + align_out=OUTPUT_FIELDS.TOKENS, + ) + + # Gradients for salience + if self.config.compute_gradients: + # TokenGradients spec requirements (per LIT API): + # - align: must point to a Tokens field (for token alignment) + # - grad_for: must point to a TokenEmbeddings field (for grad-dot-input) + # LIT's GradientNorm component computes L2 norm internally + # LIT's GradientDotInput component computes dot product with embeddings + spec[OUTPUT_FIELDS.GRAD_L2] = lit_types.TokenGradients( # type: ignore[union-attr] + align=OUTPUT_FIELDS.TOKENS, + grad_for=OUTPUT_FIELDS.INPUT_EMBEDDINGS, + ) + # Gradient dot input uses same format + 
def _get_embedding_layers(self) -> List[int]:
    """Decide which layers to emit embeddings for.

    Explicit config takes precedence; otherwise all layers when
    output_all_layers is set (or the model is tiny), else first, middle,
    and last as representative samples.

    Returns:
        List of layer indices.
    """
    if self.config.embedding_layers is not None:
        return self.config.embedding_layers

    n_layers = self._model_info["n_layers"]

    # With three layers or fewer, "first/middle/last" is just all of them.
    if self.config.output_all_layers or n_layers <= 3:
        return list(range(n_layers))

    return [0, n_layers // 2, n_layers - 1]
+ """ + text = example[INPUT_FIELDS.TEXT] + + # Check for pre-tokenized input (reserved for future use) + _ = example.get(INPUT_FIELDS.TOKENS) + _ = example.get(INPUT_FIELDS.TOKEN_EMBEDDINGS) + + # Initialize output + output: Dict[str, Any] = {} + + # Tokenize + if self.model.tokenizer is None: + raise ValueError(ERRORS.NO_TOKENIZER) + + tokens, token_ids = get_tokens_from_model( + self.model, + text, + prepend_bos=self.config.prepend_bos, + max_length=self.config.max_seq_length, + ) + output[OUTPUT_FIELDS.TOKENS] = clean_token_strings(tokens) + + # Prepare input + input_tokens = token_ids.unsqueeze(0).to(self.config.device) + + # Run with cache to get all activations + with torch.no_grad(): + result, cache = self.model.run_with_cache( + input_tokens, + return_type="logits", + ) + # Ensure logits is a tensor (run_with_cache returns Output type) + logits: torch.Tensor = ( + result if isinstance(result, torch.Tensor) else torch.tensor(result) + ) + + # Top-K predictions + output[OUTPUT_FIELDS.TOP_K_TOKENS] = self._get_top_k_per_position(logits, len(tokens)) + + # Embeddings + if self.config.output_embeddings: + output.update(self._extract_embeddings(cache, len(tokens))) + + # Attention + if self.config.output_attention: + output.update(self._extract_attention(cache)) + + # Gradients (requires separate forward pass with gradients enabled) + if self.config.compute_gradients: + output.update(self._compute_gradients(text, example)) + + return output + + def _get_top_k_per_position( + self, + logits: torch.Tensor, + seq_len: int, + ) -> List[List[tuple]]: + """Get top-k predictions for each position. + + Args: + logits: Model logits [batch, pos, vocab]. + seq_len: Sequence length. + + Returns: + List of lists of (token, probability) tuples. 
+ """ + results = [] + # Ensure logits is a tensor (handle Output type from run_with_cache) + if not isinstance(logits, torch.Tensor): + logits = torch.tensor(logits) + probs = torch.softmax(logits[0], dim=-1) + + for pos in range(seq_len): + top_probs, top_indices = torch.topk(probs[pos], self.config.top_k) + pos_results = [] + for prob, idx in zip(top_probs.tolist(), top_indices.tolist()): + if self.model.tokenizer is not None: + token_str = self.model.tokenizer.decode([idx]) + else: + token_str = f"<{idx}>" + pos_results.append((token_str, prob)) + results.append(pos_results) + + return results + + def _extract_embeddings( + self, + cache: Any, + seq_len: int, + ) -> Dict[str, Any]: + """Extract embeddings from the activation cache. + + Args: + cache: Activation cache from forward pass. + seq_len: Sequence length. + + Returns: + Dictionary of embedding arrays. + """ + output = {} + + # Input embeddings (from hook_embed) + input_emb = cache["hook_embed"][0] # [seq_len, d_model] + output[OUTPUT_FIELDS.INPUT_EMBEDDINGS] = tensor_to_numpy(input_emb) + + # Final layer embeddings + final_layer = self._model_info["n_layers"] - 1 + final_resid = cache[f"blocks.{final_layer}.hook_resid_post"][0] + + # CLS-style (first token) + output[OUTPUT_FIELDS.CLS_EMBEDDING] = tensor_to_numpy(final_resid[0]) + + # Mean pooled + output[OUTPUT_FIELDS.MEAN_EMBEDDING] = tensor_to_numpy(final_resid.mean(dim=0)) + + # Per-layer embeddings + for layer in self._get_embedding_layers(): + resid = cache[f"blocks.{layer}.hook_resid_post"][0] + # Use mean pooled embedding for the layer + field_name = OUTPUT_FIELDS.LAYER_EMB_TEMPLATE.format(layer=layer) + output[field_name] = tensor_to_numpy(resid.mean(dim=0)) + + return output + + def _extract_attention( + self, + cache: Any, + ) -> Dict[str, Any]: + """Extract attention patterns from the activation cache. + + Args: + cache: Activation cache from forward pass. + + Returns: + Dictionary of attention pattern arrays. 
+ """ + output = {} + + for layer in range(self._model_info["n_layers"]): + # Get attention pattern for this layer + attn = extract_attention_from_cache(cache, layer, head=None, batch_idx=0) + # attn shape: [num_heads, query_pos, key_pos] + field_name = OUTPUT_FIELDS.LAYER_ATTENTION_TEMPLATE.format(layer=layer) + output[field_name] = attn + + return output + + def _compute_gradients( + self, + text: str, + example: Dict[str, Any], + ) -> Dict[str, Any]: + """Compute token gradients for salience. + + Args: + text: Input text. + example: Full input example (may contain target_mask). + + Returns: + Dictionary with gradient arrays. + """ + output = {} + + # Tokenize + tokens, token_ids = get_tokens_from_model( + self.model, + text, + prepend_bos=self.config.prepend_bos, + max_length=self.config.max_seq_length, + ) + input_tokens = token_ids.unsqueeze(0).to(self.config.device) + + # Get target mask if provided + target_mask = example.get(INPUT_FIELDS.TARGET_MASK) + + # Get embeddings with gradient tracking + with torch.enable_grad(): + # Get input embeddings and make them a leaf tensor for gradients + embed = self.model.embed(input_tokens).detach().clone() + embed.requires_grad_(True) + + # Add positional embeddings if applicable + if self.model.cfg.positional_embedding_type == "standard": + pos_embed = self.model.pos_embed(input_tokens) + residual = embed + pos_embed + else: + residual = embed + + # Forward through the rest of the model + logits = self.model(residual, start_at_layer=0) + + # Compute loss or target logit + if target_mask is not None: + # Use masked tokens as targets + # For now, use simple next-token prediction loss + pass + + # Use last token prediction as target + target_idx = token_ids[-1].item() # Predict last token + target_logit = logits[0, -2, target_idx] # Logit at second-to-last position + + # Backward pass + target_logit.backward() + + # Get gradients - now embed is a leaf tensor so grad should be populated + if embed.grad is None: + # 
Fallback: return zeros if gradients couldn't be computed + gradients = torch.zeros_like(embed[0]) + else: + gradients = embed.grad[0] # [seq_len, d_model] + + # Return the full gradient tensor - LIT computes norms internally + # TokenGradients expects shape [num_tokens, emb_dim] + output[OUTPUT_FIELDS.GRAD_L2] = tensor_to_numpy(gradients) + output[OUTPUT_FIELDS.GRAD_DOT_INPUT] = tensor_to_numpy(gradients) + + return output + + def max_minibatch_size(self) -> int: + """Return the maximum batch size for prediction. + + Returns: + Maximum batch size. + """ + return self.config.batch_size + + def get_embedding_table(self) -> tuple: + """Return the token embedding table. + + Required by LIT for certain generators like HotFlip. + + Returns: + Tuple of (vocab_list, embedding_matrix) where vocab_list is + a list of token strings and embedding_matrix is [vocab, d_model]. + """ + # Get the embedding matrix from the model + embed_weight = self.model.embed.W_E.detach().cpu().numpy() + + # Get vocabulary list - use tokenizer's vocab size to avoid index errors + if self.model.tokenizer is not None: + # Use the tokenizer's actual vocabulary size + tokenizer_vocab_size = len(self.model.tokenizer) + # Use the smaller of embedding size and tokenizer vocab size + vocab_size = min(embed_weight.shape[0], tokenizer_vocab_size) + vocab_list = [] + for i in range(vocab_size): + try: + token = self.model.tokenizer.decode([i]) + vocab_list.append(token) + except Exception: + vocab_list.append(f"<{i}>") + # Truncate embedding matrix to match vocab_list + embed_weight = embed_weight[:vocab_size] + else: + vocab_list = [f"<{i}>" for i in range(embed_weight.shape[0])] + + return vocab_list, embed_weight + + @classmethod + def from_pretrained( + cls, + model_name: str, + config: Optional[HookedTransformerLITConfig] = None, + **model_kwargs, + ) -> "HookedTransformerLIT": + """Create a LIT wrapper from a pretrained model name. 
+ + Convenience method that loads the HookedTransformer model + and wraps it for LIT. + + Args: + model_name: Name of the pretrained model (e.g., "gpt2-small"). + config: Optional wrapper configuration. + **model_kwargs: Additional arguments for HookedTransformer.from_pretrained. + + Returns: + HookedTransformerLIT wrapper instance. + + Example: + >>> lit_model = HookedTransformerLIT.from_pretrained("gpt2-small") # doctest: +SKIP + """ + from transformer_lens import HookedTransformer + + model = HookedTransformer.from_pretrained(model_name, **model_kwargs) + return cls(model, config=config) + + +# If LIT is available, register as a proper LIT BatchedModel subclass +if _LIT_AVAILABLE: + + class HookedTransformerLITBatched(lit_model.BatchedModel): # type: ignore[union-attr] + """Batched version of HookedTransformerLIT for better performance. + + This class implements the BatchedModel interface for efficient + batch processing. Use this for production deployments. + """ + + def __init__( + self, + model: Any, + config: Optional[HookedTransformerLITConfig] = None, + ): + """Initialize the batched LIT wrapper. + + Args: + model: TransformerLens HookedTransformer model. + config: Optional configuration. + """ + # Use the non-batched wrapper internally + self._wrapper = HookedTransformerLIT(model, config) + self.model = model + self.config = self._wrapper.config + + def description(self) -> str: + return self._wrapper.description() + + @classmethod + def init_spec(cls) -> Dict[str, Any]: + return HookedTransformerLIT.init_spec() + + def input_spec(self) -> Dict[str, Any]: + return self._wrapper.input_spec() + + def output_spec(self) -> Dict[str, Any]: + return self._wrapper.output_spec() + + def max_minibatch_size(self) -> int: + return self._wrapper.max_minibatch_size() + + def predict_minibatch( # type: ignore[union-attr] + self, + inputs, # type: ignore[override] + ): + """Run prediction on a minibatch of inputs. + + Args: + inputs: List of input dictionaries. 
"""Utility functions for the LIT integration module.

Helpers for converting between TransformerLens data structures and
LIT-compatible formats, plus small general-purpose utilities.

References:
    - LIT API: https://pair-code.github.io/lit/documentation/api
    - TransformerLens: https://github.com/TransformerLensOrg/TransformerLens
"""

from __future__ import annotations

import logging
from typing import Any, Dict, List, Optional, Tuple, Union

import numpy as np
import torch

logger = logging.getLogger(__name__)


def check_lit_installed() -> bool:
    """Report whether the lit-nlp package can be imported.

    Returns:
        bool: True if LIT is installed, False otherwise.
    """
    try:
        import lit_nlp  # noqa: F401
    except ImportError:
        return False
    return True


def tensor_to_numpy(
    tensor: Union[torch.Tensor, np.ndarray, None],
) -> Optional[np.ndarray]:
    """Convert a PyTorch tensor to a NumPy array.

    LIT consumes NumPy data only, so this helper detaches and moves the
    tensor to CPU before converting; ndarrays and None pass through.

    Args:
        tensor: PyTorch tensor, ndarray, or None.

    Returns:
        NumPy array, or None if the input was None.

    Raises:
        TypeError: If the input is none of the accepted types.
    """
    if tensor is None:
        return None
    if isinstance(tensor, np.ndarray):
        return tensor
    if isinstance(tensor, torch.Tensor):
        return tensor.detach().cpu().numpy()
    raise TypeError(f"Expected torch.Tensor or np.ndarray, got {type(tensor)}")


def numpy_to_tensor(
    array: Union[np.ndarray, torch.Tensor, None],
    device: Optional[Union[str, torch.device]] = None,
    dtype: Optional[torch.dtype] = None,
) -> Optional[torch.Tensor]:
    """Convert a NumPy array to a PyTorch tensor.

    Args:
        array: NumPy array, tensor, or None.
        device: Optional target device.
        dtype: Optional target dtype.

    Returns:
        PyTorch tensor, or None if the input was None.
    """
    if array is None:
        return None
    result = array if isinstance(array, torch.Tensor) else torch.from_numpy(array)
    if dtype is not None:
        result = result.to(dtype)
    if device is not None:
        result = result.to(device)
    return result


def get_tokens_from_model(
    model: Any,
    text: str,
    prepend_bos: bool = True,
    truncate: bool = True,
    max_length: Optional[int] = None,
) -> Tuple[List[str], torch.Tensor]:
    """Tokenize text with a HookedTransformer's tokenizer.

    Args:
        model: HookedTransformer model with tokenizer.
        text: Input text to tokenize.
        prepend_bos: Whether to prepend the BOS token.
        truncate: Whether model-side tokenization may truncate.
        max_length: Optional hard cap on sequence length.

    Returns:
        Tuple of (token strings, 1-D token ID tensor).

    Raises:
        ValueError: If model has no tokenizer.
    """
    if model.tokenizer is None:
        raise ValueError("Model must have a tokenizer to convert text to tokens")

    ids = model.to_tokens(text, prepend_bos=prepend_bos, truncate=truncate)

    # Enforce the explicit cap even if model-side truncation did not.
    if max_length is not None and ids.shape[1] > max_length:
        ids = ids[:, :max_length]

    strings = model.tokenizer.convert_ids_to_tokens(ids.squeeze(0).tolist())
    return strings, ids.squeeze(0)
+ """ + if model.tokenizer is None: + raise ValueError("Model must have a tokenizer to convert text to tokens") + + # Get token IDs + token_ids = model.to_tokens(text, prepend_bos=prepend_bos, truncate=truncate) + + if max_length is not None and token_ids.shape[1] > max_length: + token_ids = token_ids[:, :max_length] + + # Convert IDs to strings + token_strings = model.tokenizer.convert_ids_to_tokens(token_ids.squeeze(0).tolist()) + + return token_strings, token_ids.squeeze(0) + + +def clean_token_string(token: str) -> str: + """Clean a token string for display. + + Handles common tokenizer artifacts like: + - Ġ (GPT-2 style space prefix) + - ▁ (SentencePiece space prefix) + - ## (BERT style subword prefix) + + Args: + token: Raw token string from tokenizer. + + Returns: + Cleaned token string for display. + """ + # Handle GPT-2/RoBERTa style space encoding + if token.startswith("Ġ"): + return "▁" + token[1:] # Use Unicode space indicator + # Handle SentencePiece + if token.startswith("▁"): + return token # Already in preferred format + # Handle BERT style + if token.startswith("##"): + return token[2:] # Remove ## prefix + return token + + +def clean_token_strings(tokens: List[str]) -> List[str]: + """Clean a list of token strings for display. + + Args: + tokens: List of raw token strings. + + Returns: + List of cleaned token strings. + """ + return [clean_token_string(t) for t in tokens] + + +def extract_attention_from_cache( + cache: Any, + layer: int, + head: Optional[int] = None, + batch_idx: int = 0, +) -> Optional[np.ndarray]: + """Extract attention patterns from an activation cache. + + Args: + cache: TransformerLens ActivationCache object. + layer: Layer index to extract from. + head: Optional head index. If None, returns all heads. + batch_idx: Batch index to extract. + + Returns: + Attention pattern as numpy array. 
+ Shape: [query_pos, key_pos] if head specified + Shape: [num_heads, query_pos, key_pos] if head is None + """ + # Get attention pattern from cache + attn_pattern = cache[f"blocks.{layer}.attn.hook_pattern"] + + # Remove batch dimension + if attn_pattern.dim() == 4: + attn_pattern = attn_pattern[batch_idx] + + # attn_pattern shape: [num_heads, query_pos, key_pos] + if head is not None: + attn_pattern = attn_pattern[head] + + return tensor_to_numpy(attn_pattern) + + +def extract_embeddings_from_cache( + cache: Any, + layer: int, + position: str = "all", + batch_idx: int = 0, +) -> Optional[np.ndarray]: + """Extract embeddings from a specific layer in the activation cache. + + Args: + cache: TransformerLens ActivationCache object. + layer: Layer index to extract from. + position: "all" for all positions, "first" for CLS-like, "last" for final token. + batch_idx: Batch index to extract. + + Returns: + Embeddings as numpy array. + """ + # Get residual stream at layer + resid = cache[f"blocks.{layer}.hook_resid_post"] + + # Remove batch dimension + if resid.dim() == 3: + resid = resid[batch_idx] + + # resid shape: [seq_len, d_model] + if position == "first": + embeddings = resid[0] + elif position == "last": + embeddings = resid[-1] + elif position == "mean": + embeddings = resid.mean(dim=0) + else: # "all" + embeddings = resid + + return tensor_to_numpy(embeddings) + + +def compute_token_gradients( + model: Any, + text: str, + target_idx: Optional[int] = None, + prepend_bos: bool = True, +) -> Tuple[Optional[np.ndarray], Optional[np.ndarray], List[str]]: + """Compute token-level gradients for salience. + + Uses gradient of the loss with respect to token embeddings to compute + importance scores for each token. + + Args: + model: HookedTransformer model. + text: Input text. + target_idx: Target token index for gradient computation. + If None, uses the last token. + prepend_bos: Whether to prepend BOS token. 
def get_top_k_predictions(
    logits: torch.Tensor,
    tokenizer: Any,
    k: int = 10,
    position: int = -1,
    batch_idx: int = 0,
) -> List[Tuple[str, float]]:
    """Return the k most likely tokens at a position with probabilities.

    Args:
        logits: Model logits tensor [batch, pos, vocab].
        tokenizer: HuggingFace tokenizer (must support .decode).
        k: Number of top predictions to return.
        position: Position index to get predictions for (default: last).
        batch_idx: Batch index.

    Returns:
        List of (token_string, probability) tuples, most likely first.
    """
    # Softmax over the vocabulary at the requested position.
    distribution = torch.softmax(logits[batch_idx, position], dim=-1)
    values, indices = torch.topk(distribution, k)
    return [
        (tokenizer.decode([token_id]), probability)
        for probability, token_id in zip(values.tolist(), indices.tolist())
    ]


def validate_input_example(
    example: Dict[str, Any],
    required_fields: List[str],
) -> bool:
    """Check that an input example contains every required field.

    Logs a warning for the first missing field encountered.

    Args:
        example: Input example dictionary.
        required_fields: List of required field names.

    Returns:
        True if valid, False otherwise.
    """
    for name in required_fields:
        if name not in example:
            logger.warning(f"Missing required field '{name}' in input example")
            return False
    return True


def batch_examples(
    examples: List[Dict[str, Any]],
    batch_size: int,
) -> List[List[Dict[str, Any]]]:
    """Chunk examples into consecutive batches of at most batch_size.

    Args:
        examples: List of example dictionaries.
        batch_size: Size of each batch.

    Returns:
        List of batches, where each batch is a list of examples.
    """
    return [
        examples[start : start + batch_size]
        for start in range(0, len(examples), batch_size)
    ]


def unbatch_outputs(
    batched_outputs: Dict[str, np.ndarray],
) -> List[Dict[str, Any]]:
    """Split a dict of batched arrays into one dict per example.

    Array, tensor, and list values are indexed per example; any other
    value is copied unchanged into every example.

    Args:
        batched_outputs: Dictionary mapping field names to batched arrays.

    Returns:
        List of dictionaries, one per example.
    """
    if not batched_outputs:
        return []

    # Infer the batch size from the first entry.
    first_value = batched_outputs[next(iter(batched_outputs))]
    count = len(first_value)

    per_example: List[Dict[str, Any]] = []
    for idx in range(count):
        item: Dict[str, Any] = {}
        for key, value in batched_outputs.items():
            if isinstance(value, (np.ndarray, torch.Tensor, list)):
                item[key] = value[idx]
            else:
                item[key] = value
        per_example.append(item)

    return per_example
+ """ + if not batched_outputs: + return [] + + # Get batch size from first array + first_key = next(iter(batched_outputs)) + batch_size = len(batched_outputs[first_key]) + + # Split into individual examples + results = [] + for i in range(batch_size): + example_output = {} + for key, value in batched_outputs.items(): + if isinstance(value, (np.ndarray, torch.Tensor)): + example_output[key] = value[i] + elif isinstance(value, list): + example_output[key] = value[i] + else: + example_output[key] = value + results.append(example_output) + + return results + + +def get_hook_name_for_layer(template: str, layer: int, **kwargs) -> str: + """Generate a hook point name from a template. + + Args: + template: Hook name template with {layer} placeholder. + layer: Layer index. + **kwargs: Additional template parameters. + + Returns: + Formatted hook point name. + """ + return template.format(layer=layer, **kwargs) + + +def filter_cache_by_pattern( + cache: Any, + pattern: str, +) -> Dict[str, torch.Tensor]: + """Filter activation cache entries by hook name pattern. + + Args: + cache: TransformerLens ActivationCache. + pattern: Pattern to match (e.g., "attn.hook_pattern" will match + all attention pattern hooks). + + Returns: + Dictionary of matching cache entries. + """ + return {name: value for name, value in cache.items() if pattern in name} + + +def get_model_info(model: Any) -> Dict[str, Any]: + """Extract relevant model information for LIT display. + + Args: + model: HookedTransformer model. + + Returns: + Dictionary with model metadata. + """ + cfg = model.cfg + return { + "model_name": cfg.model_name, + "n_layers": cfg.n_layers, + "n_heads": cfg.n_heads, + "d_model": cfg.d_model, + "d_head": cfg.d_head, + "d_mlp": cfg.d_mlp, + "d_vocab": cfg.d_vocab, + "n_ctx": cfg.n_ctx, + "act_fn": cfg.act_fn, + "normalization_type": cfg.normalization_type, + "positional_embedding_type": cfg.positional_embedding_type, + }