diff --git a/.env.template b/.env.template index ffce64dc..2405af77 100644 --- a/.env.template +++ b/.env.template @@ -5,16 +5,4 @@ DISABLE_DISPLAY_KEYS=false # if true, the display keys will not be shown in the frontend EXEC_PYTHON_IN_SUBPROCESS=false # if true, the python code will be executed in a subprocess to avoid crashing the main app, but it will increase the time of response -LOCAL_DB_DIR= # the directory to store the local database, if not provided, the app will use the temp directory - -# External atabase connection settings -# check https://duckdb.org/docs/stable/extensions/mysql.html -# and https://duckdb.org/docs/stable/extensions/postgres.html -USE_EXTERNAL_DB=false # if true, the app will use an external database instead of the one in the app -DB_NAME=mysql_db # the name to refer to this database connection -DB_TYPE=mysql # mysql or postgresql -DB_HOST=localhost -DB_PORT=0 -DB_DATABASE=mysql -DB_USER=root -DB_PASSWORD= \ No newline at end of file +LOCAL_DB_DIR= # the directory to store the local database, if not provided, the app will use the temp directory \ No newline at end of file diff --git a/README.md b/README.md index 0987577f..d244ad77 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)  [![YouTube](https://img.shields.io/badge/YouTube-white?logo=youtube&logoColor=%23FF0000)](https://youtu.be/3ndlwt0Wi3c)  [![build](https://github.com/microsoft/data-formulator/actions/workflows/python-build.yml/badge.svg)](https://github.com/microsoft/data-formulator/actions/workflows/python-build.yml) +[![Discord](https://img.shields.io/badge/discord-chat-green?logo=discord)](https://discord.gg/mYCZMQKYZb) @@ -22,6 +23,14 @@ Transform data and create rich visualizations iteratively with AI 🪄. Try Data ## News 🔥🔥🔥 +- [05-13-2025] Data Formulator 0.2.1: External Data Loader + - We introduced external data loader class to make import data easier. 
[Readme](https://github.com/microsoft/data-formulator/tree/main/py-src/data_formulator/data_loader) and [Demo](https://github.com/microsoft/data-formulator/pull/155) + - Example data loaders from MySQL and Azure Data Explorer (Kusto) are provided. + - Call for action [link](https://github.com/microsoft/data-formulator/issues/156): + - Users: let us know which data source you'd like to load data from. + - Developers: let's build more data loaders. + - Discord channel for discussions: join us! [![Discord](https://img.shields.io/badge/discord-chat-green?logo=discord)](https://discord.gg/mYCZMQKYZb) + - [04-23-2025] Data Formulator 0.2: working with *large* data 📦📦📦 - Explore large data by: 1. Upload large data file to the local database (powered by [DuckDB](https://github.com/duckdb/duckdb)). diff --git a/package.json b/package.json index e5215c6f..962241e0 100644 --- a/package.json +++ b/package.json @@ -4,11 +4,11 @@ "version": "0.1.0", "private": true, "dependencies": { - "@emotion/react": "^11.9.0", - "@emotion/styled": "^11.8.1", + "@emotion/react": "^11.14.0", + "@emotion/styled": "^11.14.0", "@fontsource/roboto": "^4.5.5", "@mui/icons-material": "^5.14.0", - "@mui/material": "^5.6.0", + "@mui/material": "^7.0.2", "@reduxjs/toolkit": "^1.8.6", "@types/dompurify": "^3.0.5", "@types/validator": "^13.12.2", diff --git a/py-src/data_formulator/agent_routes.py b/py-src/data_formulator/agent_routes.py index fa0c6325..81b73226 100644 --- a/py-src/data_formulator/agent_routes.py +++ b/py-src/data_formulator/agent_routes.py @@ -29,7 +29,7 @@ from data_formulator.agents.agent_data_load import DataLoadAgent from data_formulator.agents.agent_data_clean import DataCleanAgent from data_formulator.agents.agent_code_explanation import CodeExplanationAgent - +from data_formulator.agents.agent_query_completion import QueryCompletionAgent from data_formulator.agents.client_utils import Client from data_formulator.db_manager import db_manager @@ -437,4 +437,25 @@ def 
request_code_expl(): expl = code_expl_agent.run(input_tables, code) else: expl = "" - return expl \ No newline at end of file + return expl + +@agent_bp.route('/query-completion', methods=['POST']) +def query_completion(): + if request.is_json: + logger.info("# request data: ") + content = request.get_json() + + client = get_client(content['model']) + + data_source_metadata = content["data_source_metadata"] + query = content["query"] + + + query_completion_agent = QueryCompletionAgent(client=client) + reasoning, query = query_completion_agent.run(data_source_metadata, query) + response = flask.jsonify({ "token": "", "status": "ok", "reasoning": reasoning, "query": query }) + else: + response = flask.jsonify({ "token": "", "status": "error", "reasoning": "unable to complete query", "query": "" }) + + response.headers.add('Access-Control-Allow-Origin', '*') + return response diff --git a/py-src/data_formulator/agents/agent_py_data_rec.py b/py-src/data_formulator/agents/agent_py_data_rec.py index f67ef372..6c1db07b 100644 --- a/py-src/data_formulator/agents/agent_py_data_rec.py +++ b/py-src/data_formulator/agents/agent_py_data_rec.py @@ -165,7 +165,7 @@ def process_gpt_response(self, input_tables, messages, response): if result['status'] == 'ok': result_df = result['content'] result['content'] = { - 'rows': result_df.to_dict(orient='records'), + 'rows': json.loads(result_df.to_json(orient='records')), } else: logger.info(result['content']) diff --git a/py-src/data_formulator/agents/agent_py_data_transform.py b/py-src/data_formulator/agents/agent_py_data_transform.py index e096a449..b3cc999e 100644 --- a/py-src/data_formulator/agents/agent_py_data_transform.py +++ b/py-src/data_formulator/agents/agent_py_data_transform.py @@ -221,13 +221,11 @@ def process_gpt_response(self, input_tables, messages, response): result = py_sandbox.run_transform_in_sandbox2020(code_str, [pd.DataFrame.from_records(t['rows']) for t in input_tables], self.exec_python_in_subprocess) 
result['code'] = code_str - print(f"result: {result}") - if result['status'] == 'ok': # parse the content result_df = result['content'] result['content'] = { - 'rows': result_df.to_dict(orient='records'), + 'rows': json.loads(result_df.to_json(orient='records')), } else: logger.info(result['content']) diff --git a/py-src/data_formulator/agents/agent_query_completion.py b/py-src/data_formulator/agents/agent_query_completion.py new file mode 100644 index 00000000..8beed90c --- /dev/null +++ b/py-src/data_formulator/agents/agent_query_completion.py @@ -0,0 +1,80 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +import pandas as pd +import json + +from data_formulator.agents.agent_utils import extract_code_from_gpt_response, extract_json_objects +import re +import logging + + +logger = logging.getLogger(__name__) + + +SYSTEM_PROMPT = '''You are a data scientist to help with data queries. +The user will provide you with a description of the data source and tables available in the [DATA SOURCE] section and a query in the [USER INPUTS] section. +You will need to help the user complete the query and provide reasoning for the query you generated in the [OUTPUT] section. + +Input format: +* The data source description is a json object with the following fields: + * `data_source`: the name of the data source + * `tables`: a list of tables in the data source, which maps the table name to the list of columns available in the table. +* The user input is a natural language description of the query or a partial query you need to complete. + +Steps: +* Based on data source description and user input, you should first decide on what language should be used to query the data. 
+* Then, describe the logic for the query you generated in a json object in a block ```json``` with the following fields: + * `language`: the language of the query you generated + * `tables`: the names of the tables you will use in the query + * `logic`: the reasoning behind why you chose the tables and the logic for the query you generated +* Finally, generate the complete query in the language specified in a code block ```{language}```. + +Output format: +* The output should be in the following format, no other text should be included: + +[REASONING] +```json +{ + "language": {language}, + "tables": {tables}, + "logic": {logic} +} +``` + +[QUERY] +```{language} +{query} +``` +''' + +class QueryCompletionAgent(object): + + def __init__(self, client): + self.client = client + + def run(self, data_source_metadata, query): + + user_query = f"[DATA SOURCE]\n\n{json.dumps(data_source_metadata, indent=2)}\n\n[USER INPUTS]\n\n{query}\n\n[REASONING]\n" + + logger.info(user_query) + + messages = [{"role":"system", "content": SYSTEM_PROMPT}, + {"role":"user","content": user_query}] + + ###### the part that calls open_ai + response = self.client.get_completion(messages = messages) + response_content = '[REASONING]\n' + response.choices[0].message.content + + logger.info(f"=== query completion output ===>\n{response_content}\n") + + reasoning = extract_json_objects(response_content.split("[REASONING]")[1].split("[QUERY]")[0].strip())[0] + output_query = response_content.split("[QUERY]")[1].strip() + + # Extract the query by removing the language markers + language_pattern = r"```(\w+)\s+(.*?)```" + match = re.search(language_pattern, output_query, re.DOTALL) + if match: + output_query = match.group(2).strip() + + return reasoning, output_query diff --git a/py-src/data_formulator/app.py b/py-src/data_formulator/app.py index c2950612..edc8f55f 100644 --- a/py-src/data_formulator/app.py +++ b/py-src/data_formulator/app.py @@ -37,6 +37,7 @@ from data_formulator.tables_routes 
import tables_bp from data_formulator.agent_routes import agent_bp + app = Flask(__name__, static_url_path='', static_folder=os.path.join(APP_ROOT, "dist")) app.secret_key = secrets.token_hex(16) # Generate a random secret key for sessions diff --git a/py-src/data_formulator/data_loader/README.md b/py-src/data_formulator/data_loader/README.md new file mode 100644 index 00000000..c3270c15 --- /dev/null +++ b/py-src/data_formulator/data_loader/README.md @@ -0,0 +1,36 @@ +## Data Loader Module + +This module provides a framework for loading data from various external sources into DuckDB. It follows an abstract base class pattern to ensure consistent implementation across different data sources. + +### Building a New Data Loader + +The abstract class `ExternalDataLoader` defines the data loader interface. Each concrete implementation (e.g., `KustoDataLoader`, `MySQLDataLoader`) handles specific data source connections and data ingestion. + +To create a new data loader: + +1. Create a new class that inherits from `ExternalDataLoader` +2. Implement the required abstract methods: + - `list_params()`: Define required connection parameters + - `__init__()`: Initialize connection to data source + - `list_tables()`: List available tables/views + - `ingest_data()`: Load data from source + - `view_query_sample()`: Preview query results + - `ingest_data_from_query()`: Load data from custom query +3. Register the new class into `__init__.py` so that the front-end can automatically discover the new data loader. + +The UI automatically provides the query completion option to help users generate queries for the given data loader (from NL or partial queries).
+ +### Example Implementations + +- `KustoDataLoader`: Azure Data Explorer (Kusto) integration +- `MySQLDataLoader`: MySQL database integration + +### Testing + +Ensure your implementation: +- Handles connection errors gracefully +- Properly sanitizes table names +- Respects size limits for data ingestion +- Returns consistent metadata format + +Launch the front-end and test the data loader. \ No newline at end of file diff --git a/py-src/data_formulator/data_loader/__init__.py b/py-src/data_formulator/data_loader/__init__.py new file mode 100644 index 00000000..145ac806 --- /dev/null +++ b/py-src/data_formulator/data_loader/__init__.py @@ -0,0 +1,10 @@ +from data_formulator.data_loader.external_data_loader import ExternalDataLoader +from data_formulator.data_loader.mysql_data_loader import MySQLDataLoader +from data_formulator.data_loader.kusto_data_loader import KustoDataLoader + +DATA_LOADERS = { + "mysql": MySQLDataLoader, + "kusto": KustoDataLoader +} + +__all__ = ["ExternalDataLoader", "MySQLDataLoader", "KustoDataLoader", "DATA_LOADERS"] \ No newline at end of file diff --git a/py-src/data_formulator/data_loader/external_data_loader.py b/py-src/data_formulator/data_loader/external_data_loader.py new file mode 100644 index 00000000..540f1748 --- /dev/null +++ b/py-src/data_formulator/data_loader/external_data_loader.py @@ -0,0 +1,90 @@ +from abc import ABC, abstractmethod +from typing import Dict, Any, List +import pandas as pd +import json +import duckdb +import random +import string +import re + +def sanitize_table_name(name_as: str) -> str: + if not name_as: + raise ValueError("Table name cannot be empty") + + # Remove any SQL injection attempts + name_as = name_as.replace(";", "").replace("--", "").replace("/*", "").replace("*/", "") + + # Replace invalid characters with underscores + # This includes special characters, spaces, dots, dashes, and other non-alphanumeric chars + sanitized = re.sub(r'[^a-zA-Z0-9_]', '_', name_as) + + # Ensure the name starts 
with a letter or underscore + if not sanitized[0].isalpha() and sanitized[0] != '_': + sanitized = '_' + sanitized + + # Ensure the name is not a SQL keyword + sql_keywords = { + 'SELECT', 'FROM', 'WHERE', 'GROUP', 'BY', 'ORDER', 'HAVING', 'LIMIT', + 'OFFSET', 'JOIN', 'INNER', 'LEFT', 'RIGHT', 'FULL', 'OUTER', 'ON', + 'AND', 'OR', 'NOT', 'NULL', 'TRUE', 'FALSE', 'UNION', 'ALL', 'DISTINCT', + 'INSERT', 'UPDATE', 'DELETE', 'CREATE', 'DROP', 'TABLE', 'VIEW', 'INDEX', + 'ALTER', 'ADD', 'COLUMN', 'PRIMARY', 'KEY', 'FOREIGN', 'REFERENCES', + 'CONSTRAINT', 'DEFAULT', 'CHECK', 'UNIQUE', 'CASCADE', 'RESTRICT' + } + + if sanitized.upper() in sql_keywords: + sanitized = '_' + sanitized + + # Ensure the name is not too long (common SQL limit is 63 characters) + if len(sanitized) > 63: + sanitized = sanitized[:63] + + return sanitized + +class ExternalDataLoader(ABC): + + def ingest_df_to_duckdb(self, df: pd.DataFrame, table_name: str): + + base_name = table_name + counter = 1 + while True: + # Check if table exists + exists = self.duck_db_conn.execute(f"SELECT COUNT(*) FROM duckdb_tables() WHERE table_name = '{table_name}'").fetchone()[0] > 0 + if not exists: + break + # If exists, append counter to base name + table_name = f"{base_name}_{counter}" + counter += 1 + + # Create table + random_suffix = ''.join(random.choices(string.ascii_letters + string.digits, k=6)) + self.duck_db_conn.register(f'df_temp_{random_suffix}', df) + self.duck_db_conn.execute(f"CREATE TABLE {table_name} AS SELECT * FROM df_temp_{random_suffix}") + self.duck_db_conn.execute(f"DROP VIEW df_temp_{random_suffix}") # Drop the temporary view after creating the table + + @staticmethod + @abstractmethod + def list_params() -> List[Dict[str, Any]]: + pass + + @abstractmethod + def __init__(self, params: Dict[str, Any], duck_db_conn: duckdb.DuckDBPyConnection): + pass + + @abstractmethod + def list_tables(self) -> List[Dict[str, Any]]: + # should include: table_name, column_names, column_types, sample_data + 
pass + + @abstractmethod + def ingest_data(self, table_name: str, name_as: str = None, size: int = 1000000): + pass + + @abstractmethod + def view_query_sample(self, query: str) -> str: + pass + + @abstractmethod + def ingest_data_from_query(self, query: str, name_as: str): + pass + diff --git a/py-src/data_formulator/data_loader/kusto_data_loader.py b/py-src/data_formulator/data_loader/kusto_data_loader.py new file mode 100644 index 00000000..210c3e68 --- /dev/null +++ b/py-src/data_formulator/data_loader/kusto_data_loader.py @@ -0,0 +1,176 @@ +from typing import Dict, Any, List +import pandas as pd +import json +import duckdb +import random +import string + +from azure.kusto.data import KustoClient, KustoConnectionStringBuilder +from azure.kusto.data.helpers import dataframe_from_result_table + +from data_formulator.data_loader.external_data_loader import ExternalDataLoader, sanitize_table_name + + +class KustoDataLoader(ExternalDataLoader): + + @staticmethod + def list_params() -> bool: + params_list = [ + {"name": "kusto_cluster", "type": "string", "required": True, "description": ""}, + {"name": "kusto_database", "type": "string", "required": True, "description": ""}, + {"name": "client_id", "type": "string", "required": False, "description": "only necessary for AppKey auth"}, + {"name": "client_secret", "type": "string", "required": False, "description": "only necessary for AppKey auth"}, + {"name": "tenant_id", "type": "string", "required": False, "description": "only necessary for AppKey auth"} + ] + return params_list + + def __init__(self, params: Dict[str, Any], duck_db_conn: duckdb.DuckDBPyConnection): + + self.kusto_cluster = params.get("kusto_cluster", None) + self.kusto_database = params.get("kusto_database", None) + + self.client_id = params.get("client_id", None) + self.client_secret = params.get("client_secret", None) + self.tenant_id = params.get("tenant_id", None) + + try: + if self.client_id and self.client_secret and self.tenant_id: + # This 
function provides an interface to Kusto. It uses AAD application key authentication. + self.client = KustoClient(KustoConnectionStringBuilder.with_aad_application_key_authentication( + self.kusto_cluster, self.client_id, self.client_secret, self.tenant_id)) + else: + # This function provides an interface to Kusto. It uses Azure CLI auth, but you can also use other auth types. + self.client = KustoClient(KustoConnectionStringBuilder.with_az_cli_authentication(self.kusto_cluster)) + except Exception as e: + raise Exception(f"Error creating Kusto client: {e}, please authenticate with Azure CLI when starting the app.") + + self.duck_db_conn = duck_db_conn + + def query(self, kql: str) -> pd.DataFrame: + result = self.client.execute(self.kusto_database, kql) + return dataframe_from_result_table(result.primary_results[0]) + + def list_tables(self) -> List[Dict[str, Any]]: + # first list functions (views) + query = ".show functions" + function_result_df = self.query(query) + + functions = [] + for func in function_result_df.to_dict(orient="records"): + func_name = func['Name'] + result = self.query(f".show function ['{func_name}'] schema as json").to_dict(orient="records") + schema = json.loads(result[0]['Schema']) + parameters = schema['InputParameters'] + columns = [{ + 'name': r["Name"], + 'type': r["Type"] + } for r in schema['OutputColumns']] + + # skip functions with parameters at the moment + if len(parameters) > 0: + continue + + sample_query = f"['{func_name}'] | take {10}" + sample_result = self.query(sample_query).to_dict(orient="records") + + function_metadata = { + "row_count": 0, + "columns": columns, + "parameters": parameters, + "sample_rows": sample_result + } + functions.append({ + "type": "function", + "name": func_name, + "metadata": function_metadata + }) + + # then list tables + query = ".show tables" + tables_df = self.query(query) + + tables = [] + for table in tables_df.to_dict(orient="records"): + table_name = table['TableName'] + schema_result = 
self.query(f".show table ['{table_name}'] schema as json").to_dict(orient="records") + columns = [{ + 'name': r["Name"], + 'type': r["Type"] + } for r in json.loads(schema_result[0]['Schema'])['OrderedColumns']] + + row_count_result = self.query(f".show table ['{table_name}'] details").to_dict(orient="records") + row_count = row_count_result[0]["TotalRowCount"] + + sample_query = f"['{table_name}'] | take {10}" + sample_result = self.query(sample_query).to_dict(orient="records") + + table_metadata = { + "row_count": row_count, + "columns": columns, + "sample_rows": sample_result + } + + tables.append({ + "type": "table", + "name": table_name, + "metadata": table_metadata + }) + + return functions + tables + + def ingest_data(self, table_name: str, name_as: str = None, size: int = 5000000) -> pd.DataFrame: + if name_as is None: + name_as = table_name + + # Create a subquery that applies random ordering once with a fixed seed + total_rows_ingested = 0 + first_chunk = True + chunk_size = 100000 + + size_estimate_query = f"['{table_name}'] | take {10000} | summarize Total=sum(estimate_data_size(*))" + size_estimate_result = self.query(size_estimate_query) + size_estimate = size_estimate_result['Total'].values[0] + print(f"size_estimate: {size_estimate}") + + chunk_size = min(64 * 1024 * 1024 / size_estimate * 0.9 * 10000, 5000000) + print(f"estimated_chunk_size: {chunk_size}") + + while total_rows_ingested < size: + try: + query = f"['{table_name}'] | serialize | extend rn=row_number() | where rn >= {total_rows_ingested} and rn < {total_rows_ingested + chunk_size} | project-away rn" + chunk_df = self.query(query) + except Exception as e: + chunk_size = int(chunk_size * 0.8) + continue + + print(f"total_rows_ingested: {total_rows_ingested}") + print(chunk_df.head()) + + # Stop if no more data + if chunk_df.empty: + break + + # Sanitize the table name for SQL compatibility + name_as = sanitize_table_name(name_as) + + # For first chunk, create new table; for subsequent 
chunks, append + if first_chunk: + self.ingest_df_to_duckdb(chunk_df, name_as) + first_chunk = False + else: + # Append to existing table + random_suffix = ''.join(random.choices(string.ascii_letters + string.digits, k=6)) + self.duck_db_conn.register(f'df_temp_{random_suffix}', chunk_df) + self.duck_db_conn.execute(f"INSERT INTO {name_as} SELECT * FROM df_temp_{random_suffix}") + self.duck_db_conn.execute(f"DROP VIEW df_temp_{random_suffix}") + + total_rows_ingested += len(chunk_df) + + def view_query_sample(self, query: str) -> str: + return self.query(query).head(10).to_dict(orient="records") + + def ingest_data_from_query(self, query: str, name_as: str) -> pd.DataFrame: + # Sanitize the table name for SQL compatibility + name_as = sanitize_table_name(name_as) + df = self.query(query) + self.ingest_df_to_duckdb(df, name_as) \ No newline at end of file diff --git a/py-src/data_formulator/data_loader/mysql_data_loader.py b/py-src/data_formulator/data_loader/mysql_data_loader.py new file mode 100644 index 00000000..625204e3 --- /dev/null +++ b/py-src/data_formulator/data_loader/mysql_data_loader.py @@ -0,0 +1,102 @@ +import json + +import pandas as pd +import duckdb + +from data_formulator.data_loader.external_data_loader import ExternalDataLoader, sanitize_table_name +from typing import Dict, Any + +class MySQLDataLoader(ExternalDataLoader): + + @staticmethod + def list_params() -> bool: + params_list = [ + {"name": "user", "type": "string", "required": True, "default": "root", "description": ""}, + {"name": "password", "type": "string", "required": False, "default": "", "description": "leave blank for no password"}, + {"name": "host", "type": "string", "required": True, "default": "localhost", "description": ""}, + {"name": "database", "type": "string", "required": True, "default": "mysql", "description": ""} + ] + return params_list + + def __init__(self, params: Dict[str, Any], duck_db_conn: duckdb.DuckDBPyConnection): + self.params = params + self.duck_db_conn 
= duck_db_conn + + # Install and load the MySQL extension + self.duck_db_conn.install_extension("mysql") + self.duck_db_conn.load_extension("mysql") + + attatch_string = "" + for key, value in self.params.items(): + if value: + attatch_string += f"{key}={value} " + + # Detach existing mysqldb connection if it exists + try: + self.duck_db_conn.execute("DETACH mysqldb;") + except: + pass # Ignore if mysqldb doesn't exist + + # Register MySQL connection + self.duck_db_conn.execute(f"ATTACH '{attatch_string}' AS mysqldb (TYPE mysql);") + + def list_tables(self): + tables_df = self.duck_db_conn.execute(f""" + SELECT TABLE_SCHEMA, TABLE_NAME FROM mysqldb.information_schema.tables + WHERE table_schema NOT IN ('information_schema', 'mysql', 'performance_schema', 'sys') + """).fetch_df() + + results = [] + + for schema, table_name in tables_df.values: + + full_table_name = f"mysqldb.{schema}.{table_name}" + + # Get column information using DuckDB's information schema + columns_df = self.duck_db_conn.execute(f"DESCRIBE {full_table_name}").df() + columns = [{ + 'name': row['column_name'], + 'type': row['column_type'] + } for _, row in columns_df.iterrows()] + + # Get sample data + sample_df = self.duck_db_conn.execute(f"SELECT * FROM {full_table_name} LIMIT 10").df() + sample_rows = json.loads(sample_df.to_json(orient="records")) + + # get row count + row_count = self.duck_db_conn.execute(f"SELECT COUNT(*) FROM {full_table_name}").fetchone()[0] + + table_metadata = { + "row_count": row_count, + "columns": columns, + "sample_rows": sample_rows + } + + results.append({ + "name": full_table_name, + "metadata": table_metadata + }) + + return results + + def ingest_data(self, table_name: str, name_as: str | None = None, size: int = 1000000): + # Create table in the main DuckDB database from MySQL data + if name_as is None: + name_as = table_name.split('.')[-1] + + name_as = sanitize_table_name(name_as) + + self.duck_db_conn.execute(f""" + CREATE OR REPLACE TABLE main.{name_as} AS 
+ SELECT * FROM {table_name} + LIMIT {size} + """) + + def view_query_sample(self, query: str) -> str: + return self.duck_db_conn.execute(query).df().head(10).to_dict(orient="records") + + def ingest_data_from_query(self, query: str, name_as: str) -> pd.DataFrame: + # Execute the query and get results as a DataFrame + df = self.duck_db_conn.execute(query).df() + # Use the base class's method to ingest the DataFrame + self.ingest_df_to_duckdb(df, name_as) \ No newline at end of file diff --git a/py-src/data_formulator/db_manager.py b/py-src/data_formulator/db_manager.py index 0f8e4888..348ef244 100644 --- a/py-src/data_formulator/db_manager.py +++ b/py-src/data_formulator/db_manager.py @@ -1,32 +1,25 @@ import duckdb import pandas as pd -from typing import Optional, Dict, List, ContextManager, Any, Tuple -import time -from flask import session +from typing import Dict import tempfile import os from contextlib import contextmanager from dotenv import load_dotenv class DuckDBManager: - def __init__(self, external_db_connections: Dict[str, Dict[str, Any]], local_db_dir: str): + def __init__(self, local_db_dir: str): # Store session db file paths self._db_files: Dict[str, str] = {} - - # External db connections and tracking of installed extensions - self._external_db_connections: Dict[str, Dict[str, Any]] = external_db_connections - self._installed_extensions: Dict[str, List[str]] = {} self._local_db_dir: str = local_db_dir @contextmanager - def connection(self, session_id: str) -> ContextManager[duckdb.DuckDBPyConnection]: + def connection(self, session_id: str): """Get a DuckDB connection as a context manager that will be closed when exiting the context""" conn = None try: conn = self.get_connection(session_id) yield conn finally: - # Close the connection after use if conn: conn.close() @@ -40,8 +33,6 @@ def get_connection(self, session_id: str) -> duckdb.DuckDBPyConnection: db_file = os.path.join(db_dir, f"df_{session_id}.duckdb") print(f"=== Creating new db file: 
{db_file}") self._db_files[session_id] = db_file - # Initialize extension tracking for this file - self._installed_extensions[db_file] = [] else: print(f"=== Using existing db file: {self._db_files[session_id]}") db_file = self._db_files[session_id] @@ -49,43 +40,11 @@ def get_connection(self, session_id: str) -> duckdb.DuckDBPyConnection: # Create a fresh connection to the database file conn = duckdb.connect(database=db_file) - if self._external_db_connections and self._external_db_connections['db_type'] in ['mysql', 'postgresql']: - db_name = self._external_db_connections['db_name'] - db_type = self._external_db_connections['db_type'] - - print(f"=== connecting to {db_type} extension") - # Only install if not already installed for this db file - if db_type not in self._installed_extensions.get(db_file, []): - conn.execute(f"INSTALL {db_type};") - self._installed_extensions[db_file].append(db_type) - - conn.execute(f"LOAD {db_type};") - conn.execute(f"""CREATE SECRET ( - TYPE {db_type}, - HOST '{self._external_db_connections['host']}', - PORT '{self._external_db_connections['port']}', - DATABASE '{self._external_db_connections['database']}', - USER '{self._external_db_connections['user']}', - PASSWORD '{self._external_db_connections['password']}'); - """) - conn.execute(f"ATTACH '' AS {db_name} (TYPE {db_type});") - # result = conn.execute(f"SELECT * FROM {db_name}.information_schema.tables WHERE table_schema NOT IN ('information_schema', 'mysql', 'performance_schema', 'sys');").fetch_df() - # print(f"=== result: {result}") - return conn env = load_dotenv() # Initialize the DB manager db_manager = DuckDBManager( - external_db_connections={ - "db_name": os.getenv('DB_NAME'), - "db_type": os.getenv('DB_TYPE'), - "host": os.getenv('DB_HOST'), - "port": os.getenv('DB_PORT'), - "database": os.getenv('DB_DATABASE'), - "user": os.getenv('DB_USER'), - "password": os.getenv('DB_PASSWORD') - } if os.getenv('USE_EXTERNAL_DB') == 'true' else None, 
local_db_dir=os.getenv('LOCAL_DB_DIR') ) \ No newline at end of file diff --git a/py-src/data_formulator/tables_routes.py b/py-src/data_formulator/tables_routes.py index 2beb4549..cb04f707 100644 --- a/py-src/data_formulator/tables_routes.py +++ b/py-src/data_formulator/tables_routes.py @@ -8,7 +8,7 @@ mimetypes.add_type('application/javascript', '.js') mimetypes.add_type('application/javascript', '.mjs') import json - +import traceback from flask import request, send_from_directory, session, jsonify, Blueprint import pandas as pd import random @@ -16,6 +16,7 @@ from pathlib import Path from data_formulator.db_manager import db_manager +from data_formulator.data_loader import DATA_LOADERS import re from typing import Tuple @@ -44,11 +45,11 @@ def list_tables(): table_metadata_list = db.execute(""" SELECT database_name, schema_name, table_name, schema_name==current_schema() as is_current_schema, 'table' as object_type FROM duckdb_tables() - WHERE internal=False + WHERE internal=False AND database_name == current_database() UNION ALL SELECT database_name, schema_name, view_name as table_name, schema_name==current_schema() as is_current_schema, 'view' as object_type FROM duckdb_views() - WHERE view_name NOT LIKE 'duckdb_%' AND view_name NOT LIKE 'sqlite_%' AND view_name NOT LIKE 'pragma_%' + WHERE view_name NOT LIKE 'duckdb_%' AND view_name NOT LIKE 'sqlite_%' AND view_name NOT LIKE 'pragma_%' AND database_name == current_database() """).fetchall() @@ -471,107 +472,6 @@ def upload_db_file(): }), status_code -def validate_db_connection_params(db_type: str, db_host: str, db_port: int, - db_database: str, db_user: str, db_password: str): - """Validate database connection parameters""" - # Validate db_type - valid_db_types = ['postgresql', 'mysql'] - if not db_type or db_type.lower() not in valid_db_types: - raise ValueError(f"Invalid database type. 
Must be one of: {', '.join(valid_db_types)}") - - # Validate host (basic DNS/IP format check) - if not db_host or not re.match(r'^[a-zA-Z0-9.-]+$', db_host): - raise ValueError("Invalid host format") - - # Validate port - try: - port = int(db_port) - if not (1 <= port <= 65535): - raise ValueError() - except (ValueError, TypeError): - raise ValueError("Port must be a number between 1 and 65535") - - # Validate database name (alphanumeric and underscores only) - if not db_database or not re.match(r'^[a-zA-Z0-9_]+$', db_database): - raise ValueError("Invalid database name format") - - # Validate username (alphanumeric and some special chars) - if not db_user or not re.match(r'^[a-zA-Z0-9@._-]+$', db_user): - raise ValueError("Invalid username format") - - # Validate password exists - if not db_password: - raise ValueError("Password cannot be empty") - -@tables_bp.route('/attach-external-db', methods=['POST']) -def attach_external_db(): - """Attach an external db to the session""" - try: - data = request.get_json() - db_type = data.get('db_type') - db_host = data.get('db_host') - db_port = data.get('db_port') - db_database = data.get('db_database') - db_user = data.get('db_user') - db_password = data.get('db_password') - - # Generate a random suffix for the database name - suffix = ''.join(random.choices(string.ascii_letters + string.digits, k=2)) - db_name = f"{db_type.lower()}_{suffix}" - - if 'session_id' not in session: - return jsonify({"status": "error", "message": "No session ID found"}), 400 - - with db_manager.connection(session['session_id']) as conn: - # Create secret using parameterized query - - # Install and load the extension - if db_type == 'mysql': - conn.install_extension("mysql") - conn.load_extension("mysql") - elif db_type == 'postgresql': - conn.install_extension("postgres") - conn.load_extension("postgres") - - connect_query = f"""CREATE SECRET ( - TYPE {db_type}, - HOST '{db_host}', - PORT '{db_port}', - DATABASE '{db_database}', - USER 
'{db_user}', - PASSWORD '{db_password}' - );""" - conn.execute(connect_query) - - # Attach the database - conn.execute(f"ATTACH '' AS {db_name} (TYPE {db_type});") - - result = conn.execute(f"SELECT * FROM {db_name}.information_schema.tables WHERE table_schema NOT IN ('information_schema', 'mysql', 'performance_schema', 'sys');").fetch_df() - - print(f"result: {result}") - - result = conn.execute(f"SELECT * FROM {db_name}.sakila.actor LIMIT 10;").fetchdf() - - print(f"result: {result}") - - # Log what we found for debugging - logger.info(f"Found {len(result)} tables: {result}") - - return jsonify({ - "status": "success", - "message": "External database attached successfully", - "result": result - }) - - except Exception as e: - logger.error(f"Error attaching external database: {str(e)}") - safe_msg, status_code = sanitize_db_error_message(e) - return jsonify({ - "status": "error", - "message": safe_msg - }), status_code - - @tables_bp.route('/download-db-file', methods=['GET']) def download_db_file(): """Download the db file for a session""" @@ -791,14 +691,21 @@ def sanitize_db_error_message(error: Exception) -> Tuple[str, int]: # Define patterns for known safe errors safe_error_patterns = { # Database table errors - r"Table.*does not exist": ("Specified table was not found", 404), - r"Table.*already exists": ("A table with this name already exists", 409), + r"Table.*does not exist": (error_msg, 404), + r"Table.*already exists": (error_msg, 409), # Query errors - r"syntax error in SQL": ("Invalid SQL query syntax", 400), - r"Invalid input syntax": ("Invalid input data format", 400), + r"syntax error": (error_msg, 400), + r"Catalog Error": (error_msg, 404), + r"Binder Error": (error_msg, 400), + r"Invalid input syntax": (error_msg, 400), + # File errors - r"No such file": ("File not found", 404), + r"No such file": (error_msg, 404), r"Permission denied": ("Access denied", 403), + + # Data loader errors + r"Entity ID": (error_msg, 500), + r"session_id": ("session_id 
not found, please refresh the page", 500), } # Check if error matches any safe pattern @@ -810,4 +717,150 @@ def sanitize_db_error_message(error: Exception) -> Tuple[str, int]: logger.error(f"Unexpected error occurred: {error_msg}") # Return a generic error message for unknown errors - return "An unexpected error occurred", 500 \ No newline at end of file + return "An unexpected error occurred", 500 + + +@tables_bp.route('/data-loader/list-data-loaders', methods=['GET']) +def data_loader_list_data_loaders(): + """List all available data loaders""" + + try: + return jsonify({ + "status": "success", + "data_loaders": { + name: data_loader.list_params() + for name, data_loader in DATA_LOADERS.items() + } + }) + except Exception as e: + logger.error(f"Error listing data loaders: {str(e)}") + safe_msg, status_code = sanitize_db_error_message(e) + return jsonify({ + "status": "error", + "message": safe_msg + }), status_code + +@tables_bp.route('/data-loader/list-tables', methods=['POST']) +def data_loader_list_tables(): + """List tables from a data loader""" + + try: + data = request.get_json() + data_loader_type = data.get('data_loader_type') + data_loader_params = data.get('data_loader_params') + + if data_loader_type not in DATA_LOADERS: + return jsonify({"status": "error", "message": f"Invalid data loader type. 
Must be one of: {', '.join(DATA_LOADERS.keys())}"}), 400 + + with db_manager.connection(session['session_id']) as duck_db_conn: + data_loader = DATA_LOADERS[data_loader_type](data_loader_params, duck_db_conn) + tables = data_loader.list_tables() + + return jsonify({ + "status": "success", + "tables": tables + }) + + except Exception as e: + logger.error(f"Error listing tables from data loader: {str(e)}") + #print(traceback.format_exc()) + safe_msg, status_code = sanitize_db_error_message(e) + return jsonify({ + "status": "error", + "message": safe_msg + }), status_code + + +@tables_bp.route('/data-loader/ingest-data', methods=['POST']) +def data_loader_ingest_data(): + """Ingest data from a data loader""" + + try: + data = request.get_json() + data_loader_type = data.get('data_loader_type') + data_loader_params = data.get('data_loader_params') + table_name = data.get('table_name') + + if data_loader_type not in DATA_LOADERS: + return jsonify({"status": "error", "message": f"Invalid data loader type. 
Must be one of: {', '.join(DATA_LOADERS.keys())}"}), 400 + + with db_manager.connection(session['session_id']) as duck_db_conn: + data_loader = DATA_LOADERS[data_loader_type](data_loader_params, duck_db_conn) + data_loader.ingest_data(table_name) + + return jsonify({ + "status": "success", + "message": "Successfully ingested data from data loader" + }) + + except Exception as e: + logger.error(f"Error ingesting data from data loader: {str(e)}") + safe_msg, status_code = sanitize_db_error_message(e) + return jsonify({ + "status": "error", + "message": safe_msg + }), status_code + + +@tables_bp.route('/data-loader/view-query-sample', methods=['POST']) +def data_loader_view_query_sample(): + """View a sample of data from a query""" + + try: + data = request.get_json() + data_loader_type = data.get('data_loader_type') + data_loader_params = data.get('data_loader_params') + query = data.get('query') + + if data_loader_type not in DATA_LOADERS: + return jsonify({"status": "error", "message": f"Invalid data loader type. 
Must be one of: {', '.join(DATA_LOADERS.keys())}"}), 400 + + with db_manager.connection(session['session_id']) as duck_db_conn: + data_loader = DATA_LOADERS[data_loader_type](data_loader_params, duck_db_conn) + sample = data_loader.view_query_sample(query) + + return jsonify({ + "status": "success", + "sample": sample, + "message": "Successfully retrieved query sample" + }) + except Exception as e: + logger.error(f"Error viewing query sample: {str(e)}") + safe_msg, status_code = sanitize_db_error_message(e) + return jsonify({ + "status": "error", + "sample": [], + "message": safe_msg + }), status_code + + +@tables_bp.route('/data-loader/ingest-data-from-query', methods=['POST']) +def data_loader_ingest_data_from_query(): + """Ingest data from a data loader""" + + try: + data = request.get_json() + data_loader_type = data.get('data_loader_type') + data_loader_params = data.get('data_loader_params') + query = data.get('query') + name_as = data.get('name_as') + + if data_loader_type not in DATA_LOADERS: + return jsonify({"status": "error", "message": f"Invalid data loader type. 
Must be one of: {', '.join(DATA_LOADERS.keys())}"}), 400 + + with db_manager.connection(session['session_id']) as duck_db_conn: + data_loader = DATA_LOADERS[data_loader_type](data_loader_params, duck_db_conn) + data_loader.ingest_data_from_query(query, name_as) + + return jsonify({ + "status": "success", + "message": "Successfully ingested data from data loader" + }) + + except Exception as e: + logger.error(f"Error ingesting data from data loader: {str(e)}") + safe_msg, status_code = sanitize_db_error_message(e) + return jsonify({ + "status": "error", + "message": safe_msg + }), status_code \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index f049232b..96675706 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "data_formulator" -version = "0.2.1.1" +version = "0.2.1.2" requires-python = ">=3.9" authors = [ diff --git a/src/app/App.tsx b/src/app/App.tsx index b6da1d8b..3a6a4672 100644 --- a/src/app/App.tsx +++ b/src/app/App.tsx @@ -13,7 +13,7 @@ import { getSessionId, } from './dfSlice' -import blue from '@mui/material/colors/blue'; +import { red, purple, blue, brown, yellow, orange, } from '@mui/material/colors'; import _ from 'lodash'; @@ -559,11 +559,14 @@ export const AppFC: FC = function AppFC(appProps) { primary: { main: blue[700] }, + secondary: { + main: purple[700] + }, derived: { - main: "rgb(255,215,0)", // gold + main: yellow[700], }, custom: { - main: "rgb(255, 160, 122)", //lightsalmon + main: orange[700], //lightsalmon }, warning: { main: '#bf5600', // New accessible color, original (#ed6c02) has insufficient color contrast of 3.11 diff --git a/src/app/dfSlice.tsx b/src/app/dfSlice.tsx index e1ce556a..392a7660 100644 --- a/src/app/dfSlice.tsx +++ b/src/app/dfSlice.tsx @@ -71,6 +71,8 @@ export interface DataFormulatorState { defaultChartWidth: number; defaultChartHeight: number; } + + dataLoaderConnectParams: Record>; // {table_name: {param_name: 
param_value}} } // Define the initial state using that type @@ -108,7 +110,9 @@ const initialState: DataFormulatorState = { maxRepairAttempts: 1, defaultChartWidth: 300, defaultChartHeight: 300, - } + }, + + dataLoaderConnectParams: {} } let getUnrefedDerivedTableIds = (state: DataFormulatorState) => { @@ -278,6 +282,8 @@ export const dataFormulatorSlice = createSlice({ state.chartSynthesisInProgress = []; state.config = initialState.config; + + //state.dataLoaderConnectParams = initialState.dataLoaderConnectParams; }, loadState: (state, action: PayloadAction) => { @@ -305,6 +311,8 @@ export const dataFormulatorSlice = createSlice({ state.chartSynthesisInProgress = []; state.config = savedState.config; + + state.dataLoaderConnectParams = savedState.dataLoaderConnectParams || {}; }, setConfig: (state, action: PayloadAction<{ formulateTimeoutSeconds: number, maxRepairAttempts: number, @@ -730,6 +738,24 @@ export const dataFormulatorSlice = createSlice({ setSessionId: (state, action: PayloadAction) => { state.sessionId = action.payload; }, + updateDataLoaderConnectParams: (state, action: PayloadAction<{dataLoaderType: string, params: Record}>) => { + let dataLoaderType = action.payload.dataLoaderType; + let params = action.payload.params; + state.dataLoaderConnectParams[dataLoaderType] = params; + }, + updateDataLoaderConnectParam: (state, action: PayloadAction<{dataLoaderType: string, paramName: string, paramValue: string}>) => { + let dataLoaderType = action.payload.dataLoaderType; + if (!state.dataLoaderConnectParams[dataLoaderType]) { + state.dataLoaderConnectParams[dataLoaderType] = {}; + } + let paramName = action.payload.paramName; + let paramValue = action.payload.paramValue; + state.dataLoaderConnectParams[dataLoaderType][paramName] = paramValue; + }, + deleteDataLoaderConnectParams: (state, action: PayloadAction) => { + let dataLoaderType = action.payload; + delete state.dataLoaderConnectParams[dataLoaderType]; + } }, extraReducers: (builder) => { builder 
diff --git a/src/app/utils.tsx b/src/app/utils.tsx index 3059f96e..58aee261 100644 --- a/src/app/utils.tsx +++ b/src/app/utils.tsx @@ -57,7 +57,6 @@ export function getUrls() { UPLOAD_DB_FILE: `/api/tables/upload-db-file`, DOWNLOAD_DB_FILE: `/api/tables/download-db-file`, RESET_DB_FILE: `/api/tables/reset-db-file`, - ATTACH_EXTERNAL_DB: `/api/tables/attach-external-db`, LIST_TABLES: `/api/tables/list-tables`, TABLE_DATA: `/api/tables/get-table`, @@ -66,6 +65,14 @@ export function getUrls() { GET_COLUMN_STATS: `/api/tables/analyze`, QUERY_TABLE: `/api/tables/query`, SAMPLE_TABLE: `/api/tables/sample-table`, + + DATA_LOADER_LIST_DATA_LOADERS: `/api/tables/data-loader/list-data-loaders`, + DATA_LOADER_LIST_TABLES: `/api/tables/data-loader/list-tables`, + DATA_LOADER_INGEST_DATA: `/api/tables/data-loader/ingest-data`, + DATA_LOADER_VIEW_QUERY_SAMPLE: `/api/tables/data-loader/view-query-sample`, + DATA_LOADER_INGEST_DATA_FROM_QUERY: `/api/tables/data-loader/ingest-data-from-query`, + + QUERY_COMPLETION: `/api/agent/query-completion`, }; } diff --git a/src/scss/ConceptShelf.scss b/src/scss/ConceptShelf.scss index bc2403ce..84b07da0 100644 --- a/src/scss/ConceptShelf.scss +++ b/src/scss/ConceptShelf.scss @@ -214,75 +214,6 @@ } } -.info-panel-table { - .info-panel-table-cell { - padding: 0px 1px 0px 1px; - .example-textfield { - display: flex; - } - } - - .info-panel-table-index-column { - width: fit-content; - padding: 0px; - color: darkgray; - font-size: smaller; - font-style: italic; - - } - - .info-panel-table-header { - - .info-panel-table-header-cell { - padding: 0px 1px 0px 1px; - max-width: 240px; - - .info-panel-table-header-container { - display: flex; - flex-direction: row; - justify-content: space-between; - height: auto; - padding: 2px 4px; - position: relative; - - } - - .info-panel-table-header-container.row-index { - background-color: white; - border-bottom: 2px solid lightgray; - } - .info-panel-table-header-container.custom { - background-color: #fff9f8; 
- border-bottom: 2px solid #ffb395; - } - .info-panel-table-header-container.original { - background-color: rgb(237, 247, 252); - border-bottom: 2px solid #0288d1; - } - - .info-panel-table-header-title { - margin: auto; - margin-left: 3px; - display: flex; - align-items: center; - } - - .info-panel-table-header-name { - font-size: inherit; - margin-left: 3px; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - flex-shrink: 1; - font-style: italic; - color: #333; - } - } - - - } -} - @keyframes color { 0% { background: white; } 50% { background: rgb(237, 247, 252); } diff --git a/src/scss/VisualizationView.scss b/src/scss/VisualizationView.scss index ac2e75ef..8cb8d17b 100644 --- a/src/scss/VisualizationView.scss +++ b/src/scss/VisualizationView.scss @@ -219,10 +219,6 @@ $accelerate-ease: cubic-bezier(0.4, 0.0, 1, 1); animation: appear 0.5s ease-out } - .selected-card { - //box-shadow: 2px 2px 4px lightslategray; - border: 2px solid rgb(25, 118, 210); - } .vega-thumbnail { diff --git a/src/views/DBTableManager.tsx b/src/views/DBTableManager.tsx index 2c54d48d..a856122d 100644 --- a/src/views/DBTableManager.tsx +++ b/src/views/DBTableManager.tsx @@ -1,11 +1,11 @@ // TableManager.tsx -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, FC } from 'react'; import { Card, CardContent, Typography, Button, - Grid, + Grid, Box, IconButton, Paper, @@ -30,8 +30,14 @@ import { CircularProgress, ButtonGroup, Tooltip, - MenuItem + MenuItem, + Chip, + Collapse, + styled, + ToggleButtonGroup, + ToggleButton } from '@mui/material'; + import DeleteIcon from '@mui/icons-material/Delete'; import UploadFileIcon from '@mui/icons-material/UploadFile'; import CloseIcon from '@mui/icons-material/Close'; @@ -44,17 +50,29 @@ import UploadIcon from '@mui/icons-material/Upload'; import DownloadIcon from '@mui/icons-material/Download'; import RestartAltIcon from '@mui/icons-material/RestartAlt'; import PolylineIcon from 
'@mui/icons-material/Polyline'; +import ExpandLessIcon from '@mui/icons-material/ExpandLess'; +import ExpandMoreIcon from '@mui/icons-material/ExpandMore'; +import TableRowsIcon from '@mui/icons-material/TableRows'; +import RefreshIcon from '@mui/icons-material/Refresh'; +import ArrowForwardIcon from '@mui/icons-material/ArrowForward'; import { getUrls } from '../app/utils'; import { CustomReactTable } from './ReactTable'; import { DictTable } from '../components/ComponentType'; import { Type } from '../data/types'; import { useDispatch, useSelector } from 'react-redux'; -import { dfActions } from '../app/dfSlice'; +import { dfActions, dfSelectors, getSessionId } from '../app/dfSlice'; import { alpha } from '@mui/material'; import { DataFormulatorState } from '../app/dfSlice'; import { fetchFieldSemanticType } from '../app/dfSlice'; import { AppDispatch } from '../app/store'; +import Editor from 'react-simple-code-editor'; + +import Prism from 'prismjs' +import 'prismjs/components/prism-javascript' // Language +import 'prismjs/themes/prism.css'; //Example style, you can use another +import PrecisionManufacturingIcon from '@mui/icons-material/PrecisionManufacturing'; +import CheckIcon from '@mui/icons-material/Check'; export const handleDBDownload = async (sessionId: string) => { try { @@ -102,38 +120,36 @@ interface DBTable { interface TabPanelProps { children?: React.ReactNode; - index: number; - value: number; + key: string; + show: boolean; sx?: SxProps; } function TabPanel(props: TabPanelProps, sx: SxProps) { - const { children, value, index, ...other } = props; + const { children, show, key, ...other } = props; return ( - ); } -function a11yProps(index: number) { +function a11yProps(key: string) { return { - id: `vertical-tab-${index}`, - 'aria-controls': `vertical-tabpanel-${index}`, + id: `vertical-tab-${key}`, + 'aria-controls': `vertical-tabpanel-${key}`, }; } -interface TableStatistics { +interface ColumnStatistics { column: string; type: string; 
statistics: { @@ -148,12 +164,12 @@ interface TableStatistics { interface TableStatisticsViewProps { tableName: string; - tableAnalysisMap: Record; + columnStats: ColumnStatistics[]; } export class TableStatisticsView extends React.Component { render() { - const { tableName, tableAnalysisMap } = this.props; + const { tableName, columnStats } = this.props; // Common styles for header cells const headerCellStyle = { @@ -197,7 +213,7 @@ export class TableStatisticsView extends React.Component - {tableAnalysisMap[tableName]?.statistics.map((stat, idx) => ( + {columnStats.map((stat, idx) => ( { return ( DB Tables} /> @@ -259,19 +270,37 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function const sessionId = useSelector((state: DataFormulatorState) => state.sessionId); const [tableDialogOpen, setTableDialogOpen] = useState(false); - const [tableAnalysisMap, setTableAnalysisMap] = useState>({}); + const [tableAnalysisMap, setTableAnalysisMap] = useState>({}); + // maps data loader type to list of param defs + const [dataLoaderParamDefs, setDataLoaderParamDefs] = useState>({}); + const [dbTables, setDbTables] = useState([]); - const [selectedTabIndex, setSelectedTabIndex] = useState(0); - + const [selectedTabKey, setSelectedTabKey] = useState(""); + const [errorMessage, setErrorMessage] = useState<{content: string, severity: "error" | "warning" | "info" | "success"} | null>(null); const [showError, setShowError] = useState(false); const [isUploading, setIsUploading] = useState(false); useEffect(() => { fetchTables(); + fetchDataLoaders(); }, []); + useEffect(() => { + if (errorMessage?.content?.includes("session_id not found")) { + dispatch(getSessionId()); + } + }, [errorMessage]) + + useEffect(() => { + if (dbTables.length == 0) { + setSelectedTabKey(""); + } else if (!selectedTabKey.startsWith("dataLoader:") && dbTables.find(t => t.name === selectedTabKey) == undefined) { + setSelectedTabKey(dbTables[0].name); + } + }, [dbTables]); + // 
Fetch list of tables const fetchTables = async () => { try { @@ -281,10 +310,31 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function setDbTables(data.tables); } } catch (error) { - console.error('Failed to fetch tables:', error); + setErrorMessage({content: 'Failed to fetch tables, please check if the server is running', severity: "error"}); + setShowError(true); } }; + const fetchDataLoaders = async () => { + fetch(getUrls().DATA_LOADER_LIST_DATA_LOADERS, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + }, + }) + .then(response => response.json()) + .then(data => { + if (data.status === "success") { + setDataLoaderParamDefs(data.data_loaders); + } else { + console.error('Failed to fetch data loader params:', data.error); + } + }) + .catch(error => { + console.error('Failed to fetch data loader params:', error); + }); + } + const handleDBUpload = async (event: React.ChangeEvent) => { const file = event.target.files?.[0]; if (!file) return; @@ -309,7 +359,7 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function } } catch (error) { console.error('Failed to upload table:', error); - setErrorMessage({content: 'Failed to upload table. The server may need to be restarted.', severity: "error"}); + setErrorMessage({content: 'Failed to upload table, please check if the server is running', severity: "error"}); setShowError(true); } finally { setIsUploading(false); @@ -343,7 +393,7 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function } } catch (error) { console.error('Failed to upload table:', error); - setErrorMessage({content: 'Failed to upload table. 
The server may need to be restarted.', severity: "error"}); + setErrorMessage({content: 'Failed to upload table, please check if the server is running', severity: "error"}); setShowError(true); } finally { setIsUploading(false); @@ -388,12 +438,14 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function const data = await response.json(); if (data.status === 'success') { fetchTables(); - if (dbTables[selectedTabIndex]?.name === tableName) { - setSelectedTabIndex(selectedTabIndex > 0 ? selectedTabIndex - 1 : 0); - } + setSelectedTabKey(dbTables.length > 0 ? dbTables[0].name : ""); + } else { + setErrorMessage({content: data.error || 'Failed to delete table', severity: "error"}); + setShowError(true); } } catch (error) { - console.error('Failed to delete table:', error); + setErrorMessage({content: 'Failed to delete table, please check if the server is running', severity: "error"}); + setShowError(true); } }; @@ -423,7 +475,7 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function } } catch (error) { console.error('Failed to analyze table data:', error); - setErrorMessage({content: 'Failed to analyze table data', severity: "error"}); + setErrorMessage({content: 'Failed to analyze table data, please check if the server is running', severity: "error"}); setShowError(true); } }; @@ -477,8 +529,8 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function setTableDialogOpen(false); } - const handleTabChange = (event: React.SyntheticEvent, newValue: number) => { - setSelectedTabIndex(newValue); + const handleTabChange = (event: React.SyntheticEvent, newValue: string) => { + setSelectedTabKey(newValue); }; const handleCloseError = () => { @@ -536,44 +588,120 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function ); } - let mainContent = dbTables.length > 0 ? - - + let mainContent = + + 8 ? 
"auto" : false} allowScrollButtonsMobile - value={selectedTabIndex} - onChange={handleTabChange} aria-label="Database tables" - sx={{ maxWidth: '300px', maxHeight: '360px', + sx={{ + maxHeight: '360px', + px: 0.5, + pt: 1, '& .MuiTabs-scrollButtons.Mui-disabled': { opacity: 0.3, }, }} > + + available tables + + { + fetchTables(); + }}> + + + + + {dbTables.length == 0 && + no tables available} {dbTables.map((t, i) => ( + {t.name} + + } + onClick={() => { + setSelectedTabKey(t.name); + }} + sx={{textTransform: "none", minHeight: 24, p: 0.5, ml: 2}} + {...a11yProps(t.name)} + /> + ))} + + + + connect external data + {["file upload", "mysql", "kusto"].map((dataLoaderType, i) => ( + - {t.name}} - sx={{textTransform: "none", minHeight: 24, padding: 1}} - {...a11yProps(i)} + {dataLoaderType}} + onClick={() => { + setSelectedTabKey('dataLoader:' + dataLoaderType); + }} + sx={{textTransform: "none", minHeight: 24, p: 0.5, ml: 2}} + {...a11yProps(dataLoaderType)} /> ))} - - - {uploadFileButton({isUploading ? 'uploading...' : 'upload file'})} + + + The database is empty, refresh the table list or import some data to get started. + + + {uploadFileButton({isUploading ? 'uploading...' : 'upload a csv/tsv file to the local database'})} + + {dataLoaderParamDefs && Object.entries(dataLoaderParamDefs).map(([dataLoaderType, paramDefs]) => ( + + { + setIsUploading(true); + }} + onFinish={(status, message) => { + setIsUploading(false); + fetchTables(); + if (status === "error") { + setErrorMessage({content: message, severity: "error"}); + setShowError(true); + } + }} + /> + + ))} {dbTables.map((t, i) => { const currentTable = t; const showingAnalysis = tableAnalysisMap[currentTable.name] !== undefined; return ( - + @@ -608,7 +736,7 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function {showingAnalysis ? ( ) : ( = function ); })} - : - - - Database is currently empty. 
- - {uploadFileButton(Upload a csv dataset )} - or - {importButton(Import a db file)} - to get started. - - + return ( <> @@ -662,7 +780,6 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function {errorMessage?.content} - {setTableDialogOpen(false)}} @@ -683,9 +800,7 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function - - {mainContent} - + {mainContent} {isUploading && ( = function the backend database - @@ -735,4 +852,462 @@ export const DBTableSelectionDialog: React.FC<{ buttonElement: any }> = function ); +} + +export const DataLoaderForm: React.FC<{ + dataLoaderType: string, + paramDefs: {name: string, default: string, type: string, required: boolean, description: string}[], + onImport: () => void, + onFinish: (status: "success" | "error", message: string) => void +}> = ({dataLoaderType, paramDefs, onImport, onFinish}) => { + + const dispatch = useDispatch(); + + const params = useSelector((state: DataFormulatorState) => state.dataLoaderConnectParams[dataLoaderType] ?? {}); + + const [tableMetadata, setTableMetadata] = useState>({}); + let [displaySamples, setDisplaySamples] = useState>({}); + + let [isConnecting, setIsConnecting] = useState(false); + let [mode, setMode] = useState<"view tables" | "query">("view tables"); + const toggleDisplaySamples = (tableName: string) => { + setDisplaySamples({...displaySamples, [tableName]: !displaySamples[tableName]}); + } + + const handleModeChange = (event: React.MouseEvent, newMode: "view tables" | "query") => { + if (newMode != null) { + setMode(newMode); + } + }; + + let tableMetadataBox = [ + + + View Tables + Query Data + + + , + mode === "view tables" && + + + {Object.entries(tableMetadata).map(([tableName, metadata]) => { + return [ + + + toggleDisplaySamples(tableName)}> + {displaySamples[tableName] ? : } + + + + {tableName} + ({metadata.row_count > 0 ? 
`${metadata.row_count} rows × ` : ""}{metadata.columns.length} cols) + + + + {metadata.columns.map((column: any) => ( + + ))} + + + + + , + + + + + { + return Object.fromEntries(Object.entries(row).map(([key, value]: [string, any]) => { + return [key, String(value)]; + })); + })} + columnDefs={metadata.columns.map((column: any) => ({id: column.name, label: column.name}))} + rowsPerPageNum={-1} + compact={false} + isIncompleteTable={metadata.row_count > 10} + /> + + + + ] + })} + +
+
, + mode === "query" && ({name: t, fields: tableMetadata[t].columns.map((c: any) => c.name)}))} + dataLoaderParams={params} onImport={onImport} onFinish={onFinish} /> + ] + + return ( + + {isConnecting && + + } + + Data Connector ({dataLoaderType}) + + + {paramDefs.map((paramDef) => ( + + 0} + sx={{width: "270px", + '& .MuiInputLabel-root': {fontSize: 14}, + '& .MuiInputBase-root': {fontSize: 14}, + '& .MuiInputBase-input::placeholder': {fontSize: 12, fontStyle: "italic"} + }} + variant="standard" + size="small" + required={paramDef.required} + key={paramDef.name} + label={paramDef.name} + value={params[paramDef.name]} + placeholder={paramDef.description} + onChange={(event) => { + dispatch(dfActions.updateDataLoaderConnectParam({ + dataLoaderType, paramName: paramDef.name, + paramValue: event.target.value})); + }} + slotProps={{ + inputLabel: {shrink: true} + }} + /> + + ))} + {paramDefs.length > 0 && + + + } + + {Object.keys(tableMetadata).length > 0 && tableMetadataBox } + + ); +} + +export const DataQueryForm: React.FC<{ + dataLoaderType: string, + availableTables: {name: string, fields: string[]}[], + dataLoaderParams: Record, + onImport: () => void, + onFinish: (status: "success" | "error", message: string) => void +}> = ({dataLoaderType, availableTables, dataLoaderParams, onImport, onFinish}) => { + + let activeModel = useSelector(dfSelectors.getActiveModel); + + const [selectedTables, setSelectedTables] = useState(availableTables.map(t => t.name).slice(0, 5)); + + const [waiting, setWaiting] = useState(false); + + const [query, setQuery] = useState("-- query the data source / describe your goal and ask AI to help you write the query\n"); + const [queryResult, setQueryResult] = useState<{ + status: string, + message: string, + sample: any[], + code: string, + } | undefined>(undefined); + const [queryResultName, setQueryResultName] = useState(""); + + const aiCompleteQuery = (query: string) => { + if (queryResult?.status === "error") { + 
setQueryResult(undefined); + } + let data = { + data_source_metadata: { + data_loader_type: dataLoaderType, + tables: availableTables.filter(t => selectedTables.includes(t.name)) + }, + query: query, + model: activeModel + } + setWaiting(true); + fetch(getUrls().QUERY_COMPLETION, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(data) + }) + .then(response => response.json()) + .then(data => { + setWaiting(false); + if (data.status === "ok") { + setQuery(data.query); + } else { + onFinish("error", data.reasoning); + } + }) + .catch(error => { + setWaiting(false); + onFinish("error", `Failed to complete query please try again.`); + }); + } + + const handleViewQuerySample = (query: string) => { + setQueryResult(undefined); + setWaiting(true); + fetch(getUrls().DATA_LOADER_VIEW_QUERY_SAMPLE, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + data_loader_type: dataLoaderType, + data_loader_params: dataLoaderParams, + query: query + }) + }) + .then(response => response.json()) + .then(data => { + setWaiting(false); + if (data.status === "success") { + setQueryResult({ + status: "success", + message: "Data loaded successfully", + sample: data.sample, + code: query + }); + let newName = `r_${Math.random().toString(36).substring(2, 4)}`; + setQueryResultName(newName); + } else { + setQueryResult({ + status: "error", + message: data.message, + sample: [], + code: query + }); + } + }) + .catch(error => { + setWaiting(false); + setQueryResult({ + status: "error", + message: `Failed to view query sample, please try again.`, + sample: [], + code: query + }); + }); + } + + const handleImportQueryResult = () => { + setWaiting(true); + fetch(getUrls().DATA_LOADER_INGEST_DATA_FROM_QUERY, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + data_loader_type: dataLoaderType, + data_loader_params: dataLoaderParams, + query: 
queryResult?.code ?? query, + name_as: queryResultName + }) + }) + .then(response => response.json()) + .then(data => { + setWaiting(false); + if (data.status === "success") { + onFinish("success", "Data imported successfully"); + } else { + onFinish("error", data.reasoning); + } + }) + .catch(error => { + setWaiting(false); + onFinish("error", `Failed to import data, please try again.`); + }); + } + + let queryResultBox = queryResult?.status === "success" ? [ + + ({id: t, label: t}))} rowsPerPageNum={-1} compact={false} /> + , + + + setQueryResultName(event.target.value)} + /> + + + ] : []; + + return ( + + {waiting && + + } + + + query from tables: + + {availableTables.map((table) => ( + : undefined} + color={selectedTables.includes(table.name) ? "primary" : "default"} variant="outlined" + sx={{ fontSize: 11, margin: 0.25, + height: 20, borderRadius: 0.5, + borderColor: selectedTables.includes(table.name) ? "primary.main" : "rgba(0, 0, 0, 0.1)", + color: selectedTables.includes(table.name) ? "primary.main" : "text.secondary", + '&:hover': { + backgroundColor: "rgba(0, 0, 0, 0.07)", + } + }} + size="small" + onClick={() => { + setSelectedTables(selectedTables.includes(table.name) ? selectedTables.filter(t => t !== table.name) : [...selectedTables, table.name]); + }} + /> + ))} + + + + { + setQuery(tempCode); + }} + highlight={code => Prism.highlight(code, Prism.languages.sql, 'sql')} + padding={10} + style={{ + minHeight: queryResult ? 
60 : 200, + fontFamily: '"Fira code", "Fira Mono", monospace', + fontSize: 12, + paddingBottom: '24px', + backgroundColor: "rgba(0, 0, 0, 0.03)", + + overflowY: "auto" + }} + /> + + {queryResult?.status === "error" && + + {queryResult?.message} + + + } + + + + + {queryResult && queryResultBox} + + + ) } \ No newline at end of file diff --git a/src/views/DataThread.tsx b/src/views/DataThread.tsx index e1e3633f..543cd3da 100644 --- a/src/views/DataThread.tsx +++ b/src/views/DataThread.tsx @@ -52,6 +52,7 @@ import { TriggerCard } from './EncodingShelfCard'; import ChevronLeftIcon from '@mui/icons-material/ChevronLeft'; import ChevronRightIcon from '@mui/icons-material/ChevronRight'; import CloudQueueIcon from '@mui/icons-material/CloudQueue'; +import { alpha } from '@mui/material/styles'; let buildChartCard = (chartElement: { tableId: string, chartId: string, element: any }, focusedChartId?: string) => { @@ -364,7 +365,7 @@ let SingleThreadView: FC<{ let regularTableBox = c.chartId == focusedChartId) ? 
scrollRef : null} sx={{ padding: '0px' }}> + return = function DataView({ $table const dispatch = useDispatch(); const tables = useSelector((state: DataFormulatorState) => state.tables); + const theme = useTheme(); const conceptShelfItems = useSelector((state: DataFormulatorState) => state.conceptShelfItems); const focusedTableId = useSelector((state: DataFormulatorState) => state.focusedTableId); @@ -139,7 +140,7 @@ export const FreeDataViewFC: FC = function DataView({ $table let genTableLink = (t: DictTable) => { dispatch(dfActions.setFocusedTable(t.id)) }}> + color={theme.palette.primary.main} onClick={()=>{ dispatch(dfActions.setFocusedTable(t.id)) }}> {t.displayId || t.id} ; diff --git a/src/views/ModelSelectionDialog.tsx b/src/views/ModelSelectionDialog.tsx index d0e57ac5..461386f7 100644 --- a/src/views/ModelSelectionDialog.tsx +++ b/src/views/ModelSelectionDialog.tsx @@ -91,6 +91,39 @@ export const ModelSelectionButton: React.FC<{}> = ({ }) => { }); }, []); + useEffect(() => { + const findWorkingModel = async () => { + for (let i = 0; i < models.length; i++) { + if (testedModels.find(t => t.id == models[i].id)) { + continue; + } + const model = models[i]; + const message = { + method: 'POST', + headers: { 'Content-Type': 'application/json', }, + body: JSON.stringify({ + model: model, + }), + }; + try { + const response = await fetch(getUrls().TEST_MODEL, {...message }); + const data = await response.json(); + const status = data["status"] || 'error'; + updateModelStatus(model, status, data["message"] || ""); + if (status === 'ok') { + break; + } + } catch (error) { + updateModelStatus(model, 'error', (error as Error).message || 'Failed to test model'); + } + } + }; + + if (models.length > 0 && testedModels.filter(t => t.status == 'ok').length == 0) { + findWorkingModel(); + } + }, []); + let updateModelStatus = (model: ModelConfig, status: 'ok' | 'error' | 'testing' | 'unknown', message: string) => { dispatch(dfActions.updateModelStatus({id: model.id, 
status, message})); } diff --git a/src/views/VisualizationView.tsx b/src/views/VisualizationView.tsx index ae80e79b..7460c0bc 100644 --- a/src/views/VisualizationView.tsx +++ b/src/views/VisualizationView.tsx @@ -42,7 +42,7 @@ import AnimateOnChange from 'react-animate-on-change' import '../scss/VisualizationView.scss'; import { useDispatch, useSelector } from 'react-redux'; -import { DataFormulatorState, dfActions } from '../app/dfSlice'; +import { DataFormulatorState, dfActions, getSessionId } from '../app/dfSlice'; import { assembleVegaChart, extractFieldsFromEncodingMap, getUrls, prepVisTable } from '../app/utils'; import { Chart, EncodingItem, EncodingMap, FieldItem } from '../components/ComponentType'; import { DictTable } from "../components/ComponentType"; @@ -236,8 +236,6 @@ export let checkChartAvailabilityOnPreparedData = (chart: Chart, conceptShelfIte } return undefined; }).filter(f => f != undefined); - console.log("visFieldsFinalNames", visFieldsFinalNames); - console.log("visTableRows", visTableRows.slice(0, 10)); return visFieldsFinalNames.length > 0 && visTableRows.length > 0 && visFieldsFinalNames.every(name => Object.keys(visTableRows[0]).includes(name)); } @@ -365,6 +363,8 @@ export const ChartEditorFC: FC<{ cachedCandidates: DictTable[], const [errorMessage, setErrorMessage] = useState<{content: string, severity: "error" | "warning" | "info" | "success"}>({content: "", severity: "error"}); const [showError, setShowError] = useState(false); + + let createVisTableRowsLocal = (rows: any[]) => { if (visFields.length == 0) { return rows; diff --git a/yarn.lock b/yarn.lock index 4d7d5334..265b56d9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -88,13 +88,18 @@ dependencies: "@babel/types" "^7.25.6" -"@babel/runtime@^7.12.1", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": +"@babel/runtime@^7.12.1", 
"@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.22.5", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": version "7.26.10" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.10.tgz#a07b4d8fa27af131a633d7b3524db803eb4764c2" integrity sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw== dependencies: regenerator-runtime "^0.14.0" +"@babel/runtime@^7.27.0": + version "7.27.1" + resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.1.tgz#9fce313d12c9a77507f264de74626e87fd0dc541" + integrity sha512-1x3D2xEk2fRo3PAhwQwu5UubzgiVWSXTBfWpVd2Mx2AzRqJuDJCsgaDVZ7HB5iGzDW1Hl1sWN2mFyKjmR9uAog== + "@babel/template@^7.25.0": version "7.25.0" resolved "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz#e733dc3134b4fede528c15bc95e89cb98c52592a" @@ -135,16 +140,16 @@ "@babel/helper-validator-identifier" "^7.24.7" to-fast-properties "^2.0.0" -"@emotion/babel-plugin@^11.11.0": - version "11.11.0" - resolved "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz#c2d872b6a7767a9d176d007f5b31f7d504bb5d6c" - integrity sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ== +"@emotion/babel-plugin@^11.13.5": + version "11.13.5" + resolved "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.13.5.tgz#eab8d65dbded74e0ecfd28dc218e75607c4e7bc0" + integrity sha512-pxHCpT2ex+0q+HH91/zsdHkw/lXd468DIN2zvfvLtPKLLMo6gQj7oLObq8PhkrxOZb/gGCq03S3Z7PDhS8pduQ== dependencies: "@babel/helper-module-imports" "^7.16.7" "@babel/runtime" "^7.18.3" - "@emotion/hash" "^0.9.1" - "@emotion/memoize" "^0.8.1" - "@emotion/serialize" "^1.1.2" + "@emotion/hash" "^0.9.2" + "@emotion/memoize" "^0.9.0" + "@emotion/serialize" "^1.3.3" babel-plugin-macros "^3.1.0" convert-source-map "^1.5.0" escape-string-regexp "^4.0.0" @@ -162,15 +167,15 @@ "@emotion/utils" "0.11.3" "@emotion/weak-memoize" 
"0.2.5" -"@emotion/cache@^11.11.0": - version "11.11.0" - resolved "https://registry.npmjs.org/@emotion/cache/-/cache-11.11.0.tgz#809b33ee6b1cb1a625fef7a45bc568ccd9b8f3ff" - integrity sha512-P34z9ssTCBi3e9EI1ZsWpNHcfY1r09ZO0rZbRO2ob3ZQMnFI35jB536qoXbkdesr5EUhYi22anuEJuyxifaqAQ== +"@emotion/cache@^11.13.5", "@emotion/cache@^11.14.0": + version "11.14.0" + resolved "https://registry.npmjs.org/@emotion/cache/-/cache-11.14.0.tgz#ee44b26986eeb93c8be82bb92f1f7a9b21b2ed76" + integrity sha512-L/B1lc/TViYk4DcpGxtAVbx0ZyiKM5ktoIyafGkH6zg/tj+mA+NE//aPYKG0k8kCHSHVJrpLpcAlOBEXQ3SavA== dependencies: - "@emotion/memoize" "^0.8.1" - "@emotion/sheet" "^1.2.2" - "@emotion/utils" "^1.2.1" - "@emotion/weak-memoize" "^0.3.1" + "@emotion/memoize" "^0.9.0" + "@emotion/sheet" "^1.4.0" + "@emotion/utils" "^1.4.2" + "@emotion/weak-memoize" "^0.4.0" stylis "4.2.0" "@emotion/hash@0.8.0": @@ -178,40 +183,40 @@ resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== -"@emotion/hash@^0.9.1": - version "0.9.1" - resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.1.tgz#4ffb0055f7ef676ebc3a5a91fb621393294e2f43" - integrity sha512-gJB6HLm5rYwSLI6PQa+X1t5CFGrv1J1TWG+sOyMCeKz2ojaj6Fnl/rZEspogG+cvqbt4AE/2eIyD2QfLKTBNlQ== +"@emotion/hash@^0.9.2": + version "0.9.2" + resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz#ff9221b9f58b4dfe61e619a7788734bd63f6898b" + integrity sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g== -"@emotion/is-prop-valid@^1.2.1": - version "1.2.1" - resolved "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.2.1.tgz#23116cf1ed18bfeac910ec6436561ecb1a3885cc" - integrity sha512-61Mf7Ufx4aDxx1xlDeOm8aFFigGHE4z+0sKCa+IHCeZKiyP9RLD0Mmx7m8b9/Cf37f7NAvQOOJAbQQGVr5uERw== +"@emotion/is-prop-valid@^1.3.0": + version "1.3.1" + resolved 
"https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.3.1.tgz#8d5cf1132f836d7adbe42cf0b49df7816fc88240" + integrity sha512-/ACwoqx7XQi9knQs/G0qKvv5teDMhD7bXYns9N/wM8ah8iNb8jZ2uNO0YOgiq2o2poIvVtJS2YALasQuMSQ7Kw== dependencies: - "@emotion/memoize" "^0.8.1" + "@emotion/memoize" "^0.9.0" "@emotion/memoize@0.7.4": version "0.7.4" resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz#19bf0f5af19149111c40d98bb0cf82119f5d9eeb" integrity sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw== -"@emotion/memoize@^0.8.1": - version "0.8.1" - resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz#c1ddb040429c6d21d38cc945fe75c818cfb68e17" - integrity sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA== +"@emotion/memoize@^0.9.0": + version "0.9.0" + resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz#745969d649977776b43fc7648c556aaa462b4102" + integrity sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ== -"@emotion/react@^11.9.0": - version "11.11.1" - resolved "https://registry.npmjs.org/@emotion/react/-/react-11.11.1.tgz#b2c36afac95b184f73b08da8c214fdf861fa4157" - integrity sha512-5mlW1DquU5HaxjLkfkGN1GA/fvVGdyHURRiX/0FHl2cfIfRxSOfmxEH5YS43edp0OldZrZ+dkBKbngxcNCdZvA== +"@emotion/react@^11.14.0": + version "11.14.0" + resolved "https://registry.npmjs.org/@emotion/react/-/react-11.14.0.tgz#cfaae35ebc67dd9ef4ea2e9acc6cd29e157dd05d" + integrity sha512-O000MLDBDdk/EohJPFUqvnp4qnHeYkVP5B0xEG0D/L7cOKP9kefu2DXn8dj74cQfsEzUqh+sr1RzFqiL1o+PpA== dependencies: "@babel/runtime" "^7.18.3" - "@emotion/babel-plugin" "^11.11.0" - "@emotion/cache" "^11.11.0" - "@emotion/serialize" "^1.1.2" - "@emotion/use-insertion-effect-with-fallbacks" "^1.0.1" - "@emotion/utils" "^1.2.1" - "@emotion/weak-memoize" "^0.3.1" + "@emotion/babel-plugin" "^11.13.5" + "@emotion/cache" "^11.14.0" + 
"@emotion/serialize" "^1.3.3" + "@emotion/use-insertion-effect-with-fallbacks" "^1.2.0" + "@emotion/utils" "^1.4.2" + "@emotion/weak-memoize" "^0.4.0" hoist-non-react-statics "^3.3.1" "@emotion/serialize@^0.11.15", "@emotion/serialize@^0.11.16": @@ -225,15 +230,15 @@ "@emotion/utils" "0.11.3" csstype "^2.5.7" -"@emotion/serialize@^1.1.2": - version "1.1.2" - resolved "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.1.2.tgz#017a6e4c9b8a803bd576ff3d52a0ea6fa5a62b51" - integrity sha512-zR6a/fkFP4EAcCMQtLOhIgpprZOwNmCldtpaISpvz348+DP4Mz8ZoKaGGCQpbzepNIUWbq4w6hNZkwDyKoS+HA== +"@emotion/serialize@^1.3.3": + version "1.3.3" + resolved "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.3.3.tgz#d291531005f17d704d0463a032fe679f376509e8" + integrity sha512-EISGqt7sSNWHGI76hC7x1CksiXPahbxEOrC5RjmFRJTqLyEK9/9hZvBbiYn70dw4wuwMKiEMCUlR6ZXTSWQqxA== dependencies: - "@emotion/hash" "^0.9.1" - "@emotion/memoize" "^0.8.1" - "@emotion/unitless" "^0.8.1" - "@emotion/utils" "^1.2.1" + "@emotion/hash" "^0.9.2" + "@emotion/memoize" "^0.9.0" + "@emotion/unitless" "^0.10.0" + "@emotion/utils" "^1.4.2" csstype "^3.0.2" "@emotion/sheet@0.9.4": @@ -241,22 +246,22 @@ resolved "https://registry.npmjs.org/@emotion/sheet/-/sheet-0.9.4.tgz#894374bea39ec30f489bbfc3438192b9774d32e5" integrity sha512-zM9PFmgVSqBw4zL101Q0HrBVTGmpAxFZH/pYx/cjJT5advXguvcgjHFTCaIO3enL/xr89vK2bh0Mfyj9aa0ANA== -"@emotion/sheet@^1.2.2": - version "1.2.2" - resolved "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.2.2.tgz#d58e788ee27267a14342303e1abb3d508b6d0fec" - integrity sha512-0QBtGvaqtWi+nx6doRwDdBIzhNdZrXUppvTM4dtZZWEGTXL/XE/yJxLMGlDT1Gt+UHH5IX1n+jkXyytE/av7OA== +"@emotion/sheet@^1.4.0": + version "1.4.0" + resolved "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.4.0.tgz#c9299c34d248bc26e82563735f78953d2efca83c" + integrity sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg== -"@emotion/styled@^11.8.1": - version "11.11.0" - resolved 
"https://registry.npmjs.org/@emotion/styled/-/styled-11.11.0.tgz#26b75e1b5a1b7a629d7c0a8b708fbf5a9cdce346" - integrity sha512-hM5Nnvu9P3midq5aaXj4I+lnSfNi7Pmd4EWk1fOZ3pxookaQTNew6bp4JaCBYM4HVFZF9g7UjJmsUmC2JlxOng== +"@emotion/styled@^11.14.0": + version "11.14.0" + resolved "https://registry.npmjs.org/@emotion/styled/-/styled-11.14.0.tgz#f47ca7219b1a295186d7661583376fcea95f0ff3" + integrity sha512-XxfOnXFffatap2IyCeJyNov3kiDQWoR08gPUQxvbL7fxKryGBKUZUkG6Hz48DZwVrJSVh9sJboyV1Ds4OW6SgA== dependencies: "@babel/runtime" "^7.18.3" - "@emotion/babel-plugin" "^11.11.0" - "@emotion/is-prop-valid" "^1.2.1" - "@emotion/serialize" "^1.1.2" - "@emotion/use-insertion-effect-with-fallbacks" "^1.0.1" - "@emotion/utils" "^1.2.1" + "@emotion/babel-plugin" "^11.13.5" + "@emotion/is-prop-valid" "^1.3.0" + "@emotion/serialize" "^1.3.3" + "@emotion/use-insertion-effect-with-fallbacks" "^1.2.0" + "@emotion/utils" "^1.4.2" "@emotion/stylis@0.8.5": version "0.8.5" @@ -268,35 +273,35 @@ resolved "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== -"@emotion/unitless@^0.8.1": - version "0.8.1" - resolved "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz#182b5a4704ef8ad91bde93f7a860a88fd92c79a3" - integrity sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ== +"@emotion/unitless@^0.10.0": + version "0.10.0" + resolved "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.10.0.tgz#2af2f7c7e5150f497bdabd848ce7b218a27cf745" + integrity sha512-dFoMUuQA20zvtVTuxZww6OHoJYgrzfKM1t52mVySDJnMSEa08ruEvdYQbhvyu6soU+NeLVd3yKfTfT0NeV6qGg== -"@emotion/use-insertion-effect-with-fallbacks@^1.0.1": - version "1.0.1" - resolved 
"https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz#08de79f54eb3406f9daaf77c76e35313da963963" - integrity sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw== +"@emotion/use-insertion-effect-with-fallbacks@^1.2.0": + version "1.2.0" + resolved "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.2.0.tgz#8a8cb77b590e09affb960f4ff1e9a89e532738bf" + integrity sha512-yJMtVdH59sxi/aVJBpk9FQq+OR8ll5GT8oWd57UpeaKEVGab41JWaCFA7FRLoMLloOZF/c/wsPoe+bfGmRKgDg== "@emotion/utils@0.11.3": version "0.11.3" resolved "https://registry.npmjs.org/@emotion/utils/-/utils-0.11.3.tgz#a759863867befa7e583400d322652a3f44820924" integrity sha512-0o4l6pZC+hI88+bzuaX/6BgOvQVhbt2PfmxauVaYOGgbsAw14wdKyvMCZXnsnsHys94iadcF+RG/wZyx6+ZZBw== -"@emotion/utils@^1.2.1": - version "1.2.1" - resolved "https://registry.npmjs.org/@emotion/utils/-/utils-1.2.1.tgz#bbab58465738d31ae4cb3dbb6fc00a5991f755e4" - integrity sha512-Y2tGf3I+XVnajdItskUCn6LX+VUDmP6lTL4fcqsXAv43dnlbZiuW4MWQW38rW/BVWSE7Q/7+XQocmpnRYILUmg== +"@emotion/utils@^1.4.2": + version "1.4.2" + resolved "https://registry.npmjs.org/@emotion/utils/-/utils-1.4.2.tgz#6df6c45881fcb1c412d6688a311a98b7f59c1b52" + integrity sha512-3vLclRofFziIa3J2wDh9jjbkUz9qk5Vi3IZ/FSTKViB0k+ef0fPV7dYrUIugbgupYDx7v9ud/SjrtEP8Y4xLoA== "@emotion/weak-memoize@0.2.5": version "0.2.5" resolved "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== -"@emotion/weak-memoize@^0.3.1": - version "0.3.1" - resolved "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz#d0fce5d07b0620caa282b5131c297bb60f9d87e6" - integrity sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww== 
+"@emotion/weak-memoize@^0.4.0": + version "0.4.0" + resolved "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz#5e13fac887f08c44f76b0ccaf3370eb00fec9bb6" + integrity sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg== "@esbuild/aix-ppc64@0.21.5": version "0.21.5" @@ -536,24 +541,10 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" -"@mui/base@5.0.0-beta.7": - version "5.0.0-beta.7" - resolved "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.7.tgz#01cb99ac098af0ba989c7abc1474e3291c29414f" - integrity sha512-Pjbwm6gjiS96kOMF7E5fjEJsenc0tZBesrLQ4rrdi3eT/c/yhSWnPbCUkHSz8bnS0l3/VQ8bA+oERSGSV2PK6A== - dependencies: - "@babel/runtime" "^7.22.5" - "@emotion/is-prop-valid" "^1.2.1" - "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" - "@popperjs/core" "^2.11.8" - clsx "^1.2.1" - prop-types "^15.8.1" - react-is "^18.2.0" - -"@mui/core-downloads-tracker@^5.14.0": - version "5.14.0" - resolved "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.14.0.tgz#ca394a1c53c215f4c6bf7f7460d8211298d7bbf6" - integrity sha512-SYBOVCatVDUf/lbrLGah09bHhX5WfUXg7kSskfLILr6SvKRni0NLp0aonxQ0SMALVVK3Qwa6cW4CdWuwS0gC1w== +"@mui/core-downloads-tracker@^7.0.2": + version "7.0.2" + resolved "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-7.0.2.tgz#2e6dcaf5027a3957d37797b8dce5c15c78fd4b82" + integrity sha512-TfeFU9TgN1N06hyb/pV/63FfO34nijZRMqgHk0TJ3gkl4Fbd+wZ73+ZtOd7jag6hMmzO9HSrBc6Vdn591nhkAg== "@mui/icons-material@^5.14.0": version "5.14.0" @@ -562,72 +553,77 @@ dependencies: "@babel/runtime" "^7.22.5" -"@mui/material@^5.6.0": - version "5.14.0" - resolved "https://registry.npmjs.org/@mui/material/-/material-5.14.0.tgz#3d2afb4a3643774370cb5add873abcbbe8e7af27" - integrity sha512-HP7CP71NhMkui2HUIEKl2/JfuHMuoarSUWAKlNw6s17bl/Num9rN61EM6uUzc2A2zHjj/00A66GnvDnmixEJEw== +"@mui/material@^7.0.2": + version "7.0.2" + resolved 
"https://registry.npmjs.org/@mui/material/-/material-7.0.2.tgz#444de6ab1d0856b638f98833f536c80293c66005" + integrity sha512-rjJlJ13+3LdLfobRplkXbjIFEIkn6LgpetgU/Cs3Xd8qINCCQK9qXQIjjQ6P0FXFTPFzEVMj0VgBR1mN+FhOcA== dependencies: - "@babel/runtime" "^7.22.5" - "@mui/base" "5.0.0-beta.7" - "@mui/core-downloads-tracker" "^5.14.0" - "@mui/system" "^5.14.0" - "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" - "@types/react-transition-group" "^4.4.6" - clsx "^1.2.1" - csstype "^3.1.2" + "@babel/runtime" "^7.27.0" + "@mui/core-downloads-tracker" "^7.0.2" + "@mui/system" "^7.0.2" + "@mui/types" "^7.4.1" + "@mui/utils" "^7.0.2" + "@popperjs/core" "^2.11.8" + "@types/react-transition-group" "^4.4.12" + clsx "^2.1.1" + csstype "^3.1.3" prop-types "^15.8.1" - react-is "^18.2.0" + react-is "^19.1.0" react-transition-group "^4.4.5" -"@mui/private-theming@^5.13.7": - version "5.13.7" - resolved "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.13.7.tgz#2f8ef5da066f3c6c6423bd4260d003a28d10b099" - integrity sha512-qbSr+udcij5F9dKhGX7fEdx2drXchq7htLNr2Qg2Ma+WJ6q0ERlEqGSBiPiVDJkptcjeVL4DGmcf1wl5+vD4EA== +"@mui/private-theming@^7.0.2": + version "7.0.2" + resolved "https://registry.npmjs.org/@mui/private-theming/-/private-theming-7.0.2.tgz#18bd6c464d5af854e37ac3947f411ac76467c249" + integrity sha512-6lt8heDC9wN8YaRqEdhqnm0cFCv08AMf4IlttFvOVn7ZdKd81PNpD/rEtPGLLwQAFyyKSxBG4/2XCgpbcdNKiA== dependencies: - "@babel/runtime" "^7.22.5" - "@mui/utils" "^5.13.7" + "@babel/runtime" "^7.27.0" + "@mui/utils" "^7.0.2" prop-types "^15.8.1" -"@mui/styled-engine@^5.13.2": - version "5.13.2" - resolved "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.13.2.tgz#c87bd61c0ab8086d34828b6defe97c02bcd642ef" - integrity sha512-VCYCU6xVtXOrIN8lcbuPmoG+u7FYuOERG++fpY74hPpEWkyFQG97F+/XfTQVYzlR2m7nPjnwVUgATcTCMEaMvw== +"@mui/styled-engine@^7.0.2": + version "7.0.2" + resolved 
"https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-7.0.2.tgz#7f13cd8b8cd793fbcd02fff5a8bee64069dc9265" + integrity sha512-11Bt4YdHGlh7sB8P75S9mRCUxTlgv7HGbr0UKz6m6Z9KLeiw1Bm9y/t3iqLLVMvSHYB6zL8X8X+LmfTE++gyBw== dependencies: - "@babel/runtime" "^7.21.0" - "@emotion/cache" "^11.11.0" - csstype "^3.1.2" + "@babel/runtime" "^7.27.0" + "@emotion/cache" "^11.13.5" + "@emotion/serialize" "^1.3.3" + "@emotion/sheet" "^1.4.0" + csstype "^3.1.3" prop-types "^15.8.1" -"@mui/system@^5.14.0": - version "5.14.0" - resolved "https://registry.npmjs.org/@mui/system/-/system-5.14.0.tgz#b7eeb799ae67d27b887fec4917ebd5e2be5a2faa" - integrity sha512-0HZGkX8miJbiNw+rjlZ9l0Cfkz1bSqfSHQH0EH9J+nx0aAm5cBleg9piOlLdCNIWGgecCqsw4x62erGrGjjcJg== - dependencies: - "@babel/runtime" "^7.22.5" - "@mui/private-theming" "^5.13.7" - "@mui/styled-engine" "^5.13.2" - "@mui/types" "^7.2.4" - "@mui/utils" "^5.13.7" - clsx "^1.2.1" - csstype "^3.1.2" +"@mui/system@^7.0.2": + version "7.0.2" + resolved "https://registry.npmjs.org/@mui/system/-/system-7.0.2.tgz#e03222375f486d697a4817865a81a1ac7d88f4d2" + integrity sha512-yFUraAWYWuKIISPPEVPSQ1NLeqmTT4qiQ+ktmyS8LO/KwHxB+NNVOacEZaIofh5x1NxY8rzphvU5X2heRZ/RDA== + dependencies: + "@babel/runtime" "^7.27.0" + "@mui/private-theming" "^7.0.2" + "@mui/styled-engine" "^7.0.2" + "@mui/types" "^7.4.1" + "@mui/utils" "^7.0.2" + clsx "^2.1.1" + csstype "^3.1.3" prop-types "^15.8.1" -"@mui/types@^7.2.4": - version "7.2.4" - resolved "https://registry.npmjs.org/@mui/types/-/types-7.2.4.tgz#b6fade19323b754c5c6de679a38f068fd50b9328" - integrity sha512-LBcwa8rN84bKF+f5sDyku42w1NTxaPgPyYKODsh01U1fVstTClbUoSA96oyRBnSNyEiAVjKm6Gwx9vjR+xyqHA== +"@mui/types@^7.4.1": + version "7.4.1" + resolved "https://registry.npmjs.org/@mui/types/-/types-7.4.1.tgz#5611268faa0b46ab0c622c02b54f3f30f9809c2d" + integrity sha512-gUL8IIAI52CRXP/MixT1tJKt3SI6tVv4U/9soFsTtAsHzaJQptZ42ffdHZV3niX1ei0aUgMvOxBBN0KYqdG39g== + dependencies: + "@babel/runtime" "^7.27.0" -"@mui/utils@^5.13.7": - 
version "5.13.7" - resolved "https://registry.npmjs.org/@mui/utils/-/utils-5.13.7.tgz#7e6a8336e05eb2642667a5c02eb605351e27ec20" - integrity sha512-/3BLptG/q0u36eYED7Nhf4fKXmcKb6LjjT7ZMwhZIZSdSxVqDqSTmATW3a56n3KEPQUXCU9TpxAfCBQhs6brVA== +"@mui/utils@^7.0.2": + version "7.0.2" + resolved "https://registry.npmjs.org/@mui/utils/-/utils-7.0.2.tgz#b6842a9f979a619b65011a84a1964b85b205a9a4" + integrity sha512-72gcuQjPzhj/MLmPHLCgZjy2VjOH4KniR/4qRtXTTXIEwbkgcN+Y5W/rC90rWtMmZbjt9svZev/z+QHUI4j74w== dependencies: - "@babel/runtime" "^7.22.5" - "@types/prop-types" "^15.7.5" - "@types/react-is" "^18.2.1" + "@babel/runtime" "^7.27.0" + "@mui/types" "^7.4.1" + "@types/prop-types" "^15.7.14" + clsx "^2.1.1" prop-types "^15.8.1" - react-is "^18.2.0" + react-is "^19.1.0" "@nodelib/fs.scandir@2.1.5": version "2.1.5" @@ -1138,11 +1134,16 @@ resolved "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.0.tgz" integrity sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ== -"@types/prop-types@*", "@types/prop-types@^15.7.5": +"@types/prop-types@*": version "15.7.5" resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== +"@types/prop-types@^15.7.14": + version "15.7.14" + resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.14.tgz#1433419d73b2a7ebfc6918dcefd2ec0d5cd698f2" + integrity sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ== + "@types/react-dom@^18.3.0": version "18.3.0" resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.3.0.tgz#0cbc818755d87066ab6ca74fbedb2547d74a82b0" @@ -1150,19 +1151,10 @@ dependencies: "@types/react" "*" -"@types/react-is@^18.2.1": - version "18.2.1" - resolved 
"https://registry.npmjs.org/@types/react-is/-/react-is-18.2.1.tgz#61d01c2a6fc089a53520c0b66996d458fdc46863" - integrity sha512-wyUkmaaSZEzFZivD8F2ftSyAfk6L+DfFliVj/mYdOXbVjRcS87fQJLTnhk6dRZPuJjI+9g6RZJO4PNCngUrmyw== - dependencies: - "@types/react" "*" - -"@types/react-transition-group@^4.4.6": - version "4.4.6" - resolved "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.6.tgz#18187bcda5281f8e10dfc48f0943e2fdf4f75e2e" - integrity sha512-VnCdSxfcm08KjsJVQcfBmhEQAPnLB8G08hAxn39azX1qYBQ/5RVQuoHuKIcfKOdncuaUvEpFKFzEvbtIMsfVew== - dependencies: - "@types/react" "*" +"@types/react-transition-group@^4.4.12": + version "4.4.12" + resolved "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.12.tgz#b5d76568485b02a307238270bfe96cb51ee2a044" + integrity sha512-8TV6R3h2j7a91c+1DXdJi3Syo69zzIZbz7Lg5tORM5LEJG7X/E6a1V3drRyBRZq7/utz7A+c4OgYLiLcYGHG6w== "@types/react@*": version "18.2.15" @@ -1647,16 +1639,16 @@ clone@~2.1.2: resolved "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz" integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= -clsx@^1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" - integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== - clsx@^2.0.0: version "2.1.0" resolved "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz#e851283bcb5c80ee7608db18487433f7b23f77cb" integrity sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg== +clsx@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999" + integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA== + codepage@~1.15.0: version "1.15.0" resolved "https://registry.npmjs.org/codepage/-/codepage-1.15.0.tgz#2e00519024b39424ec66eeb3ec07227e692618ab" @@ -1757,11 +1749,16 @@ 
csstype@^2.5.7: resolved "https://registry.npmjs.org/csstype/-/csstype-2.6.21.tgz#2efb85b7cc55c80017c66a5ad7cbd931fda3a90e" integrity sha512-Z1PhmomIfypOpoMjRQB70jfvy/wxT50qW08YXO5lMIJkrdq4yOTR+AW7FqutScmB9NkLwxo+jU+kZLbofZZq/w== -csstype@^3.0.2, csstype@^3.1.2: +csstype@^3.0.2: version "3.1.2" resolved "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz#1d4bf9d572f11c14031f0436e1c10bc1f571f50b" integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ== +csstype@^3.1.3: + version "3.1.3" + resolved "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" + integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== + "d3-array@1 - 3", "d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3: version "3.2.0" resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz" @@ -3446,11 +3443,16 @@ react-is@^16.13.1, react-is@^16.7.0: resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== -react-is@^18.0.0, react-is@^18.2.0: +react-is@^18.0.0: version "18.2.0" resolved "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz" integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== +react-is@^19.1.0: + version "19.1.0" + resolved "https://registry.npmjs.org/react-is/-/react-is-19.1.0.tgz#805bce321546b7e14c084989c77022351bbdd11b" + integrity sha512-Oe56aUPnkHyyDxxkvqtd7KkdQP5uIUfHxd5XTb3wE9d/kRnZLmKbDB0GWk919tdQ+mxxPtG6EAs6RMT6i1qtHg== + react-lifecycles-compat@^3.0.4: version "3.0.4" resolved "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz"