diff --git a/.github/workflows/build-deploy-zodiac.yml b/.github/workflows/build-deploy-zodiac.yml index 67402dc..57a049d 100644 --- a/.github/workflows/build-deploy-zodiac.yml +++ b/.github/workflows/build-deploy-zodiac.yml @@ -36,7 +36,7 @@ jobs: # PUBLIC_URL="/dev/${{ env.BRANCH_NAME }}/" yarn build export PUBLIC_URL="/dev/${{ env.BRANCH_NAME }}/" export REACT_APP_API_URL="/dev/${{ env.BRANCH_NAME }}/api/v1" - export REACT_APP_USE_CORS=true + export REACT_APP_USE_CORS=false yarn build - name: Copy JS libraries (jdata, bjdata etc.) diff --git a/backend/migrations/20260127203625-create-collections.js b/backend/migrations/20260127203625-create-collections.js new file mode 100644 index 0000000..cc4e25d --- /dev/null +++ b/backend/migrations/20260127203625-create-collections.js @@ -0,0 +1,62 @@ +"use strict"; + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("collections", { + id: { + type: Sequelize.INTEGER, + autoIncrement: true, + primaryKey: true, + allowNull: false, + }, + user_id: { + type: Sequelize.INTEGER, + allowNull: false, + references: { + model: "users", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + name: { + type: Sequelize.STRING(100), + allowNull: false, + }, + description: { + type: Sequelize.TEXT, + allowNull: true, + }, + is_public: { + type: Sequelize.BOOLEAN, + defaultValue: false, + allowNull: false, + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal("CURRENT_TIMESTAMP"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal("CURRENT_TIMESTAMP"), + }, + }); + + // Add indexes + await queryInterface.addIndex("collections", ["user_id"], { + name: "collections_user_id_idx", + }); + + await queryInterface.addIndex("collections", ["user_id", "name"], { + unique: true, + name: "collections_user_name_unique", + }); + }, + + async 
down(queryInterface, Sequelize) { + await queryInterface.dropTable("collections"); + }, +}; diff --git a/backend/migrations/20260127203731-create-collection-datasets.js b/backend/migrations/20260127203731-create-collection-datasets.js new file mode 100644 index 0000000..e654e59 --- /dev/null +++ b/backend/migrations/20260127203731-create-collection-datasets.js @@ -0,0 +1,62 @@ +"use strict"; + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("collection_datasets", { + id: { + type: Sequelize.INTEGER, + autoIncrement: true, + primaryKey: true, + allowNull: false, + }, + collection_id: { + type: Sequelize.INTEGER, + allowNull: false, + references: { + model: "collections", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + dataset_id: { + type: Sequelize.INTEGER, + allowNull: false, + references: { + model: "datasets", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal("CURRENT_TIMESTAMP"), + }, + }); + + // Add indexes + await queryInterface.addIndex("collection_datasets", ["collection_id"], { + name: "collection_datasets_collection_id_idx", + }); + + await queryInterface.addIndex("collection_datasets", ["dataset_id"], { + name: "collection_datasets_dataset_id_idx", + }); + + await queryInterface.addIndex( + "collection_datasets", + ["collection_id", "dataset_id"], + { + unique: true, + name: "collection_datasets_unique", + } + ); + }, + + async down(queryInterface, Sequelize) { + await queryInterface.dropTable("collection_datasets"); + }, +}; diff --git a/backend/migrations/20260127203948-create-projects.js b/backend/migrations/20260127203948-create-projects.js new file mode 100644 index 0000000..5061a9c --- /dev/null +++ b/backend/migrations/20260127203948-create-projects.js @@ -0,0 +1,61 @@ +"use strict"; + +/** @type 
{import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("projects", { + id: { + type: Sequelize.INTEGER, + autoIncrement: true, + primaryKey: true, + allowNull: false, + }, + user_id: { + type: Sequelize.INTEGER, + allowNull: false, + references: { + model: "users", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + name: { + type: Sequelize.STRING(200), + allowNull: false, + }, + public_id: { + type: Sequelize.STRING(12), + allowNull: false, + unique: true, + defaultValue: "", + }, + description: { + type: Sequelize.TEXT, + allowNull: true, + }, + extractor_state: { + type: Sequelize.JSON, + allowNull: true, + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal("CURRENT_TIMESTAMP"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal("CURRENT_TIMESTAMP"), + }, + }); + // Add indexes + await queryInterface.addIndex("projects", ["user_id"], { + name: "projects_user_id_idx", + }); + }, + + async down(queryInterface, Sequelize) { + await queryInterface.dropTable("projects"); + }, +}; diff --git a/backend/package-lock.json b/backend/package-lock.json index 423ee9c..70f2f74 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -16,6 +16,7 @@ "dotenv": "^17.2.3", "express": "^5.1.0", "jsonwebtoken": "^9.0.2", + "nanoid": "^3.3.11", "nodemailer": "^7.0.11", "passport": "^0.7.0", "passport-google-oauth20": "^2.0.0", @@ -2362,6 +2363,24 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "license": "MIT" }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, "node_modules/napi-build-utils": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz", diff --git a/backend/package.json b/backend/package.json index d4d9b83..1dde6de 100644 --- a/backend/package.json +++ b/backend/package.json @@ -29,6 +29,7 @@ "dotenv": "^17.2.3", "express": "^5.1.0", "jsonwebtoken": "^9.0.2", + "nanoid": "^3.3.11", "nodemailer": "^7.0.11", "passport": "^0.7.0", "passport-google-oauth20": "^2.0.0", diff --git a/backend/src/controllers/activity.controller.js b/backend/src/controllers/activity.controller.js index 1463a2a..597a23d 100644 --- a/backend/src/controllers/activity.controller.js +++ b/backend/src/controllers/activity.controller.js @@ -383,6 +383,177 @@ const getMostViewedDatasets = async (req, res) => { } }; +// get dataset statistics (views count and likes count) +const getDatasetStats = async (req, res) => { + try { + const { dbName, datasetId } = req.params; + + const dataset = await Dataset.findOne({ + where: { couch_db: dbName, ds_id: datasetId }, + attributes: ["id", "couch_db", "ds_id", "views_count"], + }); + + if (!dataset) { + return res.status(200).json({ + viewsCount: 0, + likesCount: 0, + dataset: null, + }); + } + + // Count how many users liked this dataset + const likesCount = await DatasetLike.count({ + where: { dataset_id: dataset.id }, + }); + + res.status(200).json({ + viewsCount: dataset.views_count, + likesCount: likesCount, + dataset: { + id: dataset.id, + couch_db: dataset.couch_db, + ds_id: dataset.ds_id, + views_count: dataset.views_count, + likes_count: likesCount, + }, + }); + } catch (error) { + console.error("Get dataset stats error:", error); + res.status(500).json({ + message: "Error fetching dataset statistics", + error: error.message, + }); + } +}; + +// check user 
activity +const checkUserActivity = async (req, res) => { + try { + const user = req.user; + const { dbName, datasetId } = req.params; + + // If user is not authenticated, return false for both + if (!user) { + return res.status(200).json({ + isLiked: false, + isSaved: false, + }); + } + + // Find dataset + const dataset = await Dataset.findOne({ + where: { couch_db: dbName, ds_id: datasetId }, + }); + + // If dataset doesn't exist yet, user hasn't liked or saved it + if (!dataset) { + return res.status(200).json({ + isLiked: false, + isSaved: false, + }); + } + + // Check if user has liked this dataset + const like = await DatasetLike.findOne({ + where: { user_id: user.id, dataset_id: dataset.id }, + }); + + // Check if user has saved this dataset + const save = await SavedDataset.findOne({ + where: { user_id: user.id, dataset_id: dataset.id }, + }); + + res.status(200).json({ + isLiked: !!like, + isSaved: !!save, + }); + } catch (error) { + console.error("Check user activity error:", error); + res.status(500).json({ + message: "Error checking user activity", + error: error.message, + }); + } +}; + +// Get user's saved datasets +const getUserSavedDatasets = async (req, res) => { + try { + const userId = req.user.id; + + const savedDatasets = await SavedDataset.findAll({ + where: { user_id: userId }, + include: [ + { + model: Dataset, + attributes: ["id", "couch_db", "ds_id", "views_count"], + }, + ], + order: [["created_at", "DESC"]], // Most recently saved first + attributes: ["id", "created_at"], + }); + + // Transform the data for frontend + const datasets = savedDatasets.map((saved) => ({ + id: saved.Dataset.id, + couch_db: saved.Dataset.couch_db, + ds_id: saved.Dataset.ds_id, + views_count: saved.Dataset.views_count, + saved_at: saved.created_at, + })); + + res.status(200).json({ + savedDatasets: datasets, + count: datasets.length, + }); + } catch (error) { + console.error("Get saved datasets error:", error); + res.status(500).json({ + message: "Error 
fetching saved datasets", + error: error.message, + }); + } +}; + +// Get user's liked datasets +const getUserLikedDatasets = async (req, res) => { + try { + const userId = req.user.id; + + const likedDatasets = await DatasetLike.findAll({ + where: { user_id: userId }, + include: [ + { + model: Dataset, + attributes: ["id", "couch_db", "ds_id", "views_count"], + }, + ], + order: [["created_at", "DESC"]], // Most recently liked first + attributes: ["id", "created_at"], + }); + + // Transform the data for frontend + const datasets = likedDatasets.map((like) => ({ + id: like.Dataset.id, + couch_db: like.Dataset.couch_db, + ds_id: like.Dataset.ds_id, + views_count: like.Dataset.views_count, + liked_at: like.created_at, + })); + + res.status(200).json({ + likedDatasets: datasets, + count: datasets.length, + }); + } catch (error) { + console.error("Get liked datasets error:", error); + res.status(500).json({ + message: "Error fetching liked datasets", + error: error.message, + }); + } +}; + module.exports = { likeDataset, unlikeDataset, @@ -394,4 +565,8 @@ module.exports = { deleteComment, trackView, getMostViewedDatasets, + getDatasetStats, + checkUserActivity, + getUserSavedDatasets, + getUserLikedDatasets, }; diff --git a/backend/src/controllers/auth.controller.js b/backend/src/controllers/auth.controller.js index 45d7abf..5f6db30 100644 --- a/backend/src/controllers/auth.controller.js +++ b/backend/src/controllers/auth.controller.js @@ -150,10 +150,10 @@ const register = async (req, res) => { username: user.username, email: user.email, email_verified: user.email_verified, - firstName: user.first_name, // NEW - lastName: user.last_name, // NEW - company: user.company, // NEW - interests: user.interests, // NEW + firstName: user.first_name, + lastName: user.last_name, + company: user.company, + interests: user.interests, }, }); } catch (error) { @@ -354,7 +354,7 @@ const resendVerificationEmail = async (req, res) => { } }; -// NEW: Complete profile for OAuth users +// 
Complete profile for OAuth users const completeProfile = async (req, res) => { try { const { token, firstName, lastName, company, interests } = req.body; @@ -595,6 +595,75 @@ const resetPassword = async (req, res) => { } }; +// Update user profile +const updateProfile = async (req, res) => { + try { + const userId = req.user.id; + const { firstName, lastName, company, interests } = req.body; + + // Validate input + if (!firstName || !lastName || !company) { + return res.status(400).json({ + message: "First name, last name, and company/institution are required", + }); + } + + // Validate field lengths + if (firstName.trim().length < 1 || firstName.trim().length > 255) { + return res.status(400).json({ + message: "First name must be between 1 and 255 characters", + }); + } + if (lastName.trim().length < 1 || lastName.trim().length > 255) { + return res.status(400).json({ + message: "Last name must be between 1 and 255 characters", + }); + } + if (company.trim().length < 1 || company.trim().length > 255) { + return res.status(400).json({ + message: "Company/institution must be between 1 and 255 characters", + }); + } + + // Find user + const user = await User.findByPk(userId); + if (!user) { + return res.status(404).json({ message: "User not found" }); + } + + // Update profile + user.first_name = firstName.trim(); + user.last_name = lastName.trim(); + user.company = company.trim(); + user.interests = interests ? 
interests.trim() : null; + await user.save(); + + res.json({ + message: "Profile updated successfully", + user: { + id: user.id, + username: user.username, + email: user.email, + email_verified: user.email_verified, + firstName: user.first_name, + lastName: user.last_name, + company: user.company, + interests: user.interests, + isOAuthUser: !!(user.google_id || user.orcid_id || user.github_id), + hasPassword: !!user.hashed_password, + created_at: user.created_at, + updated_at: user.updated_at, + }, + }); + } catch (error) { + console.error("Update profile error:", error); + res.status(500).json({ + message: "Error updating profile", + error: error.message, + }); + } +}; + module.exports = { register, login, @@ -603,6 +672,7 @@ module.exports = { resendVerificationEmail, completeProfile, changePassword, - forgotPassword, // New + forgotPassword, resetPassword, + updateProfile, }; diff --git a/backend/src/controllers/collection.controller.js b/backend/src/controllers/collection.controller.js new file mode 100644 index 0000000..cb984aa --- /dev/null +++ b/backend/src/controllers/collection.controller.js @@ -0,0 +1,377 @@ +const { Collection, CollectionDataset, Dataset, User } = require("../models"); + +// Get all collections for current user +const getUserCollections = async (req, res) => { + try { + const userId = req.user.id; + + const collections = await Collection.findAll({ + where: { user_id: userId }, + include: [ + { + model: Dataset, + as: "datasets", + through: { attributes: ["created_at"] }, + attributes: ["id", "couch_db", "ds_id", "views_count"], + }, + ], + order: [["created_at", "DESC"]], + }); + + // Transform to include dataset count + const collectionsWithCount = collections.map((col) => ({ + id: col.id, + name: col.name, + description: col.description, + is_public: col.is_public, + created_at: col.created_at, + updated_at: col.updated_at, + datasets_count: col.datasets ? 
col.datasets.length : 0, + datasets: col.datasets, // Include full dataset details + })); + + res.status(200).json({ + collections: collectionsWithCount, + count: collectionsWithCount.length, + }); + } catch (error) { + console.error("Get collections error:", error); + res.status(500).json({ + message: "Error fetching collections", + error: error.message, + }); + } +}; + +// Create a new collection +const createCollection = async (req, res) => { + try { + const userId = req.user.id; + const { name, description, is_public } = req.body; + + if (!name || name.trim() === "") { + return res.status(400).json({ message: "Collection name is required" }); + } + + // Check if collection name already exists for this user + const existing = await Collection.findOne({ + where: { user_id: userId, name: name.trim() }, + }); + + if (existing) { + return res.status(400).json({ + message: "A collection with this name already exists", + }); + } + + const collection = await Collection.create({ + user_id: userId, + name: name.trim(), + description: description?.trim() || null, + is_public: is_public || false, + }); + + res.status(201).json({ + message: "Collection created successfully", + collection, + }); + } catch (error) { + console.error("Create collection error:", error); + res.status(500).json({ + message: "Error creating collection", + error: error.message, + }); + } +}; + +// Get a specific collection with its datasets +const getCollection = async (req, res) => { + try { + const userId = req.user.id; + const { collectionId } = req.params; + + // Verify collection belongs to user + const collection = await Collection.findOne({ + where: { id: collectionId, user_id: userId }, + include: [ + { + model: Dataset, + as: "datasets", + through: { attributes: ["created_at"] }, + attributes: ["id", "couch_db", "ds_id", "views_count"], + }, + ], + }); + + if (!collection) { + return res.status(404).json({ message: "Collection not found" }); + } + + res.status(200).json({ + collection: { + 
id: collection.id, + name: collection.name, + description: collection.description, + is_public: collection.is_public, + created_at: collection.created_at, + updated_at: collection.updated_at, + datasets: collection.datasets || [], + datasets_count: collection.datasets ? collection.datasets.length : 0, + }, + }); + } catch (error) { + console.error("Get collection error:", error); + res.status(500).json({ + message: "Error fetching collection", + error: error.message, + }); + } +}; + +// Add dataset to collection +const addDatasetToCollection = async (req, res) => { + try { + const userId = req.user.id; + const { collectionId } = req.params; + const { dbName, datasetId } = req.body; + + if (!dbName || !datasetId) { + return res.status(400).json({ + message: "dbName and datasetId are required", + }); + } + + // Verify collection belongs to user + const collection = await Collection.findOne({ + where: { id: collectionId, user_id: userId }, + }); + + if (!collection) { + return res.status(404).json({ message: "Collection not found" }); + } + + // Get or create dataset + let dataset = await Dataset.findOne({ + where: { couch_db: dbName, ds_id: datasetId }, + }); + + if (!dataset) { + dataset = await Dataset.create({ + couch_db: dbName, + ds_id: datasetId, + views_count: 0, + }); + } + + // Check if already in collection + const existing = await CollectionDataset.findOne({ + where: { collection_id: collectionId, dataset_id: dataset.id }, + }); + + if (existing) { + return res.status(400).json({ + message: "Dataset already in this collection", + }); + } + + // Add to collection + await CollectionDataset.create({ + collection_id: collectionId, + dataset_id: dataset.id, + }); + + res.status(201).json({ + message: "Dataset added to collection successfully", + }); + } catch (error) { + console.error("Add dataset to collection error:", error); + res.status(500).json({ + message: "Error adding dataset to collection", + error: error.message, + }); + } +}; + +// Remove dataset 
from collection +const removeDatasetFromCollection = async (req, res) => { + try { + const userId = req.user.id; + const { collectionId, datasetId } = req.params; + + // Verify collection belongs to user + const collection = await Collection.findOne({ + where: { id: collectionId, user_id: userId }, + }); + + if (!collection) { + return res.status(404).json({ message: "Collection not found" }); + } + + // Remove from collection (datasetId here is the Dataset.id, not ds_id) + const deleted = await CollectionDataset.destroy({ + where: { collection_id: collectionId, dataset_id: datasetId }, + }); + + if (deleted === 0) { + return res.status(404).json({ + message: "Dataset not found in this collection", + }); + } + + res.status(200).json({ + message: "Dataset removed from collection successfully", + }); + } catch (error) { + console.error("Remove dataset from collection error:", error); + res.status(500).json({ + message: "Error removing dataset from collection", + error: error.message, + }); + } +}; + +// Update collection (rename, change description) +const updateCollection = async (req, res) => { + try { + const userId = req.user.id; + const { collectionId } = req.params; + const { name, description, is_public } = req.body; + + const collection = await Collection.findOne({ + where: { id: collectionId, user_id: userId }, + }); + + if (!collection) { + return res.status(404).json({ message: "Collection not found" }); + } + + if (name !== undefined) { + // Check for duplicate name + const existing = await Collection.findOne({ + where: { + user_id: userId, + name: name.trim(), + id: { [require("sequelize").Op.ne]: collectionId }, // Exclude current collection + }, + }); + + if (existing) { + return res.status(400).json({ + message: "A collection with this name already exists", + }); + } + + collection.name = name.trim(); + } + + if (description !== undefined) { + collection.description = description?.trim() || null; + } + + if (is_public !== undefined) { + 
collection.is_public = is_public; + } + + await collection.save(); + + res.status(200).json({ + message: "Collection updated successfully", + collection, + }); + } catch (error) { + console.error("Update collection error:", error); + res.status(500).json({ + message: "Error updating collection", + error: error.message, + }); + } +}; + +// Delete collection +const deleteCollection = async (req, res) => { + try { + const userId = req.user.id; + const { collectionId } = req.params; + + const collection = await Collection.findOne({ + where: { id: collectionId, user_id: userId }, + }); + + if (!collection) { + return res.status(404).json({ message: "Collection not found" }); + } + + // Cascade delete will automatically remove collection_datasets entries + await collection.destroy(); + + res.status(200).json({ + message: "Collection deleted successfully", + }); + } catch (error) { + console.error("Delete collection error:", error); + res.status(500).json({ + message: "Error deleting collection", + error: error.message, + }); + } +}; + +// Check which collections contain a specific dataset +const getDatasetCollections = async (req, res) => { + try { + const userId = req.user.id; + const { dbName, datasetId } = req.params; + + // Find the dataset + const dataset = await Dataset.findOne({ + where: { couch_db: dbName, ds_id: datasetId }, + }); + + if (!dataset) { + return res.status(200).json({ + collections: [], + }); + } + + // Find all user's collections that contain this dataset + const collectionDatasets = await CollectionDataset.findAll({ + where: { dataset_id: dataset.id }, + include: [ + { + model: Collection, + where: { user_id: userId }, + attributes: ["id", "name", "description"], + }, + ], + }); + + const collections = collectionDatasets.map((cd) => ({ + id: cd.Collection.id, + name: cd.Collection.name, + description: cd.Collection.description, + added_at: cd.created_at, + })); + + res.status(200).json({ + collections, + count: collections.length, + }); + } catch 
(error) { + console.error("Get dataset collections error:", error); + res.status(500).json({ + message: "Error fetching dataset collections", + error: error.message, + }); + } +}; + +module.exports = { + getUserCollections, + createCollection, + getCollection, + addDatasetToCollection, + removeDatasetFromCollection, + updateCollection, + deleteCollection, + getDatasetCollections, // optional +}; diff --git a/backend/src/controllers/ollama.controller.js b/backend/src/controllers/ollama.controller.js new file mode 100644 index 0000000..84bb3bb --- /dev/null +++ b/backend/src/controllers/ollama.controller.js @@ -0,0 +1,32 @@ +const OLLAMA_BASE_URL = process.env.OLLAMA_BASE_URL || "http://jin.neu.edu:11434"; + +const proxyChat = async (req, res) => { + console.log("🟣 [Ollama] proxyChat hit — model:", req.body.model); + try { + const response = await fetch(`${OLLAMA_BASE_URL}/v1/chat/completions`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(req.body), + }); + + const data = await response.json(); + res.json(data); + } catch (err) { + console.error("🔴 [Ollama] fetch failed:", err.message); + res.status(500).json({ error: err.message }); + } +}; + +const getTags = async (req, res) => { + console.log("🟣 [Ollama] getTags hit"); + try { + const response = await fetch(`${OLLAMA_BASE_URL}/api/tags`); + const data = await response.json(); + res.json(data); + } catch (err) { + console.error("🔴 [Ollama] getTags failed:", err.message); + res.status(500).json({ error: err.message }); + } +}; + +module.exports = { proxyChat, getTags }; diff --git a/backend/src/controllers/projectController.js b/backend/src/controllers/projectController.js new file mode 100644 index 0000000..4e3ac07 --- /dev/null +++ b/backend/src/controllers/projectController.js @@ -0,0 +1,195 @@ +const { Project, User } = require("../models"); +const { nanoid } = require("nanoid"); + +// Create a new organizer project +const createProject = async (req, res) => { + try { 
+ const user = req.user; + const { name, description } = req.body; + + const project = await Project.create({ + user_id: user.id, + public_id: nanoid(12), + name: name || `Dataset Project ${new Date().toLocaleDateString()}`, + description: description || "don't have description yet", + extractor_state: { + files: [], + selectedIds: [], + expandedIds: [], + }, + }); + + res.status(201).json({ + message: "Project created successfully", + project, + }); + } catch (error) { + console.error("Create project error:", error); + res.status(500).json({ + message: "Error creating project", + error: error.message, + }); + } +}; + +// Get all projects for current user +const getUserProjects = async (req, res) => { + try { + const user = req.user; + + const projects = await Project.findAll({ + where: { user_id: user.id }, + order: [["updated_at", "DESC"]], + attributes: [ + "id", + "public_id", // ← ADD + "name", + "description", + "created_at", + "updated_at", + "extractor_state", + ], + }); + + // Add file count to each project + const projectsWithCount = projects.map((project) => { + const state = project.extractor_state || { files: [] }; + return { + id: project.id, + public_id: project.public_id, // ← ADD + name: project.name, + description: project.description, + created_at: project.created_at, + updated_at: project.updated_at, + file_count: state.files ? 
state.files.length : 0, + }; + }); + + res.status(200).json({ + projects: projectsWithCount, + count: projectsWithCount.length, + }); + } catch (error) { + console.error("Get projects error:", error); + res.status(500).json({ + message: "Error fetching projects", + error: error.message, + }); + } +}; + +// Get a specific project +const getProject = async (req, res) => { + try { + const user = req.user; + const { projectId } = req.params; + + const project = await Project.findOne({ + where: { + // id: projectId, + public_id: projectId, + user_id: user.id, + }, + }); + + if (!project) { + return res.status(404).json({ + message: "Project not found", + }); + } + + res.status(200).json({ project }); + } catch (error) { + console.error("Get project error:", error); + res.status(500).json({ + message: "Error fetching project", + error: error.message, + }); + } +}; + +// Update project +const updateProject = async (req, res) => { + try { + const user = req.user; + const { projectId } = req.params; + const { name, description, extractor_state } = req.body; + + const project = await Project.findOne({ + where: { + // id: projectId, + public_id: projectId, + user_id: user.id, + }, + }); + + if (!project) { + return res.status(404).json({ + message: "Project not found", + }); + } + + // Update only provided fields + if (name !== undefined) project.name = name; + if (description !== undefined) project.description = description; + if (extractor_state !== undefined) { + project.extractor_state = extractor_state; + // Mark as changed for JSON field - tells Sequelize to UPDATE this field + project.changed("extractor_state", true); + } + + await project.save(); + + res.status(200).json({ + message: "Project updated successfully", + project, + }); + } catch (error) { + console.error("Update project error:", error); + res.status(500).json({ + message: "Error updating project", + error: error.message, + }); + } +}; +// Delete project +const deleteProject = async (req, res) => { + try { 
+ const user = req.user; + const { projectId } = req.params; + + const project = await Project.findOne({ + where: { + // id: projectId, + public_id: projectId, + user_id: user.id, + }, + }); + + if (!project) { + return res.status(404).json({ + message: "Project not found", + }); + } + + await project.destroy(); + + res.status(200).json({ + message: "Project deleted successfully", + }); + } catch (error) { + console.error("Delete project error:", error); + res.status(500).json({ + message: "Error deleting project", + error: error.message, + }); + } +}; + +module.exports = { + createProject, + getUserProjects, + getProject, + updateProject, + deleteProject, +}; diff --git a/backend/src/middleware/auth.middleware.js b/backend/src/middleware/auth.middleware.js index 3f00fc5..b711717 100644 --- a/backend/src/middleware/auth.middleware.js +++ b/backend/src/middleware/auth.middleware.js @@ -2,9 +2,10 @@ const jwt = require("jsonwebtoken"); const { User } = require("../models"); const JWT_SECRET = process.env.JWT_SECRET; -const JWT_EXPIRES_IN = process.env.JWT_EXPIRES_IN || "3600"; +const JWT_EXPIRES_IN = process.env.JWT_EXPIRES_IN || 14400; // default 4 hours +const MAX_SESSION_DURATION = process.env.MAX_SESSION_DURATION || "86400"; // 24 hours default -const setTokenCookie = (res, user) => { +const setTokenCookie = (res, user, isNewSession = true) => { // create safe user object for token const safeUser = { id: user.id, @@ -12,8 +13,25 @@ const setTokenCookie = (res, user) => { username: user.username, }; + // Add + const payload = { + data: safeUser, + }; + + // Add: If this is a new session, add the session start time + if (isNewSession) { + payload.sessionStart = Math.floor(Date.now() / 1000); // Unix timestamp + } else { + // Preserve the original session start time when refreshing + payload.sessionStart = user.sessionStart; + } + // sign JWT token - const token = jwt.sign({ data: safeUser }, JWT_SECRET, { + // const token = jwt.sign({ data: safeUser }, JWT_SECRET, 
{ + // expiresIn: parseInt(JWT_EXPIRES_IN), + // }); + // replace with + const token = jwt.sign(payload, JWT_SECRET, { expiresIn: parseInt(JWT_EXPIRES_IN), }); @@ -54,6 +72,16 @@ const restoreUser = (req, res, next) => { // extract user id from token payload const { id } = jwtPayload.data; + // Add: Check maximum session duration + const currentTime = Math.floor(Date.now() / 1000); + const sessionAge = currentTime - jwtPayload.sessionStart; + + if (sessionAge > parseInt(MAX_SESSION_DURATION)) { + // Session has exceeded maximum duration + res.clearCookie("token"); + return next(); + } + //load user from database req.user = await User.findByPk(id, { attributes: { @@ -61,6 +89,12 @@ const restoreUser = (req, res, next) => { exclude: ["hashed_password"], // Never send password }, }); + + // Add: refresh token - issue new token with extended expiration + if (req.user) { + req.user.sessionStart = jwtPayload.sessionStart; // Pass along the original session start + setTokenCookie(res, req.user, false); + } } catch (error) { res.clearCookie("token"); return next(); diff --git a/backend/src/models/Collection.js b/backend/src/models/Collection.js new file mode 100644 index 0000000..0614900 --- /dev/null +++ b/backend/src/models/Collection.js @@ -0,0 +1,52 @@ +const { DataTypes, Model } = require("sequelize"); +const { sequelize } = require("../config/database"); + +class Collection extends Model {} + +Collection.init( + { + id: { + type: DataTypes.INTEGER, + autoIncrement: true, + primaryKey: true, + }, + user_id: { + type: DataTypes.INTEGER, + allowNull: false, + references: { + model: "users", + key: "id", + }, + }, + name: { + type: DataTypes.STRING(100), + allowNull: false, + }, + description: { + type: DataTypes.TEXT, + allowNull: true, + }, + is_public: { + type: DataTypes.BOOLEAN, + defaultValue: false, + }, + created_at: { + type: DataTypes.DATE, + defaultValue: DataTypes.NOW, + }, + updated_at: { + type: DataTypes.DATE, + defaultValue: DataTypes.NOW, + }, + }, + { + 
sequelize, + tableName: "collections", + timestamps: true, + underscored: true, + createdAt: "created_at", + updatedAt: "updated_at", + } +); + +module.exports = Collection; diff --git a/backend/src/models/CollectionDataset.js b/backend/src/models/CollectionDataset.js new file mode 100644 index 0000000..b78552c --- /dev/null +++ b/backend/src/models/CollectionDataset.js @@ -0,0 +1,42 @@ +const { DataTypes, Model } = require("sequelize"); +const { sequelize } = require("../config/database"); + +class CollectionDataset extends Model {} + +CollectionDataset.init( + { + id: { + type: DataTypes.INTEGER, + autoIncrement: true, + primaryKey: true, + }, + collection_id: { + type: DataTypes.INTEGER, + allowNull: false, + references: { + model: "collections", + key: "id", + }, + }, + dataset_id: { + type: DataTypes.INTEGER, + allowNull: false, + references: { + model: "datasets", + key: "id", + }, + }, + created_at: { + type: DataTypes.DATE, + defaultValue: DataTypes.NOW, + }, + }, + { + sequelize, + tableName: "collection_datasets", + timestamps: false, + underscored: true, + } +); + +module.exports = CollectionDataset; diff --git a/backend/src/models/Project.js b/backend/src/models/Project.js new file mode 100644 index 0000000..11cccf5 --- /dev/null +++ b/backend/src/models/Project.js @@ -0,0 +1,59 @@ +const { DataTypes, Model } = require("sequelize"); +const { sequelize } = require("../config/database"); +const { nanoid } = require("nanoid"); +console.log("nanoid test:", nanoid(12)); + +class Project extends Model {} + +Project.init( + { + id: { + type: DataTypes.INTEGER, + autoIncrement: true, + primaryKey: true, + }, + user_id: { + type: DataTypes.INTEGER, + allowNull: false, + references: { + model: "users", + key: "id", + }, + }, + name: { + type: DataTypes.STRING(200), + allowNull: false, + }, + public_id: { + type: DataTypes.STRING(12), + allowNull: false, + unique: true, + }, + description: { + type: DataTypes.TEXT, + allowNull: true, + }, + extractor_state: { + 
type: DataTypes.JSON, + allowNull: true, + }, + created_at: { + type: DataTypes.DATE, + defaultValue: DataTypes.NOW, + }, + updated_at: { + type: DataTypes.DATE, + defaultValue: DataTypes.NOW, + }, + }, + { + sequelize, + tableName: "projects", + timestamps: true, + underscored: true, + createdAt: "created_at", + updatedAt: "updated_at", + } +); + +module.exports = Project; diff --git a/backend/src/models/index.js b/backend/src/models/index.js index c613110..79fb712 100644 --- a/backend/src/models/index.js +++ b/backend/src/models/index.js @@ -2,6 +2,9 @@ const { DataTypes, Model } = require("sequelize"); const { sequelize } = require("../config/database"); const User = require("../models/User"); const Dataset = require("../models/Dataset"); +const Collection = require("../models/Collection"); +const CollectionDataset = require("../models/CollectionDataset"); +const Project = require("../models/Project"); // DatasetLike Model class DatasetLike extends Model {} @@ -197,6 +200,34 @@ Comment.belongsTo(Dataset, { foreignKey: "dataset_id" }); Dataset.hasMany(ViewHistory, { foreignKey: "dataset_id", as: "viewHistory" }); ViewHistory.belongsTo(Dataset, { foreignKey: "dataset_id" }); +// NEW: Collection Associations +User.hasMany(Collection, { foreignKey: "user_id", as: "collections" }); +Collection.belongsTo(User, { foreignKey: "user_id" }); + +Collection.belongsToMany(Dataset, { + through: CollectionDataset, + foreignKey: "collection_id", + otherKey: "dataset_id", + as: "datasets", +}); + +Dataset.belongsToMany(Collection, { + through: CollectionDataset, + foreignKey: "dataset_id", + otherKey: "collection_id", + as: "collections", +}); + +CollectionDataset.belongsTo(Collection, { foreignKey: "collection_id" }); +Collection.hasMany(CollectionDataset, { foreignKey: "collection_id" }); + +CollectionDataset.belongsTo(Dataset, { foreignKey: "dataset_id" }); +Dataset.hasMany(CollectionDataset, { foreignKey: "dataset_id" }); + +// NEW: Project Associations 
+User.hasMany(Project, { foreignKey: "user_id", as: "projects" }); +Project.belongsTo(User, { foreignKey: "user_id" }); + module.exports = { User, Dataset, @@ -204,4 +235,7 @@ module.exports = { SavedDataset, Comment, ViewHistory, + Collection, + CollectionDataset, + Project, }; diff --git a/backend/src/routes/activities.routes.js b/backend/src/routes/activities.routes.js index 02ca128..700122d 100644 --- a/backend/src/routes/activities.routes.js +++ b/backend/src/routes/activities.routes.js @@ -11,6 +11,10 @@ const { deleteComment, trackView, getMostViewedDatasets, + getDatasetStats, + checkUserActivity, + getUserLikedDatasets, + getUserSavedDatasets, } = require("../controllers/activity.controller"); const { restoreUser, requireAuth } = require("../middleware/auth.middleware"); @@ -37,4 +41,13 @@ router.delete("/comments/:commentId", requireAuth, deleteComment); router.post("/datasets/:dbName/:datasetId/views", trackView); // Public router.get("/datasets/most-viewed", getMostViewedDatasets); // Public +// Dataset statistics (views count, likes count) +router.get("/datasets/:dbName/:datasetId/stats", getDatasetStats); // Public +// Check user activity (isLiked or isSaved) +router.get("/datasets/:dbName/:datasetId/user-activity", checkUserActivity); + +// User's collections +router.get("/users/me/saved-datasets", requireAuth, getUserSavedDatasets); +router.get("/users/me/liked-datasets", requireAuth, getUserLikedDatasets); + module.exports = router; diff --git a/backend/src/routes/auth.routes.js b/backend/src/routes/auth.routes.js index ebbb82d..f7e2b70 100644 --- a/backend/src/routes/auth.routes.js +++ b/backend/src/routes/auth.routes.js @@ -11,6 +11,7 @@ const { changePassword, forgotPassword, resetPassword, + updateProfile, } = require("../controllers/auth.controller"); const { verifyEmail } = require("../controllers/verification.controller"); const { @@ -33,12 +34,15 @@ router.post("/logout", requireAuth, logout); router.get("/verify-email", verifyEmail); 
router.post("/resend-verification", resendVerificationEmail); -// NEW: Password management routes +// Password management routes router.post("/change-password", requireAuth, changePassword); router.post("/forgot-password", forgotPassword); router.post("/reset-password", resetPassword); -// NEW: OAuth profile completion route +// update profile route +router.put("/update-profile", requireAuth, updateProfile); + +// OAuth profile completion route router.post("/complete-profile", completeProfile); // Google OAuth routes diff --git a/backend/src/routes/collection.route.js b/backend/src/routes/collection.route.js new file mode 100644 index 0000000..ed51265 --- /dev/null +++ b/backend/src/routes/collection.route.js @@ -0,0 +1,55 @@ +const express = require("express"); +const { + getUserCollections, + createCollection, + getCollection, + addDatasetToCollection, + removeDatasetFromCollection, + updateCollection, + deleteCollection, + getDatasetCollections, +} = require("../controllers/collection.controller"); +const { restoreUser, requireAuth } = require("../middleware/auth.middleware"); + +const router = express.Router(); + +// Apply restoreUser to all routes +router.use(restoreUser); + +// Get all user's collections +router.get("/me/collections", requireAuth, getUserCollections); + +// Create new collection +router.post("/collections", requireAuth, createCollection); + +// Get specific collection +router.get("/collections/:collectionId", requireAuth, getCollection); + +// Add dataset to collection +router.post( + "/collections/:collectionId/datasets", + requireAuth, + addDatasetToCollection +); + +// Remove dataset from collection +router.delete( + "/collections/:collectionId/datasets/:datasetId", + requireAuth, + removeDatasetFromCollection +); + +// Update collection +router.put("/collections/:collectionId", requireAuth, updateCollection); + +// Delete collection +router.delete("/collections/:collectionId", requireAuth, deleteCollection); + +// Check which collections 
contain a specific dataset (for the "Add to Collection" menu) +router.get( + "/datasets/:dbName/:datasetId/collections", + requireAuth, + getDatasetCollections +); + +module.exports = router; diff --git a/backend/src/routes/ollama.routes.js b/backend/src/routes/ollama.routes.js new file mode 100644 index 0000000..ff1fd94 --- /dev/null +++ b/backend/src/routes/ollama.routes.js @@ -0,0 +1,8 @@ +const express = require("express"); +const router = express.Router(); +const { proxyChat, getTags } = require("../controllers/ollama.controller"); + +router.post("/chat", proxyChat); +router.get("/tags", getTags); + +module.exports = router; diff --git a/backend/src/routes/projects.routes.js b/backend/src/routes/projects.routes.js new file mode 100644 index 0000000..4d4a834 --- /dev/null +++ b/backend/src/routes/projects.routes.js @@ -0,0 +1,31 @@ +const express = require("express"); +const { + getUserProjects, + createProject, + getProject, + updateProject, + deleteProject, +} = require("../controllers/projectController"); +const { restoreUser, requireAuth } = require("../middleware/auth.middleware"); + +const router = express.Router(); + +// Apply restoreUser to all routes +router.use(restoreUser); + +// Get all user's projects +router.get("/me/projects", requireAuth, getUserProjects); + +// Create new project +router.post("/", requireAuth, createProject); + +// Get specific project +router.get("/:projectId", requireAuth, getProject); + +// Update project +router.put("/:projectId", requireAuth, updateProject); + +// Delete project +router.delete("/:projectId", requireAuth, deleteProject); + +module.exports = router; diff --git a/backend/src/server.js b/backend/src/server.js index 47e87bf..5e6cb6c 100644 --- a/backend/src/server.js +++ b/backend/src/server.js @@ -11,6 +11,9 @@ const userRoutes = require("./routes/users.routes"); const activitiesRoutes = require("./routes/activities.routes"); const dbsRoutes = require("./routes/dbs.routes"); const datasetsRoutes = 
require("./routes/datasets.routes"); +const collectionRoutes = require("./routes/collection.route"); +const projectRoutes = require("./routes/projects.routes"); +const ollamaRoutes = require("./routes/ollama.routes"); const app = express(); const PORT = process.env.PORT || 5000; @@ -31,8 +34,8 @@ app.use( }) ); -app.use(express.json()); -app.use(express.urlencoded({ extended: true })); +app.use(express.json({ limit: "50mb" })); +app.use(express.urlencoded({ limit: "50mb", extended: true })); app.use(cookieParser()); // parse cookies app.use(passport.initialize()); @@ -45,6 +48,9 @@ app.use("/api/v1/users", userRoutes); app.use("/api/v1/activities", activitiesRoutes); app.use("/api/v1/dbs", dbsRoutes); app.use("/api/v1/datasets", datasetsRoutes); +app.use("/api/v1/collections", collectionRoutes); +app.use("/api/v1/projects", projectRoutes); +app.use("/api/v1/ollama", ollamaRoutes); // health check endpoint app.get("/api/health", async (req, res) => { diff --git a/package.json b/package.json index 3620db4..1f8144e 100644 --- a/package.json +++ b/package.json @@ -31,13 +31,18 @@ "bjd": "^0.3.2", "buffer": "6.0.3", "dayjs": "^1.11.10", + "dicom-parser": "^1.8.21", "jquery": "^3.7.1", + "jsfive": "^0.4.0", "json-stringify-safe": "^5.0.1", + "jszip": "^3.10.1", "jwt-decode": "^3.1.2", "lzma": "^2.3.2", + "mammoth": "^1.11.0", "numjs": "^0.16.1", "pako": "1.0.11", "path-browserify": "^1.0.1", + "pdfjs-dist": "3.4.120", "query-string": "^8.1.0", "react": "^18.2.0", "react-dom": "^18.2.0", @@ -52,7 +57,8 @@ "three": "0.145.0", "typescript": "^5.1.6", "uplot": "1.6.17", - "web-vitals": "^2.1.0" + "web-vitals": "^2.1.0", + "xlsx": "^0.18.5" }, "devDependencies": { "@babel/plugin-proposal-private-property-in-object": "^7.21.11", diff --git a/src/App.tsx b/src/App.tsx index 28b62f3..03d0af0 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -4,6 +4,7 @@ import Routes from "components/Routes"; import theme from "design/theme"; import { useAppDispatch } from "hooks/useAppDispatch"; 
import { useGAPageviews } from "hooks/useGAPageviews"; +import { useSessionPoller } from "hooks/useSessionPoller"; import { useEffect, useState } from "react"; import React from "react"; import { BrowserRouter, useLocation } from "react-router-dom"; @@ -26,6 +27,7 @@ function AuthHandler() { const navigate = useNavigate(); const hasProcessedOAuthRef = React.useRef(false); + useSessionPoller(); // Handle browser back/forward navigation useEffect(() => { const handlePopState = () => { diff --git a/src/components/DatasetDetailPage/DatasetAction.tsx b/src/components/DatasetDetailPage/DatasetAction.tsx new file mode 100644 index 0000000..9e33d14 --- /dev/null +++ b/src/components/DatasetDetailPage/DatasetAction.tsx @@ -0,0 +1,523 @@ +import AddIcon from "@mui/icons-material/Add"; +import BookmarkAddedIcon from "@mui/icons-material/BookmarkAdded"; +import BookmarkBorderIcon from "@mui/icons-material/BookmarkBorder"; +import CheckIcon from "@mui/icons-material/Check"; +import FavoriteIcon from "@mui/icons-material/Favorite"; +import FavoriteBorderIcon from "@mui/icons-material/FavoriteBorder"; +import { + Box, + Button, + CircularProgress, + Typography, + Snackbar, + Alert, + Menu, + MenuItem, + ListItemIcon, + ListItemText, + Divider, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + TextField, +} from "@mui/material"; +import { Colors } from "design/theme"; +import React, { useState } from "react"; + +interface DatasetActionsProps { + isLiked: boolean; + isSaved: boolean; + likesCount: number; + viewsCount: number; + isLikeLoading: boolean; + isSaveLoading: boolean; + isAuthenticated: boolean; + onLikeToggle: () => void; + collections: Array<{ id: number; name: string; isInCollection: boolean }>; + onCreateCollection: (name: string, description?: string) => void; + onAddToCollection: (collectionId: number) => void; + isLoadingCollections: boolean; +} + +const DatasetActions: React.FC = ({ + isLiked, + isSaved, + likesCount, + viewsCount, + isLikeLoading, + 
isSaveLoading, + isAuthenticated, + onLikeToggle, + collections, + onCreateCollection, + onAddToCollection, + isLoadingCollections, +}) => { + const [showLoginAlert, setShowLoginAlert] = useState(false); + + // Collection menu state + const [saveMenuAnchor, setSaveMenuAnchor] = useState( + null + ); + const [createDialogOpen, setCreateDialogOpen] = useState(false); + const [newCollectionName, setNewCollectionName] = useState(""); + const [newCollectionDescription, setNewCollectionDescription] = useState(""); + const [showAlreadyInMessage, setShowAlreadyInMessage] = useState(false); + const [selectedCollectionName, setSelectedCollectionName] = useState(""); + + const handleUnauthenticatedClick = () => { + setShowLoginAlert(true); + }; + + // Add early return for non-authenticated users + if (!isAuthenticated) { + return ( + <> + + {/* Read-only Like Button */} + + + {/* Read-only Save Button */} + + + {/* Views Count */} + {viewsCount > 0 && ( + + {viewsCount} {viewsCount === 1 ? "view" : "views"} + + )} + + + {/* Login Alert for non-authenticated users */} + setShowLoginAlert(false)} + anchorOrigin={{ vertical: "bottom", horizontal: "center" }} + > + setShowLoginAlert(false)} + severity="info" + sx={{ + backgroundColor: Colors.black, + color: Colors.green, + border: `1px solid ${Colors.black}`, + width: "100%", + + // icon color + "& .MuiAlert-icon": { + color: Colors.rose, + }, + }} + > + Please log in to like or save datasets + + + + ); + } + + // Handle save button click - open menu instead of toggle + const handleSaveClick = (event: React.MouseEvent) => { + if (!isAuthenticated) { + handleUnauthenticatedClick(); + return; + } + setSaveMenuAnchor(event.currentTarget); + }; + + // Close menu + const handleCloseMenu = () => { + setSaveMenuAnchor(null); + setShowAlreadyInMessage(false); + }; + + // Toggle dataset in/out of collection + const handleCollectionClick = ( + collectionId: number, + isInCollection: boolean, + collectionName: string + ) => { + if 
(isInCollection) { + setSelectedCollectionName(collectionName); + setShowAlreadyInMessage(true); + setTimeout(() => setShowAlreadyInMessage(false), 3000); + // handleCloseMenu(); + } else { + onAddToCollection(collectionId); + handleCloseMenu(); + setShowAlreadyInMessage(false); + } + // handleCloseMenu(); + }; + + // Open create dialog + const handleCreateNew = () => { + setCreateDialogOpen(true); + handleCloseMenu(); + }; + + // Create new collection + const handleCreateSubmit = () => { + if (newCollectionName.trim()) { + onCreateCollection( + newCollectionName.trim(), + newCollectionDescription.trim() || undefined + ); + setNewCollectionName(""); + setNewCollectionDescription(""); + setCreateDialogOpen(false); + } + }; + + // Cancel create dialog + const handleCreateCancel = () => { + setNewCollectionName(""); + setNewCollectionDescription(""); + setCreateDialogOpen(false); + }; + + return ( + <> + + {/* Like Button */} + + + {/* Save Button - Now opens menu */} + + + {/* Collections Menu */} + + + Save to Collection + + + + {/* ✅ Show message inside menu */} + {showAlreadyInMessage && ( + <> + + + ✓ Already in "{selectedCollectionName}" + + + + + )} + + {collections.length === 0 ? ( + + No collections yet + + ) : ( + collections.map((collection) => ( + + handleCollectionClick( + collection.id, + collection.isInCollection, + collection.name + ) + } + sx={{ fontSize: "0.875rem" }} + > + {collection.isInCollection && ( + + + + )} + + + )) + )} + + + + + + + + + + + {/* Views Count Display */} + {viewsCount > 0 && ( + + {viewsCount} {viewsCount === 1 ? 
"view" : "views"} + + )} + + + {/* Create Collection Dialog */} + + + Create New Collection + + + setNewCollectionName(e.target.value)} + sx={{ + mb: 2, + mt: 1, + // focused label color + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + + // focused outline color + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + + // optional: hover outline color + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + setNewCollectionDescription(e.target.value)} + sx={{ + // focused label color + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + + // focused outline color + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + + // optional: hover outline color + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + + + + + + + {/* Login Alert */} + {/* setShowLoginAlert(false)} + anchorOrigin={{ vertical: "bottom", horizontal: "center" }} + > + setShowLoginAlert(false)} + severity="info" + sx={{ width: "100%" }} + > + Please log in to like or save datasets + + */} + + ); +}; + +export default DatasetActions; diff --git a/src/components/Routes.tsx b/src/components/Routes.tsx index 3120159..5983701 100644 --- a/src/components/Routes.tsx +++ b/src/components/Routes.tsx @@ -1,13 +1,13 @@ import ScrollToTop from "./ScrollToTop"; import CompleteProfile from "./User/CompleteProfile"; +import CollectionDetailPage from "./User/Dashboard/CollectionDetailPage"; +import DatasetOrganizer from "./User/Dashboard/DatasetOrganizer"; import ForgotPassword from "./User/ForgotPassword"; import ResetPassword from "./User/ResetPassword"; import UserDashboard from "./User/UserDashboard"; import FullScreen from "design/Layouts/FullScreen"; import AboutPage from "pages/AboutPage"; import DatabasePage from 
"pages/DatabasePage"; -import DatasetDetailPage from "pages/DatasetDetailPage"; -import DatasetPage from "pages/DatasetPage"; import Home from "pages/Home"; import ResendVerification from "pages/ResendVerification"; import SearchPage from "pages/SearchPage"; @@ -61,6 +61,13 @@ const Routes = () => ( {/* Dashboard Page */} } /> + + {/* pages redirect from user dashboard */} + } + /> + } /> diff --git a/src/components/User/Dashboard/CollectionDetailPage.tsx b/src/components/User/Dashboard/CollectionDetailPage.tsx new file mode 100644 index 0000000..c2df615 --- /dev/null +++ b/src/components/User/Dashboard/CollectionDetailPage.tsx @@ -0,0 +1,371 @@ +import { + Home, + Folder, + Visibility, + Delete, + ArrowBack, +} from "@mui/icons-material"; +import { + Box, + Container, + Typography, + Paper, + CircularProgress, + Alert, + List, + ListItem, + ListItemText, + Divider, + Button, + Chip, + IconButton, + Breadcrumbs, + Link, + Dialog, + DialogTitle, + DialogContent, + DialogActions, +} from "@mui/material"; +import { Colors } from "design/theme"; +import { useAppDispatch } from "hooks/useAppDispatch"; +import { useAppSelector } from "hooks/useAppSelector"; +import React, { useEffect } from "react"; +import { useParams, useNavigate } from "react-router-dom"; +import { AuthSelector } from "redux/auth/auth.selector"; +import { + getCollection, + removeDatasetFromCollection, +} from "redux/collections/collections.action"; +import { + selectCurrentCollection, + selectCollectionsLoading, + selectCollectionsError, +} from "redux/collections/collections.selector"; + +const CollectionDetailPage: React.FC = () => { + const { collectionId } = useParams<{ collectionId: string }>(); + const { user } = useAppSelector(AuthSelector); + const navigate = useNavigate(); + const dispatch = useAppDispatch(); + + const collection = useAppSelector(selectCurrentCollection); + const loading = useAppSelector(selectCollectionsLoading); + const error = useAppSelector(selectCollectionsError); + + 
// ✅ Add state for delete confirmation dialog + const [deleteDialogOpen, setDeleteDialogOpen] = React.useState(false); + const [datasetToDelete, setDatasetToDelete] = React.useState<{ + id: number; + name: string; + } | null>(null); + + if (!user) { + return ( + + + Please log in to access your dashboard. + + + ); + } + + useEffect(() => { + if (collectionId) { + dispatch(getCollection({ collectionId: parseInt(collectionId) })); + } + }, [collectionId, dispatch]); + + const handleViewDataset = (dbName: string, datasetId: string) => { + navigate(`/db/${dbName}/${datasetId}`); + }; + + // Open delete confirmation + const handleDeleteClick = (datasetId: number, datasetName: string) => { + setDatasetToDelete({ id: datasetId, name: datasetName }); + setDeleteDialogOpen(true); + }; + + // Confirm delete + const handleDeleteConfirm = async () => { + if (!collectionId || !datasetToDelete) return; + + try { + await dispatch( + removeDatasetFromCollection({ + collectionId: parseInt(collectionId), + datasetId: datasetToDelete.id, + }) + ).unwrap(); + + // Refetch collection + dispatch(getCollection({ collectionId: parseInt(collectionId) })); + + // Close dialog + setDeleteDialogOpen(false); + setDatasetToDelete(null); + } catch (error) { + console.error("Error removing dataset:", error); + } + }; + + // Cancel delete + const handleDeleteCancel = () => { + setDeleteDialogOpen(false); + setDatasetToDelete(null); + }; + + const formatDate = (dateString: string) => { + return new Date(dateString).toLocaleDateString("en-US", { + year: "numeric", + month: "short", + day: "numeric", + }); + }; + + if (loading && !collection) { + return ( + + + + + + ); + } + + if (error) { + return ( + + {error} + + ); + } + + if (!collection) { + return ( + + Collection not found + + + ); + } + + const datasets = collection.datasets || []; + + return ( + + {/* Breadcrumbs */} + + navigate("/")} + sx={{ + display: "flex", + alignItems: "center", + gap: 0.5, + color: Colors.white, + textDecoration: 
"none", + "&:hover": { textDecoration: "underline" }, + }} + > + + Home + + navigate("/dashboard")} + sx={{ + color: Colors.white, + textDecoration: "none", + "&:hover": { textDecoration: "underline" }, + }} + > + Dashboard + + {collection.name} + + + {/* Back Button */} + + + {/* Collection Header */} + + + + + + {collection.name} + + {collection.description && ( + + {collection.description} + + )} + + Created {formatDate(collection.created_at)} • {datasets.length}{" "} + {datasets.length === 1 ? "dataset" : "datasets"} + + + + + + {/* Datasets List */} + {datasets.length === 0 ? ( + + + No datasets in this collection + + + Add datasets to this collection from any dataset page + + + ) : ( + + + {datasets.map((dataset, index) => ( + + {index > 0 && } + + + + + {dataset.ds_id} + + + + } + secondary={ + dataset.CollectionDataset?.created_at && + `Added ${formatDate( + dataset.CollectionDataset.created_at + )}` + } + /> + + + + + handleDeleteClick(dataset.id, dataset.ds_id) + } + sx={{ + color: Colors.rose, + "&:hover": { + backgroundColor: "rgba(211, 47, 47, 0.1)", + }, + }} + > + + + + + + ))} + + + )} + + {/* Delete Dataset from Collection Confirmation Dialog */} + + Remove Dataset from Collection? + + + Remove "{datasetToDelete?.name}" from this collection? + + + The dataset will not be deleted from NeuroJSON, only removed from + this collection. 
+ + + + + + + + + ); +}; + +export default CollectionDetailPage; diff --git a/src/components/User/Dashboard/CollectionsTab.tsx b/src/components/User/Dashboard/CollectionsTab.tsx new file mode 100644 index 0000000..e041449 --- /dev/null +++ b/src/components/User/Dashboard/CollectionsTab.tsx @@ -0,0 +1,693 @@ +import { Folder, Visibility, Add, Delete, Edit } from "@mui/icons-material"; +import { + Box, + Typography, + Paper, + CircularProgress, + Alert, + List, + ListItem, + ListItemText, + Divider, + Button, + Chip, + IconButton, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + TextField, + Checkbox, + FormControlLabel, +} from "@mui/material"; +import { Colors } from "design/theme"; +import { useAppDispatch } from "hooks/useAppDispatch"; +import { useAppSelector } from "hooks/useAppSelector"; +import React, { useEffect, useState } from "react"; +import { useNavigate } from "react-router-dom"; +import { + getUserCollections, + createCollection, + deleteCollection, + updateCollection, +} from "redux/collections/collections.action"; +import { + selectUserCollections, + selectCollectionsLoading, + selectCollectionsError, + selectIsCreatingCollection, +} from "redux/collections/collections.selector"; + +interface CollectionsTabProps { + userId: number; +} + +const CollectionsTab: React.FC = ({ userId }) => { + const dispatch = useAppDispatch(); + const navigate = useNavigate(); + + const collections = useAppSelector(selectUserCollections); + const loading = useAppSelector(selectCollectionsLoading); + const error = useAppSelector(selectCollectionsError); + const isCreating = useAppSelector(selectIsCreatingCollection); + + const [createDialogOpen, setCreateDialogOpen] = useState(false); + const [newCollectionName, setNewCollectionName] = useState(""); + const [newCollectionDescription, setNewCollectionDescription] = useState(""); + const [deleteDialogOpen, setDeleteDialogOpen] = useState(false); + const [collectionToDelete, setCollectionToDelete] = useState<{ 
+ id: number; + name: string; + } | null>(null); + // edit dialog state + const [editDialogOpen, setEditDialogOpen] = useState(false); + const [editingCollection, setEditingCollection] = useState<{ + id: number; + name: string; + description: string; + is_public: boolean; + } | null>(null); + + useEffect(() => { + dispatch(getUserCollections()); + }, [dispatch]); + + const handleViewCollection = (collectionId: number) => { + navigate(`/collections/${collectionId}`); + }; + + const handleCreateOpen = () => { + setCreateDialogOpen(true); + }; + + const handleCreateClose = () => { + setNewCollectionName(""); + setNewCollectionDescription(""); + setCreateDialogOpen(false); + }; + + const handleCreateSubmit = async () => { + if (!newCollectionName.trim()) return; + + try { + await dispatch( + createCollection({ + name: newCollectionName.trim(), + description: newCollectionDescription.trim() || undefined, + }) + ).unwrap(); + + handleCreateClose(); + } catch (error) { + console.error("Error creating collection:", error); + } + }; + + const handleDeleteClick = (collectionId: number, collectionName: string) => { + setCollectionToDelete({ id: collectionId, name: collectionName }); + setDeleteDialogOpen(true); + }; + + const handleDeleteConfirm = async () => { + if (!collectionToDelete) return; + + try { + await dispatch( + deleteCollection({ collectionId: collectionToDelete.id }) + ).unwrap(); + setDeleteDialogOpen(false); + setCollectionToDelete(null); + + // Refetch collections after delete + dispatch(getUserCollections()); + } catch (error) { + console.error("Error deleting collection:", error); + } + }; + + const handleDeleteCancel = () => { + setDeleteDialogOpen(false); + setCollectionToDelete(null); + }; + + // Open edit dialog + const handleEditClick = (collection: any) => { + setEditingCollection({ + id: collection.id, + name: collection.name, + description: collection.description || "", + is_public: collection.is_public || false, + }); + setEditDialogOpen(true); + 
}; + + // Submit edit + const handleEditSubmit = async () => { + if (!editingCollection || !editingCollection.name.trim()) return; + + try { + await dispatch( + updateCollection({ + collectionId: editingCollection.id, + name: editingCollection.name.trim(), + description: editingCollection.description.trim() || undefined, + is_public: editingCollection.is_public, + }) + ).unwrap(); + + // Refetch collections + dispatch(getUserCollections()); + + handleEditClose(); + } catch (error) { + console.error("Error updating collection:", error); + } + }; + + // Close edit dialog + const handleEditClose = () => { + setEditDialogOpen(false); + setEditingCollection(null); + }; + + const formatDate = (dateString: string) => { + return new Date(dateString).toLocaleDateString("en-US", { + year: "numeric", + month: "short", + day: "numeric", + }); + }; + + if (loading && collections.length === 0) { + return ( + + + + ); + } + + if (error) { + return ( + + {error} + + ); + } + + return ( + + {/* Header with Create Button */} + + + + My Collections + + + Organize your datasets into collections + + + + + + {/* Empty State */} + {collections.length === 0 ? 
( + + + + No Collections Yet + + + Create collections to organize your datasets + + + + ) : ( + // Collections List + + + {collections.map((collection, index) => ( + + {index > 0 && } + + + + + + {collection.name} + + + + } + secondary={ + <> + {collection.description && ( + + {collection.description} + + )} + + Created {formatDate(collection.created_at)} + + + } + /> + + + {/* Edit button */} + handleEditClick(collection)} + sx={{ + color: Colors.purple, + "&:hover": { + backgroundColor: "rgba(128, 90, 213, 0.1)", + }, + }} + > + + + {/* view button */} + + + handleDeleteClick(collection.id, collection.name) + } + sx={{ + color: Colors.rose, + "&:hover": { + backgroundColor: "rgba(211, 47, 47, 0.1)", + }, + }} + > + + + + + + ))} + + + )} + + {/* Create Collection Dialog */} + + + Create New Collection + + + setNewCollectionName(e.target.value)} + sx={{ + mb: 2, + mt: 1, + // focused label color + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + + // focused outline color + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + + // optional: hover outline color + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + setNewCollectionDescription(e.target.value)} + sx={{ + // focused label color + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + + // focused outline color + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + + // optional: hover outline color + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + + + + + + + {/* Delete Confirmation Dialog */} + + + Delete Collection? + + + + Are you sure you want to delete "{collectionToDelete?.name}"? + + + The datasets will not be deleted, only the collection. 
+ + + + + + + + {/* Edit Collection Dialog */} + + + Edit Collection + + + + setEditingCollection( + editingCollection + ? { ...editingCollection, name: e.target.value } + : null + ) + } + sx={{ + mb: 2, + mt: 1, + // focused label color + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + // focused outline color + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + // optional: hover outline color + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + setEditingCollection( + editingCollection + ? { ...editingCollection, description: e.target.value } + : null + ) + } + sx={{ + mb: 2, + // focused label color + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + + // focused outline color + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + + // optional: hover outline color + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + {/* + + setEditingCollection( + editingCollection + ? { ...editingCollection, is_public: e.target.checked } + : null + ) + } + style={{ width: 18, height: 18, cursor: "pointer" }} + /> + + */} + {/* ✅ Replace HTML checkbox with Material-UI Checkbox */} + ) => + setEditingCollection( + editingCollection + ? 
{ ...editingCollection, is_public: e.target.checked } + : null + ) + } + sx={{ + color: Colors.purple, + "&.Mui-checked": { + color: Colors.purple, + }, + }} + /> + } + label="Make this collection public" + /> + + Public collections can be viewed by others (feature coming soon) + + + + + + + + + ); +}; + +export default CollectionsTab; diff --git a/src/components/User/Dashboard/DatasetOrganizer/DropZone.tsx b/src/components/User/Dashboard/DatasetOrganizer/DropZone.tsx new file mode 100644 index 0000000..679d978 --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/DropZone.tsx @@ -0,0 +1,235 @@ +// src/components/DatasetOrganizer/DropZone.tsx +import { processFile, processFolder, processZip } from "./utils/fileProcessors"; +import { CloudUpload, Add, CheckCircle } from "@mui/icons-material"; +import { + Box, + Typography, + Paper, + Button, + TextField, + CircularProgress, +} from "@mui/material"; +import { Colors } from "design/theme"; +import React, { useState, useRef } from "react"; +import { FileItem } from "redux/projects/types/projects.interface"; + +interface DropZoneProps { + files: FileItem[]; + setFiles: React.Dispatch>; + baseDirectoryPath: string; // ✅ ADD this line + // setBaseDirectoryPath: React.Dispatch>; + setBaseDirectoryPath: (path: string) => void; + selectedIds: Set; + setSelectedIds: React.Dispatch>>; + expandedIds: Set; + setExpandedIds: React.Dispatch>>; +} + +const DropZone: React.FC = ({ + files, + setFiles, + baseDirectoryPath, // ✅ ADD this line + setBaseDirectoryPath, // ✅ ADD this line + selectedIds, + setSelectedIds, + expandedIds, + setExpandedIds, +}) => { + const [isDragging, setIsDragging] = useState(false); + const [isProcessing, setIsProcessing] = useState(false); // ← add + const fileInputRef = useRef(null); + // const [basePath, setBasePath] = useState(""); // change + + const handleDragOver = (e: React.DragEvent) => { + e.preventDefault(); + setIsDragging(true); + }; + + const handleDragLeave = (e: 
React.DragEvent) => { + e.preventDefault(); + setIsDragging(false); + }; + + const handleDrop = async (e: React.DragEvent) => { + e.preventDefault(); + setIsDragging(false); + setIsProcessing(true); // ← add + + const items = Array.from(e.dataTransfer.items); // detect if it is a folder + const droppedFiles = Array.from(e.dataTransfer.files); // only gives file objects, can't detect folders + + // Separate folders and files + const folderEntries: any[] = []; + const fileItems: File[] = []; + + for (let i = 0; i < items.length; i++) { + const entry = (items[i] as any).webkitGetAsEntry?.(); + if (entry && entry.isDirectory) { + folderEntries.push(entry); + } else if (droppedFiles[i]) { + fileItems.push(droppedFiles[i]); + } + } + try { + // Process folders + for (const folderEntry of folderEntries) { + const folderFiles = await processFolder( + folderEntry, + null, + baseDirectoryPath + ); + setFiles((prev) => [...prev, ...folderFiles]); + } + + // Process files + for (const file of fileItems) { + if (file.name.toLowerCase().endsWith(".zip")) { + const zipFiles = await processZip(file, baseDirectoryPath); + setFiles((prev) => [...prev, ...zipFiles]); + } else { + const fileItem = await processFile(file, baseDirectoryPath); + setFiles((prev) => [...prev, fileItem]); + } + } + } finally { + setIsProcessing(false); + } + }; + + const handleFileSelect = async (e: React.ChangeEvent) => { + const selectedFiles = Array.from(e.target.files || []); + setIsProcessing(true); + + try { + for (const file of selectedFiles) { + if (file.name.toLowerCase().endsWith(".zip")) { + const zipFiles = await processZip(file, baseDirectoryPath); + setFiles((prev) => [...prev, ...zipFiles]); + } else { + const fileItem = await processFile(file, baseDirectoryPath); + setFiles((prev) => [...prev, fileItem]); + } + } + } finally { + setIsProcessing(false); + // Reset input + e.target.value = ""; + } + }; + + return ( + + {/* Show file count if files exist */} + {files.length > 0 && ( + + + 
Dataset Files + + + {files.length} file{files.length !== 1 ? "s" : ""} added + + + )} + + {/* Always show drop zone */} + fileInputRef.current?.click()} + sx={{ + border: `2px dashed ${isDragging ? Colors.purple : Colors.lightGray}`, + borderRadius: 2, + p: 6, + textAlign: "center", + cursor: "pointer", + transition: "all 0.2s", + backgroundColor: isDragging + ? "rgba(128, 90, 213, 0.05)" + : "transparent", + "&:hover": { + borderColor: Colors.purple, + backgroundColor: "rgba(128, 90, 213, 0.05)", + }, + }} + > + {/* 0 ? 40 : 64, // ← Smaller icon when files exist + color: Colors.purple, + mb: 1, + }} + /> */} + {isProcessing ? ( + + ) : ( + 0 ? 40 : 64, + color: Colors.purple, + mb: 1, + }} + /> + )} + + 0 ? "body1" : "h6"} gutterBottom> + {/* {files.length > 0 + ? "Drop more files here" + : "Drop your neuroimaging files here"} */} + {isProcessing + ? "Processing files..." + : files.length > 0 + ? "Drop more files here" + : "Drop your neuroimaging files here"} + + + Supports NIfTI, DICOM, SNIRF, MATLAB, Homer3, HDF5, NeuroJSON, + folders, and ZIP archives + + + {files.length === 0 && ( + <> + + 📁 Folders • 🗜️ ZIP files • 📄 Documents (.json, .txt, .md) • 📊 + Office (.docx, .pdf, .xlsx) + + + + )} + + + setBasePath(e.target.value)} //change + value={baseDirectoryPath} // ✅ CHANGE: Use prop + onChange={(e) => setBaseDirectoryPath(e.target.value)} // ✅ CHANGE: Use prop setter + fullWidth + size="small" + sx={{ mb: 2 }} + helperText="Enter the folder path where these files are located" + /> + + ); +}; + +export default DropZone; diff --git a/src/components/User/Dashboard/DatasetOrganizer/FileTree.tsx b/src/components/User/Dashboard/DatasetOrganizer/FileTree.tsx new file mode 100644 index 0000000..29c0bd5 --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/FileTree.tsx @@ -0,0 +1,927 @@ +import { generateId } from "./utils/fileProcessors"; +import { + Folder, + InsertDriveFile, + ExpandMore, + ChevronRight, + Delete, + NoteAdd, + Edit, + Description, + 
Add, + AutoAwesome, + FolderSpecial, + Download, +} from "@mui/icons-material"; +import { + Box, + Typography, + IconButton, + Paper, + Button, + TextField, + Dialog, + DialogTitle, + DialogContent, + DialogActions, +} from "@mui/material"; +import { Colors } from "design/theme"; +import JSZip from "jszip"; +import React, { useState } from "react"; +import { FileItem } from "redux/projects/types/projects.interface"; + +interface FileTreeProps { + files: FileItem[]; + setFiles: React.Dispatch>; + selectedIds: Set; + setSelectedIds: React.Dispatch>>; + expandedIds: Set; + setExpandedIds: React.Dispatch>>; +} + +const FileTree: React.FC = ({ + files, + setFiles, + selectedIds, + setSelectedIds, + expandedIds, + setExpandedIds, +}) => { + const [noteDialogOpen, setNoteDialogOpen] = useState(false); + const [editingNoteId, setEditingNoteId] = useState(null); + const [noteText, setNoteText] = useState(""); + const [metaEditorOpen, setMetaEditorOpen] = useState(false); + const [metaType, setMetaType] = useState< + "readme" | "subject" | "instructions" | null + >(null); + const [metaFileName, setMetaFileName] = useState(""); + const [metaContent, setMetaContent] = useState(""); + + // split files into two groups + const userFiles = files.filter((f) => f.source !== "output"); + const outputFiles = files.filter((f) => f.source === "output"); + + // In FileTree.tsx + const metaConfigs = { + readme: { + label: "Add README File", + defaultFilename: "README.md", + placeholder: + "Enter dataset description, authors, license, and other important information...", + }, + subject: { + label: "Add Subject/Session Info", + defaultFilename: "participants.txt", + placeholder: + "Enter subject IDs, session info, and participant metadata...\n\nExample:\nSubject ID: sub-01\nSession: ses-01\nAge: 25\nSex: M", + }, + instructions: { + label: "Add Conversion Instructions", + defaultFilename: "CONVERSION_NOTES.md", + placeholder: + "Enter instructions for converting this dataset to BIDS 
format...\n\nExample:\n- Rename T1w files to sub-XX_T1w.nii.gz\n- Create JSON sidecars for each scan\n- Map task names to BIDS task labels", + }, + }; + + const handleOpenMetaEditor = ( + type: "readme" | "subject" | "instructions" + ) => { + const config = metaConfigs[type]; + setMetaType(type); + setMetaFileName(config.defaultFilename); + setMetaContent(""); + setMetaEditorOpen(true); + }; + + const handleSaveMetaFile = () => { + if (!metaFileName.trim()) { + alert("Please enter a filename"); + return; + } + + const newFile: FileItem = { + id: generateId(), + name: metaFileName.trim(), + type: "file", + parentId: null, + fileType: "meta", + content: metaContent, + contentType: "text", + sourcePath: undefined, + isUserMeta: true, + }; + + setFiles((prev) => [...prev, newFile]); + setMetaEditorOpen(false); + setMetaType(null); + setMetaFileName(""); + setMetaContent(""); + }; + + const handleToggleExpand = (id: string) => { + setExpandedIds((prev) => { + const newSet = new Set(prev); + if (newSet.has(id)) { + newSet.delete(id); + } else { + newSet.add(id); + } + return newSet; + }); + }; + + const handleToggleSelect = (id: string) => { + setSelectedIds((prev) => { + const newSet = new Set(prev); + if (newSet.has(id)) { + newSet.delete(id); + } else { + newSet.add(id); + } + return newSet; + }); + }; + + const handleSelectAll = () => { + const allIds = files.map((f) => f.id); + setSelectedIds(new Set(allIds)); + }; + + const handleDeselectAll = () => { + setSelectedIds(new Set()); + }; + + const handleDeleteSelected = () => { + if (selectedIds.size === 0) return; + if (!window.confirm(`Delete ${selectedIds.size} selected item(s)?`)) return; + + // Collect all descendants + const toDelete = new Set(selectedIds); + const collectDescendants = (parentId: string) => { + files.forEach((file) => { + if (file.parentId === parentId) { + toDelete.add(file.id); + collectDescendants(file.id); + } + }); + }; + + selectedIds.forEach((id) => collectDescendants(id)); + + // Remove 
files + setFiles((prev) => prev.filter((f) => !toDelete.has(f.id))); + setSelectedIds(new Set()); + }; + + const handleDownloadOutputFolder = async ( + folderId: string, + folderName: string + ) => { + const zip = new JSZip(); + + // Recursive function to add files to zip + const addToZip = ( + parentId: string, + zipFolder: any, + currentPath: string + ) => { + const children = files.filter((f) => f.parentId === parentId); + children.forEach((child) => { + if (child.type === "folder") { + const subFolder = zipFolder.folder(child.name); + addToZip(child.id, subFolder, `${currentPath}/${child.name}`); + } else { + if (child.content) { + zipFolder.file(child.name, child.content); + } + } + }); + }; + + addToZip(folderId, zip, folderName); + + const blob = await zip.generateAsync({ type: "blob" }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = `${folderName}.zip`; + a.click(); + URL.revokeObjectURL(url); + }; + + const handleAddNote = (id: string) => { + const file = files.find((f) => f.id === id); + setEditingNoteId(id); + setNoteText(file?.note || ""); + setNoteDialogOpen(true); + }; + + const handleSaveNote = () => { + if (!editingNoteId) return; + + setFiles((prev) => + prev.map((f) => (f.id === editingNoteId ? 
{ ...f, note: noteText } : f)) + ); + + setNoteDialogOpen(false); + setEditingNoteId(null); + setNoteText(""); + }; + + const renderFileIcon = (file: FileItem) => { + if (file.source === "output") { + if (file.type === "folder") { + return ; + } + return ; + } + // AI generated files — use AutoAwesome icon with purple color + if (file.source === "ai") { + return ( + <> + {file.source === "ai" && ( + + + + AI + + + )} + + ); + } + if (file.type === "folder" || file.type === "zip") { + return ; + } + + // Color based on file type + const colorMap: Record = { + text: "#22c55e", + nifti: "#f472b6", + hdf5: "#fb923c", + neurojsonText: Colors.purple, + neurojsonBinary: Colors.secondaryPurple, + office: "#38bdf8", + meta: Colors.yellow, + matlab: Colors.black, + dicom: "#34d399", + nirs: Colors.darkOrange, // show homer3 in footer legend + array: "#9ca3af", + }; + + const color = colorMap[file.fileType || "other"] || "#9ca3af"; + return ; + }; + + // one item in the tree + const renderTreeItem = ( + file: FileItem, + depth: number = 0, + filePool: FileItem[] = files + ) => { + // const children = files.filter((f) => f.parentId === file.id); // origin + const children = filePool.filter((f) => f.parentId === file.id); + const hasChildren = children.length > 0; + + // Check if file has content or children to show expand button + const hasContent = + file.content !== undefined && + file.content !== null && + file.content !== ""; + const canExpand = hasChildren || hasContent; + + const isExpanded = expandedIds.has(file.id); + const isSelected = selectedIds.has(file.id); + + return ( + + {/* File Row */} + handleToggleSelect(file.id)} + > + {/* Expand/Collapse Icon */} + {canExpand ? ( + { + e.stopPropagation(); + handleToggleExpand(file.id); + }} + sx={{ p: 0.25 }} + > + {isExpanded ? 
( + + ) : ( + + )} + + ) : ( + + )} + + {/* File Icon */} + {renderFileIcon(file)} + + {/* File Name */} + + {file.name} + + + {/* Download button for output root folders */} + {file.source === "output" && + file.parentId === null && + file.type === "folder" && ( + { + e.stopPropagation(); + handleDownloadOutputFolder(file.id, file.name); + }} + sx={{ p: 0.25, color: Colors.purple }} + title="Download as ZIP" + > + + + )} + + {/* Add timestamp for AI files */} + {file.source === "ai" && file.generatedAt && ( + + {file.generatedAt} + + )} + + {/* Note Icon */} + { + e.stopPropagation(); + handleAddNote(file.id); + }} + sx={{ + p: 0.25, + color: file.note ? Colors.darkGreen : "text.secondary", + }} + title={file.note ? "Edit note" : "Add note"} + > + {file.note ? ( + + ) : ( + + )} + + + + {/* Show Note Preview */} + {file.note && isExpanded && ( + + + Note: {file.note} + + + )} + + {/* Show Content Preview */} + {hasContent && isExpanded && ( + + {file.content} + + )} + + {/* Children */} + {hasChildren && isExpanded && ( + + {children.map((child) => + renderTreeItem(child, depth + 1, filePool) + )} + // add filePool + )} + + ); + }; + + const rootFiles = files.filter((f) => f.parentId === null); + + if (files.length === 0) { + return ( + + + No files yet. Drop files to get started. + + + ); + } + + return ( + <> + + {/* Header */} + + + + Virtual File System + + + {files.length} item{files.length !== 1 ? 
"s" : ""} + + + + + + + + + + {/* Select All / Deselect All */} + + + + {selectedIds.size > 0 && ( + + )} + + + + {/* File Tree */} + {/* + {rootFiles.map((file) => renderTreeItem(file))} + */} + + + {userFiles + .filter((f) => f.parentId === null) + .map((f) => renderTreeItem(f, 0, userFiles))} + + {outputFiles.length > 0 && ( + <> + + + + Saved Outputs + + + {outputFiles + .filter((f) => f.parentId === null) + .map((f) => renderTreeItem(f, 0, outputFiles))} + + )} + + + {/* Footer Legend */} + + + + + Text + + + + NIfTI + + + + HDF5 + + + + NeuroJSON + + + + Office + + + + User Meta + + + + DICOM + + + + MATLAB + + + + Homer3 + + + + Array + + + + + + AI + + + AI Generated + + + + + + {/* Note Editor Dialog */} + setNoteDialogOpen(false)} + maxWidth="sm" + fullWidth + > + + {editingNoteId && files.find((f) => f.id === editingNoteId) + ? `Note for: ${files.find((f) => f.id === editingNoteId)?.name}` + : "Add Note"} + + + setNoteText(e.target.value)} + sx={{ + mt: 1, + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + + + + + + + {/* Meta File Editor Dialog */} + setMetaEditorOpen(false)} + maxWidth="sm" + fullWidth + > + + {metaType && metaConfigs[metaType].label} + + + setMetaFileName(e.target.value)} + sx={{ + mb: 2, + mt: 1, + "& .MuiInputLabel-root.Mui-focused": { color: Colors.purple }, + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + setMetaContent(e.target.value)} + sx={{ + "& .MuiInputLabel-root.Mui-focused": { color: Colors.purple }, + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + + + + + + + ); +}; + +export default FileTree; diff --git a/src/components/User/Dashboard/DatasetOrganizer/LLMPanel.tsx 
b/src/components/User/Dashboard/DatasetOrganizer/LLMPanel.tsx new file mode 100644 index 0000000..d9d6366 --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/LLMPanel.tsx @@ -0,0 +1,1642 @@ +import { generateId } from "./utils/fileProcessors"; +import { extractSubjectAnalysis } from "./utils/filenameTokenizer"; +//add +import { + buildFileSummary, + analyzeFilePatterns, + getUserContext, + getFileAnnotations, + downloadJSON, + buildEvidenceBundle, + extractSubjectsFromFiles, + buildIngestInfo, +} from "./utils/llmHelpers"; +import { + getDatasetDescriptionPrompt, + getReadmePrompt, + getParticipantsPrompt, + getConversionScriptPrompt, + getBIDSPlanPrompt, +} from "./utils/llmPrompts"; +import { + Close, + ContentCopy, + Download, + AutoAwesome, + DriveFileMove, +} from "@mui/icons-material"; +import { + Box, + Paper, + Typography, + Button, + TextField, + Select, + MenuItem, + FormControl, + InputLabel, + CircularProgress, + IconButton, + Alert, +} from "@mui/material"; +import { Colors } from "design/theme"; +import JSZip from "jszip"; +import React, { useState, useEffect } from "react"; +import { FileItem } from "redux/projects/types/projects.interface"; +import { OllamaService } from "services/ollama.service"; + +interface LLMPanelProps { + files: FileItem[]; + baseDirectoryPath: string; + setBaseDirectoryPath: (path: string) => void; + evidenceBundle: any; // ✅ Add + setEvidenceBundle: (bundle: any) => void; // ✅ Add + trioGenerated: boolean; // ✅ Add + setTrioGenerated: (value: boolean) => void; // ✅ Add + updateFiles: (updater: React.SetStateAction) => void; // ✅ Add + onClose: () => void; +} + +interface LLMProvider { + name: string; + baseUrl: string; + models: Array<{ id: string; name: string }>; + noApiKey?: boolean; + isAnthropic?: boolean; + // customUrl?: boolean; +} + +const llmProviders: Record = { + ollama: { + name: "Ollama (Local Server)", + // baseUrl: "http://localhost:11434/v1/chat/completions", + baseUrl: "", + models: [ + { id: 
"qwen3-coder-next:latest", name: "Qwen 3 Coder Next" }, + { id: "qwen3-coder-careful:latest", name: "Qwen 3 Coder Careful" }, + { id: "qwen3.5:9b", name: "Qwen 3.5 9B" }, + { id: "qwen2.5-coder:latest", name: "Qwen 2.5 Coder (7.6B)" }, + { id: "qwen2.5-coder:7b", name: "Qwen 2.5 Coder 7B" }, + ], + noApiKey: true, + // customUrl: true, + }, + groq: { + name: "Groq (Free API Key - 14,400 req/day)", + baseUrl: "https://api.groq.com/openai/v1/chat/completions", + models: [ + { id: "llama-3.3-70b-versatile", name: "Llama 3.3 70B" }, + { id: "llama-3.1-8b-instant", name: "Llama 3.1 8B (Fast)" }, + { id: "mixtral-8x7b-32768", name: "Mixtral 8x7B" }, + ], + }, + openrouter: { + name: "OpenRouter (Free models available)", + baseUrl: "https://openrouter.ai/api/v1/chat/completions", + models: [ + { + id: "meta-llama/llama-3.1-8b-instruct:free", + name: "Llama 3.1 8B (Free)", + }, + { id: "google/gemma-2-9b-it:free", name: "Gemma 2 9B (Free)" }, + { id: "mistralai/mistral-7b-instruct:free", name: "Mistral 7B (Free)" }, + ], + }, + anthropic: { + name: "Anthropic Claude (Paid)", + baseUrl: "https://api.anthropic.com/v1/messages", + models: [ + { id: "claude-sonnet-4-20250514", name: "Claude Sonnet 4" }, + { id: "claude-3-5-haiku-20241022", name: "Claude 3.5 Haiku" }, + ], + isAnthropic: true, + }, + openai: { + name: "OpenAI (Paid)", + baseUrl: "https://api.openai.com/v1/chat/completions", + models: [ + { id: "gpt-4o-mini", name: "GPT-4o Mini" }, + { id: "gpt-4o", name: "GPT-4o" }, + ], + }, +}; + +const LLMPanel: React.FC = ({ + files, + baseDirectoryPath, + setBaseDirectoryPath, + evidenceBundle, // ✅ Add + setEvidenceBundle, // ✅ Add + trioGenerated, // ✅ Add + setTrioGenerated, // ✅ Add + updateFiles, // ✅ Add + onClose, +}) => { + const [provider, setProvider] = useState("ollama"); + const [model, setModel] = useState("qwen3-coder-next:latest"); + // const [ollamaUrl, setOllamaUrl] = useState( + // "http://jin.neu.edu:11434" + // ); + const [apiKey, setApiKey] = 
useState(""); + const [generatedScript, setGeneratedScript] = useState(""); + const [bidsPlan, setBidsPlan] = useState(""); // add bids plan + const [loading, setLoading] = useState(false); // add loading spin to generate script button + const [error, setError] = useState(null); + const [status, setStatus] = useState(""); + const [generatingEvidence, setGeneratingEvidence] = useState(false); // Add loading spin to evidence button + const [generatingTrio, setGeneratingTrio] = useState(false); // Add loading spin to trio button + const [abortController, setAbortController] = + useState(null); + + const [subjectAnalysis, setSubjectAnalysis] = useState(null); + const [nSubjects, setNSubjects] = useState(""); + const [modalityHint, setModalityHint] = useState("mri"); + const [describeText, setDescribeText] = useState(""); + const [nSubjectsError, setNSubjectsError] = useState(false); + const [modalityError, setModalityError] = useState(false); + + const [panelHeight, setPanelHeight] = useState(450); + const [isResizing, setIsResizing] = useState(false); + + // ======================================================================== + // BUTTON 1: GENERATE EVIDENCE BUNDLE + // ======================================================================== + const handleGenerateEvidence = () => { + const hasNSubjectsError = !nSubjects || parseInt(nSubjects) < 1; + const hasModalityError = !modalityHint; + setNSubjectsError(hasNSubjectsError); + setModalityError(hasModalityError); + if (hasNSubjectsError || hasModalityError) return; + + if (!baseDirectoryPath.trim()) { + setError("Please enter a base directory path first"); + return; + } + + setGeneratingEvidence(true); + setError(null); + setStatus("Building evidence bundle..."); + try { + const bundle = buildEvidenceBundle(files, baseDirectoryPath, { + nSubjects: nSubjects ? 
parseInt(nSubjects) : null, + modalityHint, + describeText, + }); + + setEvidenceBundle(bundle); + downloadJSON(bundle, "evidence_bundle.json"); + setStatus("✓ Evidence bundle generated and downloaded!"); + } catch (err: any) { + setError("Failed to generate evidence bundle"); + } finally { + setGeneratingEvidence(false); + } + }; + + // ======================================================================== + // BUTTON 2: Generate BIDS Trio with LLM calls + // ======================================================================== + const handleGenerateTrio = async () => { + if (!evidenceBundle) { + setError("Please generate evidence bundle first"); + return; + } + + if (!currentProvider.noApiKey && !apiKey.trim()) { + setError("Please enter an API key"); + return; + } + + // Create abort controller + const controller = new AbortController(); + setAbortController(controller); + + setGeneratingTrio(true); + setError(null); + setStatus("Generating BIDS trio files..."); + + try { + const userText = evidenceBundle.user_hints.user_text || ""; + + // ========================================== + // Call 1: Generate dataset_description.json + // ========================================== + let datasetDesc: any; + if (evidenceBundle.trio_found?.["dataset_description.json"]) { + setStatus("1/3 dataset_description.json already exists, skipping..."); + const existing = files.find( + (f) => f.source === "user" && f.name === "dataset_description.json" + ); + datasetDesc = existing?.content ? 
JSON.parse(existing.content) : {}; + } else { + setStatus("1/3 Generating dataset_description.json..."); + const ddPrompt = getDatasetDescriptionPrompt(userText, evidenceBundle); + + let ddResponse; + if (currentProvider.isAnthropic) { + ddResponse = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + "x-api-key": apiKey, + "anthropic-version": "2023-06-01", + }, + body: JSON.stringify({ + model, + max_tokens: 2048, + messages: [{ role: "user", content: ddPrompt }], + }), + }); + } else if (provider === "ollama") { + // const ollamaBaseUrl = ollamaUrl || "http://localhost:11434"; + // ddResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, { + // method: "POST", + // signal: controller.signal, + // headers: { "Content-Type": "application/json" }, + // body: JSON.stringify({ + // model, + // messages: [{ role: "user", content: ddPrompt }], + // stream: false, + // }), + // }); + ddResponse = await OllamaService.chat(model, [ + { role: "user", content: ddPrompt }, + ]); + } else { + ddResponse = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ + model, + messages: [{ role: "user", content: ddPrompt }], + max_tokens: 2048, + }), + }); + } + + // const ddData = await ddResponse.json(); + const ddData = + provider === "ollama" ? ddResponse : await ddResponse.json(); + let ddText = currentProvider.isAnthropic + ? 
ddData.content[0].text + : ddData.choices[0].message.content; + + // Clean up markdown fences + ddText = ddText + .replace(/^```json\n?/g, "") + .replace(/\n?```$/g, "") + .trim(); + datasetDesc = JSON.parse(ddText); + } + + // ========================================== + // Call 2: Generate README.md + // ========================================== + let readmeContent: string; + if (evidenceBundle.trio_found?.["README.md"]) { + setStatus("2/3 README.md already exists, skipping..."); + const existing = files.find( + (f) => + f.source === "user" && + ["README.md", "README.txt", "README.rst", "readme.md"].includes( + f.name + ) + ); + readmeContent = existing?.content || ""; + } else { + setStatus("2/3 Generating README.md..."); + const readmePrompt = getReadmePrompt(userText); + + let readmeResponse; + if (currentProvider.isAnthropic) { + readmeResponse = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + "x-api-key": apiKey, + "anthropic-version": "2023-06-01", + }, + body: JSON.stringify({ + model, + max_tokens: 2048, + messages: [{ role: "user", content: readmePrompt }], + }), + }); + } else if (provider === "ollama") { + // const ollamaBaseUrl = ollamaUrl || "http://localhost:11434"; + // readmeResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, { + // method: "POST", + // signal: controller.signal, + // headers: { "Content-Type": "application/json" }, + // body: JSON.stringify({ + // model, + // messages: [{ role: "user", content: readmePrompt }], + // stream: false, + // }), + // }); + readmeResponse = await OllamaService.chat(model, [ + { role: "user", content: readmePrompt }, + ]); + } else { + readmeResponse = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ + model, + messages: [{ role: "user", content: 
readmePrompt }], + max_tokens: 2048, + }), + }); + } + + // const readmeData = await readmeResponse.json(); + const readmeData = + provider === "ollama" ? readmeResponse : await readmeResponse.json(); + readmeContent = currentProvider.isAnthropic + ? readmeData.content[0].text + : readmeData.choices[0].message.content; + } + // ========================================== + // Call 3: Generate participants.tsv + // ========================================== + let participantsContent: string; + if (evidenceBundle.trio_found?.["participants.tsv"]) { + setStatus("3/3 participants.tsv already exists, skipping..."); + const existing = files.find( + (f) => f.source === "user" && f.name === "participants.tsv" + ); + participantsContent = existing?.content || ""; + } else { + setStatus("3/3 Generating participants.tsv..."); + const partsPrompt = getParticipantsPrompt(userText); + + let partsResponse; + if (currentProvider.isAnthropic) { + partsResponse = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + "x-api-key": apiKey, + "anthropic-version": "2023-06-01", + }, + body: JSON.stringify({ + model, + max_tokens: 1024, + messages: [{ role: "user", content: partsPrompt }], + }), + }); + } else if (provider === "ollama") { + // const ollamaBaseUrl = ollamaUrl || "http://localhost:11434"; + // partsResponse = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, { + // method: "POST", + // signal: controller.signal, + // headers: { "Content-Type": "application/json" }, + // body: JSON.stringify({ + // model, + // messages: [{ role: "user", content: partsPrompt }], + // stream: false, + // }), + // }); + partsResponse = await OllamaService.chat(model, [ + { role: "user", content: partsPrompt }, + ]); + } else { + partsResponse = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + Authorization: `Bearer 
${apiKey}`, + }, + body: JSON.stringify({ + model, + messages: [{ role: "user", content: partsPrompt }], + max_tokens: 1024, + }), + }); + } + + // const partsData = await partsResponse.json(); + const partsData = + provider === "ollama" ? partsResponse : await partsResponse.json(); + const participantsRaw = currentProvider.isAnthropic + ? partsData.content[0].text + : partsData.choices[0].message.content; + + // Build TSV from schema + try { + const schemaText = participantsRaw + .replace(/^```json\n?/g, "") + .replace(/\n?```$/g, "") + .trim(); + const schema = JSON.parse(schemaText); + const columns: string[] = schema.columns.map((c: any) => c.name); + + // Get subject IDs from evidence bundle (extracted by Python-style analysis) + // const idMapping = + // evidenceBundle?.subject_analysis?.id_mapping?.id_mapping; + // const subjectLabels: string[] = idMapping + // ? Object.values(idMapping).map((id) => `sub-${id}`) + // : ["sub-01"]; // fallback if no subject analysis + // Get subject IDs from subjectAnalysis state (computed at plan stage) + // Fall back to computing fresh if plan hasn't been run yet + const currentSubjectAnalysis = + subjectAnalysis || + extractSubjectAnalysis( + evidenceBundle?.all_files || [], + evidenceBundle?.user_hints?.n_subjects, + evidenceBundle?.filename_analysis?.python_statistics + ?.dominant_prefixes + ); + const idMap = currentSubjectAnalysis?.id_mapping?.id_mapping; + const subjectLabels: string[] = + idMap && Object.keys(idMap).length > 0 + ? Object.values(idMap).map((id) => `sub-${id}`) + : Array.from( + { length: evidenceBundle?.user_hints?.n_subjects || 1 }, + (_, i) => `sub-${String(i + 1).padStart(2, "0")}` + ); + + const header = columns.join("\t"); + const rows = subjectLabels.map((subId) => + columns + .map((col: string) => (col === "participant_id" ? 
subId : "n/a")) + .join("\t") + ); + participantsContent = [header, ...rows].join("\n"); + } catch (e) { + // Fallback: LLM didn't return valid JSON schema, use raw content + participantsContent = participantsRaw + .replace(/^```\n?/g, "") + .replace(/\n?```$/g, "") + .trim(); + } + } + // ========================================== + // Add trio files to Virtual File System + // ========================================== + const timestamp = new Date().toLocaleString(); + const trioFiles: FileItem[] = [ + { + id: generateId(), + name: "dataset_description.json", + type: "file", + fileType: "meta", + content: JSON.stringify(datasetDesc, null, 2), + contentType: "text", + isUserMeta: true, + parentId: null, + source: "ai", + generatedAt: timestamp, + }, + { + id: generateId(), + name: "README.md", + type: "file", + fileType: "meta", + content: readmeContent + .replace(/^```markdown\n?/g, "") + .replace(/\n?```$/g, "") + .trim(), + contentType: "text", + isUserMeta: true, + parentId: null, + source: "ai", + generatedAt: timestamp, + }, + { + id: generateId(), + name: "participants.tsv", + type: "file", + fileType: "meta", + content: participantsContent + .replace(/^```\n?/g, "") + .replace(/\n?```$/g, "") + .trim(), + contentType: "text", + isUserMeta: true, + parentId: null, + source: "ai", + generatedAt: timestamp, + }, + ]; + // replace existing trio files, add if not exist + updateFiles((prev) => { + const trioNames = [ + "dataset_description.json", + "README.md", + "participants.tsv", + ]; + + // Remove old AI generated trio files + const withoutOldTrio = prev.filter( + (f) => !(f.source === "ai" && trioNames.includes(f.name)) + ); + + // Add new trio files + // return [...withoutOldTrio, ...trioFiles]; + + // Only add AI-generated files for ones that weren't user-uploaded + const newTrioFiles = trioFiles.filter( + (tf) => + !evidenceBundle.trio_found?.[ + tf.name as keyof typeof evidenceBundle.trio_found + ] + ); + + return [...withoutOldTrio, ...newTrioFiles]; + 
}); + setTrioGenerated(true); + setStatus( + "✓ BIDS trio files generated and added to Virtual File System!" + ); + } catch (err: any) { + if (err.name === "AbortError") { + setStatus("❌ Generation cancelled"); + } else { + setError(err.message || "Failed to generate trio files"); + setStatus("❌ Error generating trio files"); + } + } finally { + setGeneratingTrio(false); + setAbortController(null); // Clear controller + } + }; + + const handleMouseDown = (e: React.MouseEvent) => { + setIsResizing(true); + e.preventDefault(); + }; + + const handleMouseMove = (e: MouseEvent) => { + if (!isResizing) return; + + const newHeight = window.innerHeight - e.clientY; + if (newHeight >= 100 && newHeight <= window.innerHeight - 100) { + setPanelHeight(newHeight); + } + }; + + const handleMouseUp = () => { + setIsResizing(false); + }; + + // Add event listeners + useEffect(() => { + if (isResizing) { + document.addEventListener("mousemove", handleMouseMove); + document.addEventListener("mouseup", handleMouseUp); + document.body.style.cursor = "ns-resize"; + + return () => { + document.removeEventListener("mousemove", handleMouseMove); + document.removeEventListener("mouseup", handleMouseUp); + document.body.style.cursor = ""; + }; + } + }, [isResizing]); + + const currentProvider = llmProviders[provider]; + + const handleGenerate = async () => { + if (!currentProvider.noApiKey && !apiKey.trim()) { + setError("Please enter an API key"); + return; + } + + if (!baseDirectoryPath.trim()) { + setError("Please enter a base directory path"); + return; + } + + // Create abort controller + const controller = new AbortController(); + setAbortController(controller); + + setLoading(true); + setError(null); + setStatus(`Generating script using ${currentProvider.name}...`); + + const fileSummary = buildFileSummary(files); + const filePatterns = analyzeFilePatterns(files); + const userContext = getUserContext(files); + const annotations = getFileAnnotations(files); + // console.log("=== 
PROMPT BEING SENT TO LLM ==="); + // console.log(fileSummary); + // console.log(filePatterns); + // console.log(userContext); + // console.log("================================="); + + // UPDATED: Improved prompt that uses trio files + const prompt = getConversionScriptPrompt( + baseDirectoryPath, + fileSummary, + filePatterns, + userContext, + annotations + ); + + try { + let response; + + if (provider === "ollama") { + // const ollamaBaseUrl = ollamaUrl || "http://localhost:11434"; + // response = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, { + // method: "POST", + // signal: controller.signal, + // headers: { + // "Content-Type": "application/json", + // }, + // body: JSON.stringify({ + // model, + // messages: [ + // { + // role: "system", + // content: + // "You are a neuroimaging data expert specializing in BIDS format conversion. Output only Python code without markdown fences or explanations.", + // }, + // { role: "user", content: prompt }, + // ], + // stream: false, + // }), + // }); + response = await OllamaService.chat(model, [ + { + role: "system", + content: + "You are a neuroimaging data expert specializing in BIDS format conversion. 
Output only Python code without markdown fences or explanations.", + }, + { role: "user", content: prompt }, + ]); + } else if (currentProvider.isAnthropic) { + response = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + "x-api-key": apiKey, + "anthropic-version": "2023-06-01", + }, + body: JSON.stringify({ + model, + max_tokens: 4096, + messages: [{ role: "user", content: prompt }], + }), + }); + } else { + const headers: Record = { + "Content-Type": "application/json", + }; + + if (!currentProvider.noApiKey) { + headers["Authorization"] = `Bearer ${apiKey}`; + } + + response = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers, + body: JSON.stringify({ + model, + messages: [ + { + role: "system", + content: + "You are a neuroimaging data expert specializing in BIDS format conversion. Output only Python code without markdown fences or explanations.", + }, + { role: "user", content: prompt }, + ], + max_tokens: 4096, + temperature: 0.7, + }), + }); + } + + // const data = await response.json(); + const data = provider === "ollama" ? response : await response.json(); + + // if (!response.ok) { + // throw new Error(data.error?.message || "Failed to generate script"); + // } + if (!response.ok && provider !== "ollama") { + throw new Error(data.error?.message || "Failed to generate script"); + } + + // let script = ""; + // if (currentProvider.isAnthropic) { + // script = data.content[0].text; + // } else { + // script = data.choices[0].message.content; + // } + let script = currentProvider.isAnthropic + ? 
data.content[0].text + : data.choices[0].message.content; + + // Clean up markdown fences if AI included them anyway + script = script.replace(/^```python\n?/g, "").replace(/\n?```$/g, ""); + + setGeneratedScript(script); + setStatus(`✓ Script generated using ${currentProvider.name}`); + } catch (err: any) { + if (err.name === "AbortError") { + setStatus("❌ Generation cancelled"); + } else { + setError(err.message || "Failed to generate script"); + setStatus("❌ Error generating script"); + } + } finally { + setLoading(false); + setAbortController(null); // Clear controller + } + }; + + const handleGeneratePlan = async () => { + if (!currentProvider.noApiKey && !apiKey.trim()) { + setError("Please enter an API key"); + return; + } + if (!baseDirectoryPath.trim()) { + setError("Please enter a base directory path"); + return; + } + + const controller = new AbortController(); + setAbortController(controller); + setLoading(true); + setError(null); + setStatus(`Generating BIDSPlan.yaml using ${currentProvider.name}...`); + + // ── Compute subject analysis (mirrors planner.py Step 1) + const allFiles = evidenceBundle?.all_files || []; + const userNSubjects = evidenceBundle?.user_hints?.n_subjects; + const dominantPrefixes = + evidenceBundle?.filename_analysis?.python_statistics?.dominant_prefixes; + + const computedSubjectAnalysis = extractSubjectAnalysis( + allFiles, + userNSubjects, + dominantPrefixes + ); + setSubjectAnalysis(computedSubjectAnalysis); + + const fileSummary = buildFileSummary(files); + const filePatterns = analyzeFilePatterns(files); + const userContext = getUserContext(files); + // const subjectInfo = extractSubjectsFromFiles(files); + const subjectInfo = computedSubjectAnalysis; + const sampleFiles = + evidenceBundle?.samples + ?.slice(0, 10) + .map((s: any) => ` - ${s.relpath}`) + .join("\n") || ""; + + // console.log("=== SAMPLE FILES ==="); + // console.log(sampleFiles); + // console.log("=== COUNTS BY EXT ==="); + // 
console.log(evidenceBundle?.counts_by_ext); + + const prompt = getBIDSPlanPrompt( + fileSummary, + filePatterns, + userContext, + { + subjects: Object.entries( + computedSubjectAnalysis.id_mapping.id_mapping + ).map(([originalId, bidsId]) => ({ originalId, bidsId })), + strategy: computedSubjectAnalysis.id_mapping.strategy_used, + }, + evidenceBundle?.counts_by_ext || {}, + sampleFiles, + evidenceBundle + ); + + try { + let response; + + if (provider === "ollama") { + // const ollamaBaseUrl = ollamaUrl || "http://localhost:11434"; + // response = await fetch(`${ollamaBaseUrl}/v1/chat/completions`, { + // method: "POST", + // signal: controller.signal, + // headers: { "Content-Type": "application/json" }, + // body: JSON.stringify({ + // model, + // messages: [ + // { + // role: "system", + // content: + // "You are a BIDS dataset architect. Output only valid YAML without markdown fences or explanations.", + // }, + // { role: "user", content: prompt }, + // ], + // stream: false, + // }), + // }); + response = await OllamaService.chat(model, [ + { + role: "system", + content: + "You are a BIDS dataset architect. Output only valid YAML without markdown fences or explanations.", + }, + { role: "user", content: prompt }, + ]); + } else if (currentProvider.isAnthropic) { + response = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + "x-api-key": apiKey, + "anthropic-version": "2023-06-01", + }, + body: JSON.stringify({ + model, + max_tokens: 2048, + messages: [{ role: "user", content: prompt }], + }), + }); + } else { + response = await fetch(currentProvider.baseUrl, { + method: "POST", + signal: controller.signal, + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ + model, + messages: [ + { + role: "system", + content: + "You are a BIDS dataset architect. 
Output only valid YAML without markdown fences or explanations.", + }, + { role: "user", content: prompt }, + ], + max_tokens: 2048, + temperature: 0.15, + }), + }); + } + + // const data = await response.json(); + + // if (!response.ok) { + // throw new Error(data.error?.message || "Failed to generate BIDSPlan"); + // } + const data = provider === "ollama" ? response : await response.json(); + if (!response.ok && provider !== "ollama") { + throw new Error(data.error?.message || "Failed to generate BIDSPlan"); + } + + let plan = currentProvider.isAnthropic + ? data.content[0].text + : data.choices[0].message.content; + + // Clean up markdown fences if present + plan = plan + .replace(/^```yaml\n?/g, "") + .replace(/\n?```$/g, "") + .trim(); + + setBidsPlan(plan); + setStatus(`✓ BIDSPlan.yaml generated using ${currentProvider.name}`); + } catch (err: any) { + if (err.name === "AbortError") { + setStatus("❌ Generation cancelled"); + } else { + setError(err.message || "Failed to generate BIDSPlan"); + setStatus("❌ Error generating BIDSPlan"); + } + } finally { + setLoading(false); + setAbortController(null); + } + }; + + const handleDownloadPlan = () => { + const blob = new Blob([bidsPlan], { type: "text/yaml" }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = "BIDSPlan.yaml"; + a.click(); + URL.revokeObjectURL(url); + }; + + const handleCancel = () => { + if (abortController) { + abortController.abort(); + setStatus("Cancelling..."); + } + }; + + const handleCopy = () => { + navigator.clipboard.writeText(generatedScript); + setStatus("✓ Copied to clipboard!"); + setTimeout(() => setStatus(""), 2000); + }; + + const handleDownload = () => { + const blob = new Blob([generatedScript], { type: "text/plain" }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = "bids_conversion_script.py"; + a.click(); + URL.revokeObjectURL(url); + }; + + const 
handleDownloadPackage = async () => { + const zip = new JSZip(); + // const outputDir = "outputs"; + + // _staging/ files + const ingestInfo = buildIngestInfo(baseDirectoryPath); + zip.file("_staging/ingest_info.json", JSON.stringify(ingestInfo, null, 2)); + zip.file("_staging/BIDSPlan.yaml", bidsPlan); // your already-generated YAML + zip.file( + "_staging/evidence_bundle.json", + JSON.stringify(evidenceBundle, null, 2) + ); + zip.file( + "_staging/subject_analysis.json", + JSON.stringify(subjectAnalysis, null, 2) // ← was evidenceBundle.subject_analysis + ); + // trio files (get content from the AI-generated FileItems) + const dd = files.find( + (f) => + (f.source === "ai" || f.source === "user") && + f.name === "dataset_description.json" + ); + const readme = files.find( + (f) => + (f.source === "ai" || f.source === "user") && + (f.name === "README.md" || + f.name === "README.txt" || + f.name === "README.rst" || + f.name === "readme.md") + ); + const participants = files.find( + (f) => + (f.source === "ai" || f.source === "user") && + f.name === "participants.tsv" + ); + + if (dd?.content) zip.file("dataset_description.json", dd.content); + if (readme?.content) zip.file("README.md", readme.content); + if (participants?.content) + zip.file("participants.tsv", participants.content); + + const blob = await zip.generateAsync({ type: "blob" }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = "outputs.zip"; + a.click(); + URL.revokeObjectURL(url); + }; + + // save zip + const handleSaveZip = async () => { + // Add output files to VFS + const timestamp = new Date().toLocaleString(); + const zipLabel = `bids_output_${new Date().toISOString().slice(0, 10)}`; + const outputFiles: FileItem[] = []; + + const folderId = generateId(); + outputFiles.push({ + id: folderId, + name: zipLabel, + type: "folder", + parentId: null, + source: "output", + generatedAt: timestamp, + }); + + // _staging subfolder + const 
stagingFolderId = generateId(); + outputFiles.push({ + id: stagingFolderId, + name: "_staging", + type: "folder", + parentId: folderId, // ← child of root output folder + source: "output", + generatedAt: timestamp, + }); + + // Files inside _staging + if (bidsPlan) { + outputFiles.push({ + id: generateId(), + name: "BIDSPlan.yaml", + type: "file", + fileType: "text", + content: bidsPlan, + parentId: stagingFolderId, // ← inside _staging + source: "output", + generatedAt: timestamp, + }); + } + + if (evidenceBundle) { + outputFiles.push({ + id: generateId(), + name: "evidence_bundle.json", + type: "file", + fileType: "text", + content: JSON.stringify(evidenceBundle, null, 2), + parentId: stagingFolderId, // ← inside _staging + source: "output", + generatedAt: timestamp, + }); + + outputFiles.push({ + id: generateId(), + name: "ingest_info.json", + type: "file", + fileType: "text", + content: JSON.stringify(buildIngestInfo(baseDirectoryPath), null, 2), + parentId: stagingFolderId, // ← inside _staging + source: "output", + generatedAt: timestamp, + }); + + if (subjectAnalysis) { + // ← was evidenceBundle.subject_analysis + outputFiles.push({ + id: generateId(), + name: "subject_analysis.json", + type: "file", + fileType: "text", + content: JSON.stringify(subjectAnalysis, null, 2), // ← was evidenceBundle.subject_analysis + parentId: stagingFolderId, // ← inside _staging + source: "output", + generatedAt: timestamp, + }); + } + } + // Trio files at root level (outside _staging) + const dd = files.find( + (f) => + (f.source === "ai" || f.source === "user") && + f.name === "dataset_description.json" + ); + const readme = files.find( + (f) => + (f.source === "ai" || f.source === "user") && + (f.name === "README.md" || + f.name === "README.txt" || + f.name === "README.rst" || + f.name === "readme.md") + ); + const participants = files.find( + (f) => + (f.source === "ai" || f.source === "user") && + f.name === "participants.tsv" + ); + + [dd, readme, 
participants].forEach((f) => { + if (f?.content) { + outputFiles.push({ + ...f, + id: generateId(), + parentId: folderId, + source: "output", + generatedAt: timestamp, + }); + } + }); + + updateFiles((prev) => [...prev, ...outputFiles]); + setStatus("✓ Saved to VFS. Click 'Save Changes' to persist to database."); + }; + // const handleSaveZip = async () => { + // const zip = new JSZip(); + + // // _staging/ files + // const ingestInfo = buildIngestInfo(baseDirectoryPath); + // zip.file("_staging/ingest_info.json", JSON.stringify(ingestInfo, null, 2)); + // zip.file("_staging/BIDSPlan.yaml", bidsPlan); + // zip.file( + // "_staging/evidence_bundle.json", + // JSON.stringify(evidenceBundle, null, 2) + // ); + // zip.file( + // "_staging/subject_analysis.json", + // JSON.stringify(evidenceBundle.subject_analysis, null, 2) + // ); + + // // Declare trio files once, reuse for both zip and VFS + // const dd = files.find( + // (f) => f.source === "ai" && f.name === "dataset_description.json" + // ); + // const readme = files.find( + // (f) => f.source === "ai" && f.name === "README.md" + // ); + // const participants = files.find( + // (f) => f.source === "ai" && f.name === "participants.tsv" + // ); + + // // Add trio files to zip + // if (dd?.content) zip.file("dataset_description.json", dd.content); + // if (readme?.content) zip.file("README.md", readme.content); + // if (participants?.content) + // zip.file("participants.tsv", participants.content); + + // const blob = await zip.generateAsync({ type: "blob" }); + // const url = URL.createObjectURL(blob); + // const a = document.createElement("a"); + // a.href = url; + // a.download = `bids_output_${new Date().toISOString().slice(0, 10)}.zip`; + // a.click(); + // URL.revokeObjectURL(url); + + // // Add output files to VFS + // const timestamp = new Date().toLocaleString(); + // const zipLabel = `bids_output_${new Date().toISOString().slice(0, 10)}`; + // const outputFiles: FileItem[] = []; + + // const folderId = 
generateId(); + // outputFiles.push({ + // id: folderId, + // name: zipLabel, + // type: "folder", + // parentId: null, + // source: "output", + // generatedAt: timestamp, + // }); + + // // Add trio files under the folder + // [dd, readme, participants].forEach((f) => { + // if (f?.content) { + // outputFiles.push({ + // ...f, + // id: generateId(), + // parentId: folderId, + // source: "output", + // generatedAt: timestamp, + // }); + // } + // }); + + // // Add BIDSPlan.yaml under the folder + // if (bidsPlan) { + // outputFiles.push({ + // id: generateId(), + // name: "BIDSPlan.yaml", + // type: "file", + // fileType: "text", + // content: bidsPlan, + // parentId: folderId, + // source: "output", + // generatedAt: timestamp, + // }); + // } + + // updateFiles((prev) => [...prev, ...outputFiles]); + // }; + + return ( + + {/* Resize Handle */} + + + + {/* Header */} + + + + AI-Generated BIDS Conversion Script + + + + + + + {/* Content */} + + {/* Left: Configuration */} + + + LLM Provider + + + + + Model + + + + {/* Ollama Server URL field */} + {/* {provider === "ollama" && ( + setOllamaUrl(e.target.value)} + placeholder="http://localhost:11434" + sx={{ mb: 2 }} + /> + )} */} + {/* Base Directory Path field (shows for ALL providers) */} + setBaseDirectoryPath(e.target.value)} + placeholder="Enter the folder path where these files are located" + helperText="e.g., /Users/name/datasets/study1 or C:\Data\Study1" + sx={{ mb: 2 }} + /> + + {!currentProvider.noApiKey && ( + setApiKey(e.target.value)} + placeholder="Enter your API key..." + sx={{ mb: 2 }} + /> + )} + + {/* Step-by-step workflow buttons */} + + + Workflow Steps: + + + + { + setNSubjects(e.target.value); + setNSubjectsError(false); + }} + type="number" + size="small" + error={nSubjectsError} + helperText={nSubjectsError ? 
"Required" : ""} + inputProps={{ min: 1 }} + sx={{ mb: 1 }} + /> + + + Modality (required)* + + {modalityError && ( + + Required + + )} + + + {/* setDescribeText(e.target.value)} + size="small" + multiline + rows={2} + /> */} + + + + {/* + Ready to Generate Script ↓ + */} + + + + + + {/* */} + + {/* cancel button*/} + {(generatingTrio || loading) && ( + + )} + + {error && ( + + {error} + + )} + + {status && !error && ( + + {status} + + )} + + + {/* Right: Generated Script */} + + + + {/* */} + + + + + + {/* {generatedScript || + 'Configure your LLM provider and click "Generate Script"...'} */} + {bidsPlan || + generatedScript || + 'Configure your LLM provider and click "Generate BIDSPlan.yaml"...'} + + + + + ); +}; + +export default LLMPanel; diff --git a/src/components/User/Dashboard/DatasetOrganizer/index.tsx b/src/components/User/Dashboard/DatasetOrganizer/index.tsx new file mode 100644 index 0000000..ec8d97f --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/index.tsx @@ -0,0 +1,429 @@ +import DropZone from "./DropZone"; +import FileTree from "./FileTree"; +import LLMPanel from "./LLMPanel"; +import { generateId } from "./utils/fileProcessors"; +import { ArrowBack, Save, GetApp, Psychology } from "@mui/icons-material"; +import { + Box, + Button, + Typography, + Alert, + CircularProgress, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + DialogContentText, +} from "@mui/material"; +import { Colors } from "design/theme"; +import { useAppDispatch } from "hooks/useAppDispatch"; +import { useAppSelector } from "hooks/useAppSelector"; +import React, { useState, useEffect, useRef } from "react"; +import { useParams, useNavigate } from "react-router-dom"; +import { getProject, updateProject } from "redux/projects/projects.action"; +import { + selectCurrentProject, + selectProjectsLoading, + selectIsUpdatingProject, +} from "redux/projects/projects.selector"; +import { FileItem } from "redux/projects/types/projects.interface"; + +const 
DatasetOrganizer: React.FC = () => { + const { projectId } = useParams<{ projectId: string }>(); + const navigate = useNavigate(); + const dispatch = useAppDispatch(); + + const currentProject = useAppSelector(selectCurrentProject); + const loading = useAppSelector(selectProjectsLoading); + const isSaving = useAppSelector(selectIsUpdatingProject); + + // Local state for the organizer + const [files, setFiles] = useState([]); + const [selectedIds, setSelectedIds] = useState>(new Set()); + const [expandedIds, setExpandedIds] = useState>(new Set()); + const [showLLMPanel, setShowLLMPanel] = useState(false); + const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false); + const [error, setError] = useState(null); + const [baseDirectoryPath, setBaseDirectoryPath] = useState(""); + // add + const [evidenceBundle, setEvidenceBundle] = useState(null); + const [trioGenerated, setTrioGenerated] = useState(false); + const [showExitDialog, setShowExitDialog] = useState(false); //add + + // Helper to mark as changed + const markAsChanged = () => { + setHasUnsavedChanges(true); + }; + + // Wrapper functions that mark as changed + const updateFiles = (updater: React.SetStateAction) => { + setFiles(updater); + markAsChanged(); + }; + + const updateSelectedIds = (updater: React.SetStateAction>) => { + setSelectedIds(updater); + markAsChanged(); + }; + + const updateExpandedIds = (updater: React.SetStateAction>) => { + setExpandedIds(updater); + markAsChanged(); + }; + + const updateBaseDirectoryPath = (path: string) => { + setBaseDirectoryPath(path); + markAsChanged(); + }; + + // Load project on mount + useEffect(() => { + if (projectId) { + // dispatch(getProject({ projectId: parseInt(projectId) })); + dispatch(getProject({ projectId })); + } + }, [projectId, dispatch]); + + // Restore state from project when loaded + useEffect(() => { + if (currentProject && currentProject.extractor_state) { + const state = currentProject.extractor_state; + setFiles(state.files || []); + 
setSelectedIds(new Set(state.selectedIds || [])); + setExpandedIds(new Set(state.expandedIds || [])); + setBaseDirectoryPath(state.baseDirectoryPath || ""); + setEvidenceBundle(state.evidenceBundle || null); + setTrioGenerated(state.trioGenerated || false); + setHasUnsavedChanges(false); + } + }, [currentProject]); + + const handleSave = async () => { + if (!currentProject) return; + + try { + await dispatch( + updateProject({ + projectId: currentProject.public_id, // ← was currentProject.id + extractor_state: { + files, + selectedIds: Array.from(selectedIds), + expandedIds: Array.from(expandedIds), + baseDirectoryPath, + evidenceBundle, + trioGenerated, + }, + }) + ).unwrap(); + + setHasUnsavedChanges(false); + setError(null); + } catch (err: any) { + setError(err.message || "Failed to save project"); + } + }; + + const handleExportJSON = () => { + const buildTree = (parentId: string | null): any => { + const children = files.filter((f) => f.parentId === parentId); + const result: any = {}; + + children.forEach((child) => { + if (child.type === "folder" || child.type === "zip") { + result[child.name] = { + _type: child.type, + // _sourcePath: child.sourcePath || "", //change + //add + _sourcePath: baseDirectoryPath + ? `${baseDirectoryPath}/${ + child.sourcePath || child.name + }`.replace(/\/+/g, "/") + : child.sourcePath || "", + _children: buildTree(child.id), + }; + } else { + const fileData: any = { + _type: "file", + _fileType: child.fileType || "other", + }; + // if (child.sourcePath) fileData._sourcePath = child.sourcePath; // change + //add + if (child.sourcePath || baseDirectoryPath) { + fileData._sourcePath = baseDirectoryPath + ? 
`${baseDirectoryPath}/${ + child.sourcePath || child.name + }`.replace(/\/+/g, "/") + : child.sourcePath; + } + if (child.isUserMeta) fileData._isUserMeta = true; + if (child.content) fileData._content = child.content; + if (child.contentType) fileData._contentType = child.contentType; + if (child.note) fileData._note = child.note; + result[child.name] = fileData; + } + }); + + return result; + }; + + const exportData = { + _exportDate: new Date().toISOString(), + _totalFiles: files.length, + _projectName: currentProject?.name, + files: buildTree(null), + }; + + const blob = new Blob([JSON.stringify(exportData, null, 2)], { + type: "application/json", + }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = `${ + currentProject?.name?.replace(/\s+/g, "_") || "bids_metadata" + }_export.json`; + a.click(); + URL.revokeObjectURL(url); + }; + + // BACK BUTTON WITH DIALOG + const handleBack = () => { + if (hasUnsavedChanges) { + setShowExitDialog(true); + } else { + navigate("/dashboard"); + } + }; + + // ======================================================================== + // LOADING & ERROR STATES + // ======================================================================== + if (loading && !currentProject) { + return ( + + + + ); + } + + if (!currentProject) { + return ( + + Project not found + + + ); + } + + return ( + + {/* Header */} + + + + + {currentProject.name} + {currentProject.description && ( + + {currentProject.description} + + )} + + + + + + {/* */} + + + + + {error && ( + setError(null)} sx={{ m: 2 }}> + {error} + + )} + + {/* Main Content */} + + {/* Left: Drop Zone */} + + + {/* LLM Panel */} + {showLLMPanel && ( + setShowLLMPanel(false)} + /> + )} + + + {/* Right: File Tree */} + + + + {/* Exit Confirmation Dialog */} + setShowExitDialog(false)}> + Unsaved Changes + + + You have unsaved changes. Are you sure you want to leave? Your + changes will be lost. 
+ + + + + + + + + + ); +}; + +export default DatasetOrganizer; diff --git a/src/components/User/Dashboard/DatasetOrganizer/utils/fileAnalyzers.ts b/src/components/User/Dashboard/DatasetOrganizer/utils/fileAnalyzers.ts new file mode 100644 index 0000000..76142c0 --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/utils/fileAnalyzers.ts @@ -0,0 +1,372 @@ +// src/components/DatasetOrganizer/utils/fileAnalyzers.ts +import { FileItem } from "redux/projects/types/projects.interface"; + +/** + * Categorize a file based on its name and type + * Returns detailed scan category (anatomical-T1w, functional-bold, etc.) + */ +export const categorizeFile = (file: FileItem): string => { + const name = file.name.toLowerCase(); + + // Functional scans (task-based) + if (name.includes("task-") && name.includes("bold")) { + return "functional-bold"; + } + if (name.endsWith(".snirf")) { + return "functional-nirs"; + } + + if (name.endsWith(".nirs")) return "functional-nirs"; + if (name.endsWith(".mat")) return "functional-nirs"; + + // Anatomical scans + if (name.includes("t1w")) { + return "anatomical-T1w"; + } + if (name.includes("t2w") || name.includes("inplanet2")) { + return "anatomical-T2w"; + } + if (name.includes("flair")) { + return "anatomical-FLAIR"; + } + + if (name.endsWith(".dcm")) return "anatomical-dicom"; + + // Diffusion + if (name.includes("dwi") || name.includes("diffusion")) { + return "diffusion"; + } + + // Field maps + if (name.includes("fieldmap") || name.includes("fmap")) { + return "fieldmap"; + } + + // Fall back to file type + return file.fileType || "unknown"; +}; + +/** + * Detect modality from file collection + */ +export const detectModality = (files: FileItem[]): string => { + const counts: Record = {}; + files.forEach((f) => { + const ext = f.fileType || "unknown"; + counts[ext] = (counts[ext] || 0) + 1; + }); + + if (counts.nifti > 0 || counts.dicom > 0) return "mri"; + if ( + counts.hdf5 > 0 || + counts.matlab > 0 || + counts.homer3 > 
0 || + files.some((f) => f.name.endsWith(".snirf")) + ) + return "nirs"; + return "mixed"; +}; + +/** + * Get file extension counts + */ +export const getCountsByExtension = ( + files: FileItem[] +): Record => { + const counts: Record = {}; + // files.forEach((f) => { + // const ext = f.fileType || "unknown"; + // counts[ext] = (counts[ext] || 0) + 1; + // }); + files + .filter((f) => f.source === "user" && f.type === "file") + .forEach((f) => { + // Mirror Python: use ".nii.gz" as a single key for .nii.gz files + const name = f.name.toLowerCase(); + const ext = name.endsWith(".nii.gz") + ? ".nii.gz" + : "." + name.split(".").pop(); + counts[ext] = (counts[ext] || 0) + 1; + }); + return counts; +}; + +/** + * Extract user context from metadata files + */ +export const getUserContextText = (files: FileItem[]): string => { + const readme = files.find((f) => f.name.toLowerCase().includes("readme")); + const instructions = files.find( + (f) => + f.name.toLowerCase().includes("conversion") || + f.name.toLowerCase().includes("instruction") + ); + const participants = files.find((f) => + f.name.toLowerCase().includes("participant") + ); + + const datasetDescription = files.find( + (f) => f.name.toLowerCase() === "dataset_description.json" + ); + + const pdfsAndDocs = files.filter( + (f) => + f.source === "user" && + f.fileType === "office" && + f.content?.trim() && + f.name.toLowerCase() !== "participants.tsv" // already handled + ); + + const parts = []; + if (datasetDescription?.content) + parts.push(`DATASET DESCRIPTION:\n${datasetDescription.content}`); + if (readme?.content) parts.push(`README:\n${readme.content}`); + if (instructions?.content) + parts.push(`INSTRUCTIONS:\n${instructions.content}`); + if (participants?.content) + parts.push(`PARTICIPANTS:\n${participants.content}`); + pdfsAndDocs.forEach((f) => { + parts.push(`DOCUMENT [${f.name}]:\n${f.content!.slice(0, 3000)}`); + }); + return parts.join("\n\n"); +}; + +/** (not using yet) + * Analyze filename 
patterns to detect subjects + * (Simplified version inspired by auto-bidsify's filename_tokenizer) + */ +export const analyzeFilenamePatterns = ( + files: FileItem[] +): { + subjectCount: number; + subjectIds: string[]; + hasRunNumbers: boolean; + hasTaskNames: boolean; +} => { + const dataFiles = files.filter((f) => f.type === "file" && !f.isUserMeta); + const subjectIds = new Set(); + let hasRunNumbers = false; + let hasTaskNames = false; + + dataFiles.forEach((f) => { + const name = f.name; + + // Extract subject ID (sub-01, sub-02, etc.) + const subMatch = name.match(/sub-(\d+)/i); + if (subMatch) { + subjectIds.add(subMatch[1]); + } + + // Check for run numbers + if (name.includes("_run-")) { + hasRunNumbers = true; + } + + // Check for task names + if (name.includes("task-")) { + hasTaskNames = true; + } + }); + + return { + subjectCount: subjectIds.size, + subjectIds: Array.from(subjectIds).sort(), + hasRunNumbers, + hasTaskNames, + }; +}; + +// add to fileAnalyzers.ts + +// export interface SubjectRecord { +// original_id: string; +// numeric_id: string; +// site: string | null; +// pattern_name: string; +// file_count: number; +// } + +// export interface SubjectAnalysis { +// success: boolean; +// method: string; +// subject_records: SubjectRecord[]; +// subject_count: number; +// has_site_info: boolean; +// variants_by_subject: Record; +// python_generated_filename_rules: any[]; +// id_mapping: { +// id_mapping: Record; +// reverse_mapping: Record; +// strategy_used: string; +// metadata_columns: string[]; +// }; +// } + +// // mirrors _extract_subjects_from_directory_structure +// const extractFromDirectoryStructure = ( +// allFiles: string[] +// ): Omit | null => { +// const patterns: Array<[RegExp, boolean, number, number | null, string]> = [ +// [/^([A-Za-z]+)_sub(\d+)$/i, true, 2, 1, "site_prefixed"], +// [/^sub-(\d+)$/i, false, 1, null, "standard_bids"], +// [/^subject[_-]?(\d+)$/i, false, 1, null, "simple"], +// [/^(\d{3,})$/, false, 1, null, 
"numeric_only"], +// ]; + +// const subjectRecords: SubjectRecord[] = []; +// const seenIds = new Set(); + +// for (const filepath of allFiles) { +// const parts = filepath.split("/"); +// for (const part of parts.slice(0, 2)) { +// for (const [ +// regex, +// hasSite, +// idGroup, +// siteGroup, +// patternName, +// ] of patterns) { +// const match = part.match(regex); +// if (match) { +// const originalId = match[0]; +// if (seenIds.has(originalId)) break; +// seenIds.add(originalId); +// subjectRecords.push({ +// original_id: originalId, +// numeric_id: match[idGroup], +// site: hasSite && siteGroup ? match[siteGroup] : null, +// pattern_name: patternName, +// file_count: 0, +// }); +// break; +// } +// } +// } +// } + +// if (subjectRecords.length === 0) return null; + +// subjectRecords.sort((a, b) => { +// const na = parseInt(a.numeric_id) || 0; +// const nb = parseInt(b.numeric_id) || 0; +// return na - nb; +// }); + +// return { +// success: true, +// method: "directory_structure", +// subject_records: subjectRecords, +// subject_count: subjectRecords.length, +// has_site_info: subjectRecords.some((r) => r.site !== null), +// variants_by_subject: {}, +// python_generated_filename_rules: [], +// }; +// }; + +// // mirrors _extract_subjects_from_flat_filenames +// const extractFromFlatFilenames = ( +// allFiles: string[] +// ): Omit | null => { +// const identifierToFiles: Record = {}; + +// for (const filepath of allFiles) { +// const filename = filepath.split("/").pop() || ""; +// const nameNoExt = filename +// .replace(/\.[^/.]+$/, "") +// .replace(/\.nii\.gz$/, ""); +// const match = nameNoExt.match(/^([A-Za-z0-9\-]+)/); +// if (match) { +// const identifier = match[1]; +// if (!identifierToFiles[identifier]) identifierToFiles[identifier] = []; +// identifierToFiles[identifier].push(filepath); +// } +// } + +// if (Object.keys(identifierToFiles).length === 0) return null; + +// const extractNumeric = (id: string): number => { +// const nums = 
id.match(/\d+/g); +// return nums ? parseInt(nums[nums.length - 1]) : 999999; +// }; + +// const sortedIdentifiers = Object.keys(identifierToFiles).sort( +// (a, b) => extractNumeric(a) - extractNumeric(b) +// ); + +// const subjectRecords: SubjectRecord[] = sortedIdentifiers.map((id, i) => ({ +// original_id: id, +// numeric_id: String(i + 1), +// site: null, +// pattern_name: "dominant_prefix", +// file_count: identifierToFiles[id].length, +// })); + +// return { +// success: true, +// method: "dominant_prefix_fallback", +// subject_records: subjectRecords, +// subject_count: subjectRecords.length, +// has_site_info: false, +// variants_by_subject: {}, +// python_generated_filename_rules: [], +// }; +// }; + +// // mirrors _generate_subject_id_mapping +// const generateIdMapping = ( +// subjectInfo: Omit +// ): SubjectAnalysis["id_mapping"] => { +// const records = subjectInfo.subject_records; +// const idMapping: Record = {}; +// const reverseMapping: Record = {}; + +// // detect already-BIDS format (sub-01, sub-02...) 
+// const allAlreadyBids = records.every((r) => /^sub-\w+$/i.test(r.original_id)); + +// if (allAlreadyBids) { +// for (const rec of records) { +// const bidsId = rec.original_id.replace(/^sub-/i, ""); +// idMapping[rec.original_id] = bidsId; +// reverseMapping[bidsId] = rec.original_id; +// } +// return { +// id_mapping: idMapping, +// reverse_mapping: reverseMapping, +// strategy_used: "already_bids", +// metadata_columns: [], +// }; +// } + +// // numeric strategy +// for (let i = 0; i < records.length; i++) { +// const orig = records[i].original_id; +// const bidsId = String(i + 1); +// idMapping[orig] = bidsId; +// reverseMapping[bidsId] = orig; +// } + +// return { +// id_mapping: idMapping, +// reverse_mapping: reverseMapping, +// strategy_used: "numeric", +// metadata_columns: ["original_id"], +// }; +// }; + +// // main export — call this from llmHelpers +// export const extractSubjectAnalysis = (allFiles: string[]): SubjectAnalysis => { +// const fromDir = extractFromDirectoryStructure(allFiles); +// const base = fromDir ?? +// extractFromFlatFilenames(allFiles) ?? 
{ +// success: false, +// method: "none", +// subject_records: [], +// subject_count: 0, +// has_site_info: false, +// variants_by_subject: {}, +// python_generated_filename_rules: [], +// }; + +// const idMapping = generateIdMapping(base); +// return { ...base, id_mapping: idMapping }; +// }; diff --git a/src/components/User/Dashboard/DatasetOrganizer/utils/fileProcessors.ts b/src/components/User/Dashboard/DatasetOrganizer/utils/fileProcessors.ts new file mode 100644 index 0000000..b51d150 --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/utils/fileProcessors.ts @@ -0,0 +1,820 @@ +import * as dicomParser from "dicom-parser"; +import * as hdf5 from "jsfive"; +import JSZip from "jszip"; +import * as mammoth from "mammoth"; +import pako from "pako"; +import * as pdfjsLib from "pdfjs-dist"; +import { FileItem } from "redux/projects/types/projects.interface"; +import * as XLSX from "xlsx"; + +pdfjsLib.GlobalWorkerOptions.workerSrc = + "https://cdnjs.cloudflare.com/ajax/libs/pdf.js/3.4.120/pdf.worker.min.js"; + +export const generateId = (): string => { + return Math.random().toString(36).substr(2, 9); +}; + +export const getFileType = (name: string): string => { + const lower = name.toLowerCase(); + if (lower.endsWith(".nii.gz") || lower.endsWith(".nii")) return "nifti"; + + const ext = lower.split(".").pop() || ""; + const fileTypes: Record = { + text: ["json", "md", "txt", "tsv", "bvec", "bval", "csv"], + nifti: ["nii"], + hdf5: ["snirf"], + array: ["h5", "hdf5", "hdf", "npy", "npz"], + neurojsonText: ["jnii", "jmsh", "jdt", "jnirs"], + neurojsonBinary: ["jdb", "bjd", "bnii", "bmsh", "bnirs"], + office: ["docx", "pdf", "xlsx", "xls"], + matlab: ["mat"], + dicom: ["dcm"], + nirs: ["nirs"], + }; + + for (const [type, extensions] of Object.entries(fileTypes)) { + if (extensions.includes(ext)) return type; + } + + return "other"; +}; + +// Extract PDF text content +const extractPDFContent = async (buffer: ArrayBuffer): Promise => { + try { + const 
loadingTask = pdfjsLib.getDocument({ data: buffer }); + const pdf = await loadingTask.promise; + + let fullText = `PDF: ${pdf.numPages} page${ + pdf.numPages !== 1 ? "s" : "" + }\n`; + fullText += "─".repeat(50) + "\n\n"; + + // Extract first 5 pages only + const maxPages = Math.min(pdf.numPages, 5); + + for (let i = 1; i <= maxPages; i++) { + const page = await pdf.getPage(i); + const textContent = await page.getTextContent(); + const pageText = textContent.items.map((item: any) => item.str).join(" "); + + fullText += `[Page ${i}]\n${pageText.slice(0, 1000)}\n\n`; + } + + if (pdf.numPages > 5) { + fullText += `... (${pdf.numPages - 5} more pages not shown)`; + } + + return fullText; + } catch (error: any) { + return `Error extracting PDF: ${error.message}`; + } +}; + +// Simple file processing - just store file info without deep parsing +export const processFile = async ( + file: File, + basePath?: string +): Promise => { + const relativePath = file.webkitRelativePath || file.name; + // const fullPath = basePath + // ? 
`${basePath}/${relativePath}`.replace(/\/+/g, "/") // Clean up double slashes + // : relativePath; + const entry: FileItem = { + id: generateId(), + name: file.name, + type: "file", + parentId: null, + fileType: getFileType(file.name) as any, + // sourcePath: file.name, + // sourcePath: fullPath, // ← Now includes base path if provided + sourcePath: relativePath, //add + source: "user", // add source + }; + + // Only extract content for text files + const fileType = getFileType(file.name); + const ext = file.name.toLowerCase().split(".").pop(); + + try { + if (fileType === "text") { + // Extract text files + const text = await file.text(); + entry.content = text.slice(0, 5000); + entry.contentType = "text"; + } else if (fileType === "nifti") { + const buffer = await file.arrayBuffer(); + const header = parseNiftiHeader(buffer); + entry.content = JSON.stringify(header, null, 2); + entry.contentType = "nifti"; + } else if (fileType === "hdf5") { + // Extract HDF5/SNIRF structure + const buffer = await file.arrayBuffer(); + const tree = parseHDF5Tree(buffer); + if (tree.error) { + entry.content = `Error parsing HDF5: ${tree.error}`; + } else { + entry.content = formatHDF5Tree(tree); + } + entry.contentType = "hdf5"; + } else if (fileType === "neurojsonText") { + // Extract NeuroJSON text + const text = await file.text(); + // entry.content = text.slice(0, 5000); + try { + const json = JSON.parse(text); + entry.content = JSON.stringify(json, null, 2).slice(0, 5000); + } catch (e) { + entry.content = text.slice(0, 5000); + } + entry.contentType = "neurojson"; + } else if (fileType === "neurojsonBinary") { + // NeuroJSON binary placeholder + entry.content = `Binary NeuroJSON: ${file.name}\nSize: ${( + file.size / 1024 + ).toFixed(2)} KB\nFormat: BJData`; + entry.contentType = "neurojson"; + } else if (fileType === "office" && ext === "pdf") { + // Extract PDF + const buffer = await file.arrayBuffer(); + entry.content = await extractPDFContent(buffer); + entry.contentType 
= "office"; + } else if (fileType === "office" && ext === "docx") { + // Extract DOCX + const buffer = await file.arrayBuffer(); + entry.content = await extractDOCXContent(buffer); + entry.contentType = "office"; + } else if (fileType === "office" && (ext === "xlsx" || ext === "xls")) { + // Extract Excel + const buffer = await file.arrayBuffer(); + entry.content = extractExcelContent(buffer); + entry.contentType = "office"; + } else if (fileType === "matlab") { + entry.content = `MATLAB File: ${file.name}\nSize: ${( + file.size / 1024 + ).toFixed( + 2 + )} KB\nFormat: .mat (fNIRS data — will be converted to SNIRF by autobidsify)`; + entry.contentType = "matlab"; + } else if (fileType === "dicom") { + // entry.content = `DICOM File: ${file.name}\nSize: ${( + // file.size / 1024 + // ).toFixed( + // 2 + // )} KB\nFormat: .dcm (MRI data — will be converted to NIfTI by dcm2niix)`; + const buffer = await file.arrayBuffer(); + entry.content = parseDicomHeader(buffer); + entry.contentType = "dicom"; + } else if (fileType === "nirs") { + entry.content = `Homer3 File: ${file.name}\nSize: ${( + file.size / 1024 + ).toFixed( + 2 + )} KB\nFormat: .nirs (fNIRS data — will be converted to SNIRF by autobidsify)`; + entry.contentType = "nirs"; + } else if (fileType === "array") { + entry.content = `Array File: ${file.name}\nSize: ${( + file.size / 1024 + ).toFixed(2)} KB\nFormat: ${file.name + .split(".") + .pop() + ?.toUpperCase()} (generic array data — will be placed in unknown pool by autobidsify)`; + entry.contentType = "array"; + } else { + // For other binary files, just store basic info + entry.content = `File: ${file.name}\nSize: ${(file.size / 1024).toFixed( + 2 + )} KB\nType: ${file.type || "Unknown"}`; + entry.contentType = fileType; + } + } catch (e: any) { + console.error("File processing error:", e); + entry.content = `Error reading file: ${e.message}`; + } + + return entry; +}; + +// Process ZIP files +export const processZip = async ( + file: File, + basePath?: 
string +): Promise => { + const zip = new JSZip(); + const zipName = file.name; + + try { + const contents = await zip.loadAsync(file); + const entries: FileItem[] = []; + const pathMap: Record = {}; + // Create root ZIP container + const zipRootId = generateId(); + entries.push({ + id: zipRootId, + name: zipName, + type: "zip", + parentId: null, + sourcePath: zipName, + }); + + const paths = Object.keys(contents.files).sort(); + + for (const path of paths) { + const zipEntry = contents.files[path]; + + // Skip directories + if (zipEntry.dir || path.endsWith("/")) continue; + + const parts = path.split("/"); + const fileName = parts.pop()!; + let currentPath = ""; + // let parentId: string | null = null; + let parentId: string | null = zipRootId; + + // Create folder hierarchy + parts.forEach((part) => { + const folderPath = currentPath ? `${currentPath}/${part}` : part; + if (!pathMap[folderPath]) { + const folderId = generateId(); + pathMap[folderPath] = folderId; + // const folderSourcePath = basePath + // ? `${basePath}/${zipName}/${folderPath}`.replace(/\/+/g, "/") + // : `${zipName}/${folderPath}`; + entries.push({ + id: folderId, + name: part, + type: "folder", + parentId: parentId, + // sourcePath: `${zipName}/${folderPath}`, + // sourcePath: folderSourcePath, + sourcePath: `${zipName}/${folderPath}`, //add + }); + } + parentId = pathMap[folderPath]; + currentPath = folderPath; + }); + + // Add file + const fileId = generateId(); + const fileType = getFileType(fileName); + const ext = fileName.toLowerCase().split(".").pop(); + + // Add basePath to file sourcePath + // const fileSourcePath = basePath + // ? 
`${basePath}/${zipName}/${path}`.replace(/\/+/g, "/") + // : `${zipName}/${path}`; + + const entry: FileItem = { + id: fileId, + name: fileName, + type: "file", + parentId: parentId, + fileType: fileType as any, + sourcePath: `${zipName}/${path}`, // only relative path + source: "user", + // sourcePath: fileSourcePath,//change + }; + + // Extract content based on file type + if (fileType === "text") { + try { + const text = await zipEntry.async("text"); + entry.content = text.slice(0, 5000); + entry.contentType = "text"; + } catch (e: any) { + entry.content = `Error: ${e.message}`; + } + } else if (fileType === "office" && ext === "pdf") { + // Extract PDF + try { + const arrayBuffer = await zipEntry.async("arraybuffer"); + entry.content = await extractPDFContent(arrayBuffer); + entry.contentType = "office"; + } catch (e: any) { + console.error("ZIP PDF extraction error:", e); + entry.content = `Error extracting PDF: ${e.message}`; + } + } else if (fileType === "office" && ext === "docx") { + // Extract DOCX + try { + const arrayBuffer = await zipEntry.async("arraybuffer"); + entry.content = await extractDOCXContent(arrayBuffer); + entry.contentType = "office"; + } catch (e: any) { + entry.content = `Error extracting DOCX: ${e.message}`; + } + } else if (fileType === "office" && (ext === "xlsx" || ext === "xls")) { + // Extract Excel + try { + const arrayBuffer = await zipEntry.async("arraybuffer"); + entry.content = extractExcelContent(arrayBuffer); + entry.contentType = "office"; + } catch (e: any) { + entry.content = `Error extracting Excel: ${e.message}`; + } + } + // NIfTI header extraction from ZIP + else if (fileType === "nifti") { + try { + const arrayBuffer = await zipEntry.async("arraybuffer"); + const header = parseNiftiHeader(arrayBuffer); + entry.content = JSON.stringify(header, null, 2); // ← Format as JSON + entry.contentType = "nifti"; + } catch (e: any) { + entry.content = `Error extracting NIfTI header: ${e.message}`; + } + } + + // HDF5/SNIRF + 
else if (fileType === "hdf5") { + try { + const arrayBuffer = await zipEntry.async("arraybuffer"); + const tree = parseHDF5Tree(arrayBuffer); + if (tree.error) { + entry.content = `Error parsing HDF5: ${tree.error}`; + } else { + entry.content = formatHDF5Tree(tree); + } + entry.contentType = "hdf5"; + } catch (e: any) { + entry.content = `Error extracting HDF5: ${e.message}`; + } + } + // NeuroJSON text files + else if (fileType === "neurojsonText") { + try { + const text = await zipEntry.async("text"); + // entry.content = text.slice(0, 5000); + const json = JSON.parse(text); + entry.content = JSON.stringify(json, null, 2).slice(0, 5000); + entry.contentType = "neurojson"; + } catch (e: any) { + entry.content = `Error: ${e.message}`; + } + } + // NeuroJSON binary placeholder + else if (fileType === "neurojsonBinary") { + const arrayBuffer = await zipEntry.async("arraybuffer"); + const sizeKB = (arrayBuffer.byteLength / 1024).toFixed(2); + entry.content = `Binary NeuroJSON: ${fileName}\nSize: ${sizeKB} KB\nFormat: BJData`; + entry.contentType = "neurojson"; + } + // matlab placeholder + else if (fileType === "matlab") { + const arrayBuffer = await zipEntry.async("arraybuffer"); + const sizeKB = (arrayBuffer.byteLength / 1024).toFixed(2); + entry.content = `MATLAB File: ${fileName}\nSize: ${sizeKB} KB\nFormat: .mat (fNIRS data — will be converted to SNIRF by autobidsify)`; + entry.contentType = "matlab"; + } + // dicom header extraction from ZIP + else if (fileType === "dicom") { + const arrayBuffer = await zipEntry.async("arraybuffer"); + // const sizeKB = (arrayBuffer.byteLength / 1024).toFixed(2); + // entry.content = `DICOM File: ${fileName}\nSize: ${sizeKB} KB\nFormat: .dcm (MRI data — will be converted to NIfTI by dcm2niix)`; + entry.content = parseDicomHeader(arrayBuffer); + entry.contentType = "dicom"; + } else if (fileType === "nirs") { + const arrayBuffer = await zipEntry.async("arraybuffer"); + const sizeKB = (arrayBuffer.byteLength / 1024).toFixed(2); + 
entry.content = `Homer3 File: ${fileName}\nSize: ${sizeKB} KB\nFormat: .nirs (fNIRS data — will be converted to SNIRF by autobidsify)`; + entry.contentType = "nirs"; + } else if (fileType === "array") { + const arrayBuffer = await zipEntry.async("arraybuffer"); + const sizeKB = (arrayBuffer.byteLength / 1024).toFixed(2); + entry.content = `Array File: ${fileName}\nSize: ${sizeKB} KB\nFormat: ${fileName + .split(".") + .pop() + ?.toUpperCase()} (generic array data — will be placed in unknown pool by autobidsify)`; + entry.contentType = "array"; + } else { + // For other binary files, just store info + const arrayBuffer = await zipEntry.async("arraybuffer"); + const sizeKB = (arrayBuffer.byteLength / 1024).toFixed(2); + entry.content = `ZIP Entry: ${fileName}\nSize: ${sizeKB} KB`; + entry.contentType = fileType; + } + + entries.push(entry); + } + + return entries; + } catch (e: any) { + console.error("Error processing ZIP:", e); + return [ + { + id: generateId(), + name: zipName, + type: "file", + parentId: null, + content: `Error processing ZIP: ${e.message}`, + fileType: "other", + }, + ]; + } +}; + +export const processFolder = async ( + folderEntry: FileSystemDirectoryEntry, + parentId: string | null, + basePath?: string +): Promise => { + const entries: FileItem[] = []; + const folderId = generateId(); + // const basePath = folderEntry.name; + const folderName = folderEntry.name; + + // Add basePath to root folder sourcePath + // const rootSourcePath = basePath + // ? 
`${basePath}/${folderName}`.replace(/\/+/g, "/") + // : folderName; + + // Add the folder itself + entries.push({ + id: folderId, + name: folderEntry.name, + type: "folder", + parentId: parentId, + // sourcePath: basePath, + // sourcePath: rootSourcePath, + sourcePath: folderName, //add + }); + + // Helper: Promisify readEntries + const readEntries = ( + reader: FileSystemDirectoryReader + ): Promise => { + return new Promise((resolve, reject) => { + reader.readEntries(resolve, reject); + }); + }; + + // Helper: Promisify file() method + const getFile = (fileEntry: FileSystemFileEntry): Promise => { + return new Promise((resolve, reject) => { + fileEntry.file(resolve, reject); + }); + }; + + // Recursive traversal function + async function traverseDirectory( + dirEntry: FileSystemDirectoryEntry, + currentParentId: string, + currentPath: string + ): Promise { + const dirReader = dirEntry.createReader(); + let allEntries: FileSystemEntry[] = []; + + // Read all entries (may require multiple calls) + const readBatch = async (): Promise => { + const batch = await readEntries(dirReader); + if (batch.length > 0) { + allEntries = allEntries.concat(Array.from(batch)); + await readBatch(); // Keep reading + } + }; + + await readBatch(); + + // Process each entry + for (const entry of allEntries) { + const entryPath = `${currentPath}/${entry.name}`; + // Construct full path with basePath + // const entryPath = basePath + // ? 
`${basePath}/${currentPath}/${entry.name}`.replace(/\/+/g, "/") + // : `${currentPath}/${entry.name}`; + + if (entry.isFile) { + // Process file + const fileEntry = entry as FileSystemFileEntry; + const file = await getFile(fileEntry); + const fileItem = await processFile(file); + fileItem.parentId = currentParentId; + fileItem.sourcePath = entryPath; // only relative path + entries.push(fileItem); + } else if (entry.isDirectory) { + // Process subfolder + const subFolderId = generateId(); + entries.push({ + id: subFolderId, + name: entry.name, + type: "folder", + parentId: currentParentId, + sourcePath: entryPath, + }); + await traverseDirectory( + entry as FileSystemDirectoryEntry, + subFolderId, + // entryPath + `${currentPath}/${entry.name}` + ); + } + } + } + + // Start traversal + await traverseDirectory(folderEntry, folderId, folderName); + + return entries; +}; + +export const parseNiftiHeader = (buffer: ArrayBuffer): any => { + try { + let data: ArrayBufferLike = buffer; + const arr = new Uint8Array(buffer); + + // Check if gzipped + if (arr[0] === 0x1f && arr[1] === 0x8b) { + const decompressed = pako.inflate(arr); + data = decompressed.buffer; + } + + const view = new DataView(data); + const sizeof_hdr = view.getInt32(0, true); + const isNifti2 = sizeof_hdr === 540; + const isNifti1 = sizeof_hdr === 348; + + if (!isNifti1 && !isNifti2) { + return { error: "Not a valid NIfTI file" }; + } + + const header: any = { format: isNifti2 ? 
"NIfTI-2" : "NIfTI-1" }; + + if (isNifti1) { + header.dim = []; + for (let i = 0; i < 8; i++) { + header.dim.push(view.getInt16(40 + i * 2, true)); + } + header.datatype = view.getInt16(70, true); + header.bitpix = view.getInt16(72, true); + header.pixdim = []; + for (let j = 0; j < 8; j++) { + header.pixdim.push(view.getFloat32(76 + j * 4, true)); + } + header.vox_offset = view.getFloat32(108, true); + header.scl_slope = view.getFloat32(112, true); + header.scl_inter = view.getFloat32(116, true); + header.qform_code = view.getInt16(252, true); + header.sform_code = view.getInt16(254, true); + + // Extract 80-character description (bytes 148-227) + const descripBytes = new Uint8Array(data, 148, 80); + header.descrip = String.fromCharCode(...descripBytes) + .replace(/\0/g, "") // Remove null terminators + .trim(); // Remove whitespace + + // Extract 4-character magic string (bytes 344-347) + const magicBytes = new Uint8Array(data, 344, 4); + header.magic = String.fromCharCode(...magicBytes).replace(/\0/g, ""); + + const datatypes: Record = { + 0: "UNKNOWN", + 2: "UINT8", + 4: "INT16", + 8: "INT32", + 16: "FLOAT32", + 64: "FLOAT64", + 256: "INT8", + 512: "UINT16", + 768: "UINT32", + }; + header.datatype_name = datatypes[header.datatype] || "UNKNOWN"; + } + + return header; + } catch (e: any) { + return { error: e.message }; + } +}; + +// Parse HDF5/SNIRF tree structure +const parseHDF5Tree = (buffer: ArrayBuffer): any => { + try { + const f = new hdf5.File(buffer); + const tree: any = { type: "group", name: "/", children: [], attrs: {} }; + + const getAttrs = (item: any) => { + try { + return item.attrs || {}; + } catch (e) { + return {}; + } + }; + + const getKeys = (item: any): string[] => { + try { + if (item.keys && Array.isArray(item.keys)) return item.keys; + if (typeof item.keys === "function") return item.keys(); + return []; + } catch (e) { + return []; + } + }; + + const traverse = (group: any, node: any, depth: number, path: string) => { + if (depth > 20) 
{ + node.truncated = true; + return; + } + node.attrs = getAttrs(group); + + const keys = getKeys(group); + + for (const key of keys) { + try { + const item = group.get(key); + if (!item) continue; + + const childPath = `${path}/${key}`; + const child: any = { + name: key, + path: childPath, + attrs: getAttrs(item), + }; + + const itemKeys = getKeys(item); + + if (itemKeys.length > 0) { + // It's a group with children + child.type = "group"; + child.children = []; + traverse(item, child, depth + 1, childPath); + } else { + // It's a dataset + child.type = "dataset"; + try { + child.shape = item.shape || []; + child.dtype = item.dtype || "unknown"; + // Read small scalar or 1D data + const totalElements = child.shape.reduce( + (a: number, b: number) => a * b, + 1 + ); + if ( + totalElements > 0 && + totalElements < 50 && + child.shape.length <= 1 + ) { + try { + const val = item.value; + if (val !== undefined && val !== null) { + child.value = val; + } + } catch (e) { + // Ignore read errors + } + } + } catch (e) { + child.dtype = "error"; + } + } + node.children.push(child); + } catch (e) { + console.log("Error reading key:", key, e); + } + } + }; + + traverse(f, tree, 0, ""); + return tree; + } catch (e: any) { + return { error: e.message }; + } +}; + +const formatHDF5Tree = (node: any, indent: number = 0): string => { + const pad = " ".repeat(indent); + let result = ""; + + if (node.type === "group") { + result += `${pad}📁 ${node.name}`; + const attrKeys = Object.keys(node.attrs || {}); + if (attrKeys.length > 0) { + const attrStr = attrKeys + .slice(0, 5) + .map((k) => { + const v = node.attrs[k]; + if (typeof v === "string") return `${k}="${v.slice(0, 30)}"`; + return k; + }) + .join(", "); + result += ` {${attrStr}${attrKeys.length > 5 ? "..." : ""}}`; + } + result += "\n"; + if (node.truncated) result += `${pad} ... 
(truncated)\n`; + const children = node.children || []; + for (const child of children) { + result += formatHDF5Tree(child, indent + 1); + } + } else { + result += `${pad}📊 ${node.name}`; + if (node.shape && node.shape.length) { + result += ` [${node.shape.join("×")}]`; + } + if (node.dtype) result += ` (${node.dtype})`; + if (node.value !== undefined) { + let valStr; + if (Array.isArray(node.value)) { + valStr = `[${node.value.slice(0, 5).join(", ")}${ + node.value.length > 5 ? "..." : "" + }]`; + } else { + valStr = String(node.value).slice(0, 50); + } + result += ` = ${valStr}`; + } + const attrKeys2 = Object.keys(node.attrs || {}); + if (attrKeys2.length > 0) { + result += ` {${attrKeys2.slice(0, 3).join(", ")}}`; + } + result += "\n"; + } + return result; +}; + +// parse dicom header +const parseDicomHeader = (buffer: ArrayBuffer): string => { + try { + const byteArray = new Uint8Array(buffer); + const dataSet = dicomParser.parseDicom(byteArray); + + const getString = (tag: string): string => { + try { + return dataSet.string(tag) || ""; + } catch { + return ""; + } + }; + + const patientID = getString("x00100020"); + const patientName = getString("x00100010"); + const patientSex = getString("x00100040"); + const patientAge = getString("x00101010"); + const studyDescription = getString("x00081030"); + const seriesDescription = getString("x0008103e"); + const modality = getString("x00080060"); + const manufacturer = getString("x00080070"); + const rows = getString("x00280010"); + const cols = getString("x00280011"); + + const lines = [`DICOM File`, `─`.repeat(50)]; + + if (modality) lines.push(`Modality: ${modality}`); + if (studyDescription) lines.push(`Study: ${studyDescription}`); + if (seriesDescription) lines.push(`Series: ${seriesDescription}`); + if (patientID) lines.push(`Patient ID: ${patientID}`); + if (patientName) lines.push(`Patient Name: ${patientName}`); + if (patientSex) lines.push(`Sex: ${patientSex}`); + if (patientAge) lines.push(`Age: 
${patientAge}`); + if (manufacturer) lines.push(`Scanner: ${manufacturer}`); + if (rows && cols) lines.push(`Image Size: ${rows} × ${cols}`); + + return lines.join("\n"); + } catch (e: any) { + return `DICOM File\nSize: ${(buffer.byteLength / 1024).toFixed( + 2 + )} KB\nError reading header: ${e.message}`; + } +}; + +// Extract DOCX text content +const extractDOCXContent = async (buffer: ArrayBuffer): Promise => { + try { + const result = await mammoth.extractRawText({ arrayBuffer: buffer }); + const text = result.value; + return ( + text.slice(0, 5000) + (text.length > 5000 ? "\n... (truncated)" : "") + ); + } catch (error: any) { + return `Error extracting DOCX: ${error.message}`; + } +}; + +// Extract Excel content +const extractExcelContent = (buffer: ArrayBuffer): string => { + try { + const workbook = XLSX.read(buffer, { type: "array" }); + let text = `Excel: ${workbook.SheetNames.length} sheet(s)\n${"─".repeat( + 50 + )}`; + + // Process first 3 sheets + for (let i = 0; i < Math.min(workbook.SheetNames.length, 3); i++) { + const sheetName = workbook.SheetNames[i]; + const worksheet = workbook.Sheets[sheetName]; + const csv = XLSX.utils.sheet_to_csv(worksheet); + const lines = csv.split("\n").slice(0, 20); // First 20 rows + + text += `\n\n[Sheet: ${sheetName}]\n${lines.join("\n")}`; + } + + if (workbook.SheetNames.length > 3) { + text += `\n\n... 
(${ + workbook.SheetNames.length - 3 + } more sheets not shown)`; + } + + return text; + } catch (error: any) { + return `Error extracting Excel: ${error.message}`; + } +}; diff --git a/src/components/User/Dashboard/DatasetOrganizer/utils/filenameTokenizer.ts b/src/components/User/Dashboard/DatasetOrganizer/utils/filenameTokenizer.ts new file mode 100644 index 0000000..1c1820b --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/utils/filenameTokenizer.ts @@ -0,0 +1,618 @@ +// src/components/DatasetOrganizer/utils/filenameTokenizer.ts +// Port of autobidsify's filename_tokenizer.py +// Philosophy: Python stats → dominant prefixes → subject IDs (no LLM needed for this part) +export interface SubjectRecord { + original_id: string; + numeric_id: string; + site: string | null; + pattern_name: string; + file_count: number; +} + +export interface SubjectAnalysis { + success: boolean; + method: string; + subject_records: SubjectRecord[]; + subject_count: number; + has_site_info: boolean; + variants_by_subject: Record; + python_generated_filename_rules: any[]; + id_mapping: { + id_mapping: Record; + reverse_mapping: Record; + strategy_used: string; + metadata_columns: string[]; + }; +} + +// Known neuroimaging terms to keep together (not split) +const NEUROIMAGING_TERMS = new Set([ + "T1w", + "T2w", + "T1", + "T2", + "PD", + "FLAIR", + "DWI", + "BOLD", +]); + +// Common words to exclude from dominant prefix detection +const COMMON_WORDS = new Set([ + "scan", + "data", + "file", + "image", + "sub", + "subject", + "patient", + "sample", + "test", + "experiment", +]); + +// ============================================================================ +// FilenameTokenizer — mirrors FilenameTokenizer class in filename_tokenizer.py +// ============================================================================ + +/** + * Advanced split: CamelCase + number boundaries + * "VHMCT" → ["VHM", "CT"] + * "CT1mm" → ["CT", "1", "mm"] + * "sub82352" → ["sub", "82352"] + */ 
+const splitAdvanced = (text: string): string[] => { + if (!text) return []; + + // Keep known neuroimaging terms together + if (NEUROIMAGING_TERMS.has(text)) return [text]; + + // Split on type boundaries: + // - Uppercase sequence before uppercase+lowercase: "VHM" before "CT" + // - CamelCase: uppercase followed by lowercase + // - Letter/digit boundaries + const pattern = /([A-Z]+(?=[A-Z][a-z]|\b|[0-9])|[A-Z][a-z]+|[a-z]+|[0-9]+)/g; + const tokens = text.match(pattern) || []; + return tokens.filter((t) => t.length > 0); +}; + +/** + * Tokenize a filename into meaningful tokens. + * Mirrors FilenameTokenizer.tokenize() in filename_tokenizer.py + * + * Examples: + * "VHMCT1mm-Hip (134).dcm" → ["VHM", "CT", "1", "mm", "Hip", "134"] + * "Beijing_sub82352" → ["Beijing", "sub", "82352"] + * "scan_001_T1w.nii" → ["scan", "001", "T1w"] + */ +export const tokenizeFilename = (filename: string): string[] => { + // Step 1: Remove all extensions (up to 6 chars) + let name = filename; + while (name.includes(".") && name.split(".").pop()!.length <= 6) { + name = name.substring(0, name.lastIndexOf(".")); + } + + // Step 2: Replace delimiters with spaces + for (const delim of ["_", "-", "(", ")", "[", "]", "{", "}", ",", ";"]) { + name = name.split(delim).join(" "); + } + + // Step 3: Split by spaces + const parts = name.split(/\s+/).filter((p) => p.length > 0); + + // Step 4: Advanced split each part + const tokens: string[] = []; + for (const part of parts) { + tokens.push(...splitAdvanced(part)); + } + + // Step 5: Filter empty + return tokens.filter((t) => t.trim().length >= 1); +}; + +// ============================================================================ +// FilenamePatternAnalyzer — mirrors FilenamePatternAnalyzer class +// ============================================================================ + +interface DominantPrefix { + prefix: string; + count: number; + percentage: number; +} + +interface TokenStatistics { + totalFiles: number; + tokenFrequency: 
Record; + prefixFrequency: Record; + dominantPrefixes: DominantPrefix[]; +} + +/** + * Find dominant prefixes — tokens appearing in >5% of files + * that are not common words. + * Mirrors FilenamePatternAnalyzer._find_dominant_prefixes() + */ +const findDominantPrefixes = ( + prefixCounter: Record, + totalFiles: number +): DominantPrefix[] => { + const threshold = totalFiles * 0.05; // 5% threshold + + return Object.entries(prefixCounter) + .filter(([prefix, count]) => { + if (count < threshold) return false; + if (COMMON_WORDS.has(prefix.toLowerCase())) return false; + return true; + }) + .sort((a, b) => b[1] - a[1]) + .slice(0, 20) + .map(([prefix, count]) => ({ + prefix, + count, + percentage: Math.round((count / totalFiles) * 1000) / 10, + })); +}; + +/** + * Analyze token statistics across all filenames. + * Mirrors FilenamePatternAnalyzer.analyze_token_statistics() + */ +export const analyzeTokenStatistics = ( + filenames: string[] +): TokenStatistics => { + const allTokens: Record = {}; + const prefixTokens: Record = {}; // first token only + + for (const filename of filenames) { + // Extract just filename from path + const fname = filename.includes("/") + ? filename.split("/").pop()! 
+ : filename; + + const tokens = tokenizeFilename(fname); + + // Count all tokens + for (const token of tokens) { + allTokens[token] = (allTokens[token] || 0) + 1; + } + + // CRITICAL: use first TOKEN as prefix (not regex match) + if (tokens.length > 0) { + const firstToken = tokens[0]; + prefixTokens[firstToken] = (prefixTokens[firstToken] || 0) + 1; + } + } + + const dominantPrefixes = findDominantPrefixes(prefixTokens, filenames.length); + + return { + totalFiles: filenames.length, + tokenFrequency: allTokens, + prefixFrequency: prefixTokens, + dominantPrefixes, + }; +}; + +// ============================================================================ +// extractSubjectAnalysis — mirrors build_bids_plan()'s subject extraction +// ============================================================================ + +/** + * Full subject extraction mirroring autobidsify's judgment sequence: + * + * 1. Try directory structure patterns (sub-01, subject_01, site_sub01, 001) + * 2. If fails → try filename token statistics (dominant prefix approach) + * 3. 
Generate ID mapping (already_bids / numeric / semantic) + */ +// export const extractSubjectAnalysis = (allFiles: string[], userNSubjects?: number | null, dominantPrefixes?: { prefix: string; count: number; percentage: number }[]): SubjectAnalysis => { +// // ── Step 1: Try directory structure (mirrors _extract_subjects_from_directory_structure) +// const fromDir = extractFromDirectoryStructure(allFiles); +// if (fromDir && fromDir.subject_records.length > 0) { +// const idMapping = generateIdMapping(fromDir); +// return { ...fromDir, id_mapping: idMapping }; +// } + +// // ── Step 2: Filename token statistics (mirrors filename_tokenizer approach) +// const fromTokens = extractFromTokenStatistics(allFiles); +// if (fromTokens && fromTokens.subject_records.length > 0) { +// const idMapping = generateIdMapping(fromTokens); +// return { ...fromTokens, id_mapping: idMapping }; +// } + +// // ── Fallback: empty result +// return { +// success: false, +// method: "none", +// subject_records: [], +// subject_count: 0, +// has_site_info: false, +// variants_by_subject: {}, +// python_generated_filename_rules: [], +// id_mapping: { +// id_mapping: {}, +// reverse_mapping: {}, +// strategy_used: "none", +// metadata_columns: [], +// }, +// }; +// }; + +// ── Step 1: Directory structure patterns +// Mirrors _extract_subjects_from_directory_structure() in planner.py +// const extractFromDirectoryStructure = ( +// allFiles: string[] +// ): Omit | null => { +// const patterns: Array<[RegExp, boolean, number, number | null, string]> = [ +// [/^([A-Za-z]+)_sub(\d+)$/i, true, 2, 1, "site_prefixed"], +// [/^sub-(\w+)$/i, false, 1, null, "standard_bids"], +// [/^subject[_-]?(\d+)$/i, false, 1, null, "simple"], +// [/^(\d{3,})$/, false, 1, null, "numeric_only"], +// ]; + +// const subjectRecords: SubjectRecord[] = []; +// const seenIds = new Set(); + +// for (const filepath of allFiles) { +// const parts = filepath.split("/"); +// for (const part of parts.slice(0, 2)) { +// for (const 
[ +// regex, +// hasSite, +// idGroup, +// siteGroup, +// patternName, +// ] of patterns) { +// const match = part.match(regex); +// if (match) { +// const originalId = match[0]; +// if (seenIds.has(originalId)) break; +// seenIds.add(originalId); +// subjectRecords.push({ +// original_id: originalId, +// numeric_id: match[idGroup], +// site: hasSite && siteGroup ? match[siteGroup] : null, +// pattern_name: patternName, +// file_count: 0, +// }); +// break; +// } +// } +// } +// } + +// if (subjectRecords.length === 0) return null; + +// subjectRecords.sort((a, b) => { +// const na = parseInt(a.numeric_id) || 0; +// const nb = parseInt(b.numeric_id) || 0; +// return na - nb; +// }); + +// return { +// success: true, +// method: "directory_structure", +// subject_records: subjectRecords, +// subject_count: subjectRecords.length, +// has_site_info: subjectRecords.some((r) => r.site !== null), +// variants_by_subject: {}, +// python_generated_filename_rules: [], +// }; +// }; + +// ── Step 2: Token statistics (dominant prefix approach) +// Mirrors FilenamePatternAnalyzer + analyze_filenames_for_subjects() in filename_tokenizer.py +// const extractFromTokenStatistics = ( +// allFiles: string[] +// ): Omit | null => { +// // Extract just filenames (not full paths) — mirrors filename_tokenizer.py line: +// // filenames = [f.split('/')[-1] for f in all_files] +// const filenames = allFiles.map((f) => +// f.includes("/") ? f.split("/").pop()! 
: f
// );
// NOTE(review): removed the rest of the commented-out extractFromTokenStatistics
// draft that followed here — it is superseded by extractFromFlatFilenames below.

/**
 * Mirrors _extract_numeric_id_from_identifier() in planner.py
 * BZZ003 → "003", sub-01 → "01", patient021 → "021"
 *
 * @returns the LAST digit run (leading zeros preserved), or null when the
 *          identifier contains no digits.
 */
const extractNumericIdFromIdentifier = (identifier: string): string | null => {
  const numbers = identifier.match(/\d+/g);
  if (!numbers) return null;
  return numbers[numbers.length - 1]; // last numeric sequence, preserving leading zeros
};

// ── Step 1: Directory structure patterns
// Mirrors _extract_subjects_from_directory_structure() in planner.py
const extractFromDirectoryStructure = (
  allFiles: string[]
): Omit<SubjectAnalysis, "id_mapping"> | null => {
  // Tuple layout: [regex, hasSite, idGroup, siteGroup, patternName]
  // FIX: the numeric_only pattern needs its capture group — with the bare
  // /^\d{3,}$/ form, match[idGroup] was undefined and numeric_id came out empty.
  const patterns: Array<[RegExp, boolean, number, number | null, string]> = [
    [/^([A-Za-z]+)_sub(\d+)$/i, true, 2, 1, "site_prefixed"],
    [/^sub-(\w+)$/, false, 1, null, "standard_bids"], // directory named sub-01
    [/^subject[_-]?(\d+)$/i, false, 1, null, "simple"],
    [/^(\d{3,})$/, false, 1, null, "numeric_only"], // directory named 001
  ];

  const subjectRecords: SubjectRecord[] = [];
  const seenIds = new Set<string>();

  for (const filepath of allFiles) {
    const parts = filepath.split("/");
    // Only check the first 2 path parts (directory levels), never the
    // filename itself — mirrors: for part in parts[:2]
    const dirsOnly = parts.slice(0, Math.min(2, parts.length - 1));

    for (const part of dirsOnly) {
      for (const [
        regex,
        hasSite,
        idGroup,
        siteGroup,
        patternName,
      ] of patterns) {
        const match = part.match(regex);
        if (match) {
          const originalId = match[0];
          if (seenIds.has(originalId)) break; // directory already recorded
          seenIds.add(originalId);
          subjectRecords.push({
            original_id: originalId,
            numeric_id: match[idGroup],
            site: hasSite && siteGroup ? match[siteGroup] : null,
            pattern_name: patternName,
            file_count: 0,
          });
          break; // first matching pattern wins for this directory
        }
      }
    }
  }

  if (subjectRecords.length === 0) return null;

  // Sort by embedded numeric ID; non-numeric IDs sort as 0.
  subjectRecords.sort((a, b) => {
    const na = parseInt(a.numeric_id, 10) || 0;
    const nb = parseInt(b.numeric_id, 10) || 0;
    return na - nb;
  });

  return {
    success: true,
    method: "directory_structure",
    subject_records: subjectRecords,
    subject_count: subjectRecords.length,
    has_site_info: subjectRecords.some((r) => r.site !== null),
    variants_by_subject: {},
    python_generated_filename_rules: [],
  };
};

// Data-bearing extensions considered for flat-filename subject extraction.
const DATA_EXTENSIONS = /\.(snirf|nii|nii\.gz|dcm|mat|nirs|h5|hdf5|edf|bdf)$/i;
// BIDS "trio" / readme files that must never be counted as subjects.
const TRIO_FILENAMES = new Set([
  "dataset_description.json",
  "participants.tsv",
  "readme.md",
  "readme.txt",
  "readme.rst",
  "readme",
]);

// ── Step 2: Flat filename identifier extraction
// Mirrors _extract_subjects_from_flat_filenames() in planner.py
// KEY DIFFERENCE from old version: uses base identifier (before first _)
// not tokenizer dominant prefixes
const extractFromFlatFilenames = (
  allFiles: string[]
): Omit<SubjectAnalysis, "id_mapping"> | null => {
  const identifierToFiles: Record<string, string[]> = {};

  for (const filepath of allFiles) {
    const filename = filepath.split("/").pop()!;

    // Skip trio files
    if (TRIO_FILENAMES.has(filename.toLowerCase()))
continue; + // Skip non-data files (PDFs, docs, JSONs that aren't data) + if (!DATA_EXTENSIONS.test(filename)) continue; + // Remove extension(s): sub-01_ses-left2s_task-FRESHMOTOR_nirs.snirf → sub-01_ses-left2s_task-FRESHMOTOR_nirs + const nameNoExt = filename.replace(/(\.[^.]+)+$/, ""); + + // Extract base identifier — alphanumeric before first underscore + // sub-01_ses-left2s → sub-01 + // BZZ003_rest → BZZ003 + // VHMCT1mm-Hip → VHMCT1mm-Hip (no underscore, take full name) + const match = nameNoExt.match(/^([A-Za-z0-9\-]+)/); + if (match) { + const identifier = match[1]; + if (!identifierToFiles[identifier]) identifierToFiles[identifier] = []; + identifierToFiles[identifier].push(filepath); + } + } + + if (Object.keys(identifierToFiles).length === 0) return null; + + // Sort by extracted numeric ID if possible (mirrors sort_key in planner.py) + const sortedIdentifiers = Object.keys(identifierToFiles).sort((a, b) => { + const na = extractNumericIdFromIdentifier(a); + const nb = extractNumericIdFromIdentifier(b); + if (na && nb) return parseInt(na) - parseInt(nb); + return a.localeCompare(b); + }); + + const subjectRecords: SubjectRecord[] = sortedIdentifiers.map( + (identifier, i) => ({ + original_id: identifier, + numeric_id: String(i + 1), + site: null, + pattern_name: "filename_identifier", + file_count: identifierToFiles[identifier].length, + }) + ); + + return { + success: true, + method: "flat_filename_identifiers", + subject_records: subjectRecords, + subject_count: subjectRecords.length, + has_site_info: false, + variants_by_subject: {}, + python_generated_filename_rules: [], + }; +}; + +export const extractSubjectAnalysis = ( + allFiles: string[], + userNSubjects?: number | null, + dominantPrefixes?: { prefix: string; count: number; percentage: number }[] +): SubjectAnalysis => { + // Step 1: directory structure + let subjectInfo = extractFromDirectoryStructure(allFiles); + + // Step 2: flat filename fallback + if (!subjectInfo || 
subjectInfo.subject_records.length === 0) { + subjectInfo = extractFromFlatFilenames(allFiles); + } + + if (!subjectInfo || subjectInfo.subject_records.length === 0) { + return { + success: false, + method: "none", + subject_records: [], + subject_count: 0, + has_site_info: false, + variants_by_subject: {}, + python_generated_filename_rules: [], + id_mapping: { + id_mapping: {}, + reverse_mapping: {}, + strategy_used: "none", + metadata_columns: [], + }, + }; + } + + // ── CRITICAL validation: mirrors planner.py lines 190-215 + // If extracted count doesn't match user hint but dominant prefixes do, + // fall back to dominant prefixes (handles VHM/VHF body-part over-extraction) + const pythonCount = subjectInfo.subject_count; + if ( + userNSubjects && + pythonCount !== userNSubjects && + dominantPrefixes && + dominantPrefixes.length === userNSubjects + ) { + subjectInfo = { + success: true, + method: "dominant_prefix_fallback", + subject_records: dominantPrefixes.map((p, i) => ({ + original_id: p.prefix, + numeric_id: String(i + 1), + site: null, + pattern_name: "dominant_prefix", + file_count: p.count, + })), + subject_count: dominantPrefixes.length, + has_site_info: false, + variants_by_subject: {}, + python_generated_filename_rules: [], + }; + } + + const idMapping = generateIdMapping(subjectInfo); + return { ...subjectInfo, id_mapping: idMapping }; +}; + +// ── ID mapping — mirrors _generate_subject_id_mapping() in planner.py +const generateIdMapping = ( + subjectInfo: Omit +): SubjectAnalysis["id_mapping"] => { + const records = subjectInfo.subject_records; + const idMapping: Record = {}; + const reverseMapping: Record = {}; + + // Detect already-BIDS format (sub-01, sub-02...) 
+ const allAlreadyBids = records.every((r) => /^sub-\w+$/i.test(r.original_id)); + + if (allAlreadyBids) { + for (const rec of records) { + const bidsId = rec.original_id.replace(/^sub-/i, ""); + idMapping[rec.original_id] = bidsId; + reverseMapping[bidsId] = rec.original_id; + } + return { + id_mapping: idMapping, + reverse_mapping: reverseMapping, + strategy_used: "already_bids", + metadata_columns: [], + }; + } + + // Numeric strategy: try to extract trailing numbers first + // BZZ003 → "003", patient021 → "021" (mirrors _extract_numeric_id_from_identifier) + const extractedNumbers: Record = {}; + for (const rec of records) { + const nums = rec.original_id.match(/\d+/g); + if (nums) extractedNumbers[rec.original_id] = nums[nums.length - 1]; + } + + const numericValues = Object.values(extractedNumbers); + const allUnique = new Set(numericValues).size === numericValues.length; + + if (Object.keys(extractedNumbers).length === records.length && allUnique) { + // Use extracted numeric IDs (preserving leading zeros) + for (const rec of records) { + const bidsId = extractedNumbers[rec.original_id]; + idMapping[rec.original_id] = bidsId; + reverseMapping[bidsId] = rec.original_id; + } + } else { + // Fall back to sequential numbering + for (let i = 0; i < records.length; i++) { + const orig = records[i].original_id; + const bidsId = String(i + 1); + idMapping[orig] = bidsId; + reverseMapping[bidsId] = orig; + } + } + + return { + id_mapping: idMapping, + reverse_mapping: reverseMapping, + strategy_used: "numeric", + metadata_columns: ["original_id"], + }; +}; diff --git a/src/components/User/Dashboard/DatasetOrganizer/utils/llmHelpers.ts b/src/components/User/Dashboard/DatasetOrganizer/utils/llmHelpers.ts new file mode 100644 index 0000000..3f2a6a1 --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/utils/llmHelpers.ts @@ -0,0 +1,707 @@ +// src/components/DatasetOrganizer/utils/llmHelpers.ts +import { + categorizeFile, + detectModality, + 
getCountsByExtension, + getUserContextText, +} from "./fileAnalyzers"; +import { + extractSubjectAnalysis, + analyzeTokenStatistics, +} from "./filenameTokenizer"; +import { FileItem } from "redux/projects/types/projects.interface"; + +/** + * Build structured file summary for LLM + */ +export const buildFileSummary = (files: FileItem[]): string => { + let summary = ""; + + // Trio section — AI generated files only + const datasetDesc = files.find( + (f) => f.source === "ai" && f.name === "dataset_description.json" + ); + const readme = files.find((f) => f.source === "ai" && f.name === "README.md"); + const participants = files.find( + (f) => f.source === "ai" && f.name === "participants.tsv" + ); + + const hasTrioFiles = datasetDesc || readme || participants; + + if (hasTrioFiles) { + summary += "GENERATED BIDS METADATA FILES:\n"; + summary += "=".repeat(70) + "\n\n"; + + if (datasetDesc?.content) { + summary += "[dataset_description.json]:\n"; + summary += datasetDesc.content + "\n\n"; + } + + if (readme?.content) { + summary += "[README.md]:\n"; + summary += readme.content.slice(0, 1000) + "\n\n"; + } + + if (participants?.content) { + summary += "[participants.tsv]:\n"; + summary += participants.content + "\n\n"; + } + + summary += "=".repeat(70) + "\n\n"; + } + + // Data files section — user dropped files only + summary += "DATA FILES TO CONVERT:\n"; + summary += "=".repeat(70) + "\n"; + + const dataFiles = files.filter( + (f) => f.source === "user" && f.type === "file" + ); + + const formatLabel: Record = { + dicom: "format: DICOM → convert_to: nifti (dcm2niix)", + matlab: "format: MATLAB → convert_to: snirf", + homer3: "format: Homer3 → convert_to: snirf", + nifti: "format: NIfTI → format_ready: true", + hdf5: "format: SNIRF → format_ready: true", + }; + + // dataFiles.forEach((f) => { + // const category = categorizeFile(f); + // const fmt = formatLabel[f.fileType || ""] || ""; // add + // summary += ` - ${f.name} [${category}]`; + // if (fmt) summary += ` 
<${fmt}>`; // add + // if (f.sourcePath) summary += ` (${f.sourcePath})`; + // summary += "\n"; + // }); + const byType: Record = {}; + dataFiles.forEach((f) => { + const key = f.fileType || "other"; + if (!byType[key]) byType[key] = []; + byType[key].push(f); + }); + + Object.entries(byType).forEach(([type, typeFiles]) => { + const fmt = formatLabel[type] || ""; + const sample = typeFiles.slice(0, 5); + + summary += `\n[${type.toUpperCase()}] ${typeFiles.length} files total`; + if (fmt) summary += ` — ${fmt}`; + summary += "\n"; + + sample.forEach((f) => { + const category = categorizeFile(f); + summary += ` - ${f.name} [${category}]`; + if (f.sourcePath) summary += ` (${f.sourcePath})`; + summary += "\n"; + }); + + if (typeFiles.length > 5) { + summary += ` ... and ${typeFiles.length - 5} more ${type} files\n`; + } + }); + + return summary; +}; + +/** + * Analyze file patterns + */ +export const analyzeFilePatterns = (files: FileItem[]): string => { + const dataFiles = files.filter((f) => f.type === "file" && !f.isUserMeta); + const filenames = dataFiles.map((f) => f.name); + + const extensions = [ + ...new Set( + filenames.map((name) => { + const parts = name.toLowerCase().split("."); + return parts.length > 1 ? 
parts[parts.length - 1] : "none"; + }) + ), + ]; + + // Categorize files + const categorized: Record = { + anatomical: [], + functional: [], + diffusion: [], + other: [], + }; + + dataFiles.forEach((f) => { + const category = categorizeFile(f); + if (category.startsWith("anatomical")) { + categorized.anatomical.push(f.name); + } else if (category.startsWith("functional")) { + categorized.functional.push(f.name); + } else if (category.includes("diffusion")) { + categorized.diffusion.push(f.name); + } else { + categorized.other.push(f.name); + } + }); + + return ` +FILENAME ANALYSIS: +${"=".repeat(70)} +Total data files: ${dataFiles.length} +File types: ${extensions.join(", ")} + +File Categories: + Anatomical scans: ${categorized.anatomical.length} + Functional scans: ${categorized.functional.length} + Diffusion scans: ${categorized.diffusion.length} + Other files: ${categorized.other.length} + +Sample filenames (first 10): +${filenames + .slice(0, 10) + .map((name) => ` - ${name}`) + .join("\n")} +${ + filenames.length > 10 ? `\n ... 
and ${filenames.length - 10} more files` : "" +} +`; +}; + +/** + * Get user context (README, instructions, participant info) + */ +export const getUserContext = (files: FileItem[]): string => { + const userText = getUserContextText(files); + if (!userText) return "No user-provided context available."; + return `USER-PROVIDED CONTEXT:\n${"=".repeat(70)}\n${userText}`; +}; + +/** + * Get file annotations (notes) + */ +export const getFileAnnotations = (files: FileItem[]): string => { + const filesWithNotes = files.filter((f) => f.note); + if (filesWithNotes.length === 0) return ""; + + return ` +FILE ANNOTATIONS (User Notes): +${filesWithNotes.map((f) => ` ${f.name}: ${f.note}`).join("\n")} +`; +}; + +/** + * Download evidence JSON file + */ +export const downloadJSON = (data: any, filename: string) => { + const blob = new Blob([JSON.stringify(data, null, 2)], { + type: "application/json", + }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = filename; + a.click(); + URL.revokeObjectURL(url); +}; + +/** + * Download text file(not using this function yet) + */ +export const downloadText = (text: string, filename: string) => { + const blob = new Blob([text], { type: "text/plain" }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = filename; + a.click(); + URL.revokeObjectURL(url); +}; + +/** + * Build evidence bundle structure + */ +export const buildEvidenceBundle = ( + files: FileItem[], + baseDirectoryPath: string, + userOverrides?: { + nSubjects: number | null; + modalityHint: string; + describeText: string; + } +): any => { + const counts = getCountsByExtension(files); + const userText = getUserContextText(files); + + // add for samples ---start--- + const dataFiles = files.filter( + (f) => f.source === "user" && f.type === "file" + ); + + // Mirror autobidsify's _intelligent_file_sampling() + // Group by file type, take up to 5 samples 
per type + const samplesByType: Record = {}; + dataFiles.forEach((f) => { + const key = f.fileType || "other"; + if (!samplesByType[key]) samplesByType[key] = []; + if (samplesByType[key].length < 5) { + samplesByType[key].push(f); + } + }); + + const samples = Object.values(samplesByType) + .flat() + .map((f) => ({ + relpath: f.sourcePath || f.name, + filename: f.name, + suffix: f.name.split(".").pop() || "", + kind: f.fileType || "other", + size: 0, + })); + + // ----end--- + + // add this for subject_analysis.json + // const allFiles = files + // .filter((f) => f.source === "user" && f.type === "file") + // .map((f) => f.sourcePath || f.name); + const allFiles = files + .filter((f) => f.source === "user" && f.type === "file") + .map((f) => { + const path = f.sourcePath || f.name; + // Strip leading folder name — mirrors Python's relative-to-data_root paths + // "1-FRESH-Motor-snirf/sub-01_ses-..." → "sub-01_ses-..." + const parts = path.split("/"); + return parts.length > 1 ? parts.slice(1).join("/") : path; + }); + + const subjectAnalysis = extractSubjectAnalysis(allFiles); + // ← end + + // ── filename analysis (must come AFTER subjectAnalysis) + const justFilenames = allFiles.map((f) => + f.includes("/") ? f.split("/").pop()! 
: f + ); + const tokenStats = analyzeTokenStatistics(justFilenames); + const dominantCount = tokenStats.dominantPrefixes.length; + const userNSubjects = subjectAnalysis.subject_count || null; + let filenameConfidence: "high" | "medium" | "low" | "none" = "none"; + if (dominantCount > 0) { + if (userNSubjects && dominantCount === userNSubjects) + filenameConfidence = "high"; + else if (dominantCount >= 2 && dominantCount <= 10) + filenameConfidence = "medium"; + else filenameConfidence = "low"; + } + const filenameAnalysis = { + python_statistics: { + total_files: tokenStats.totalFiles, + token_frequency: tokenStats.tokenFrequency, + prefix_frequency: tokenStats.prefixFrequency, + dominant_prefixes: tokenStats.dominantPrefixes, + unique_token_count: Object.keys(tokenStats.tokenFrequency).length, + unique_prefix_count: Object.keys(tokenStats.prefixFrequency).length, + }, + confidence: filenameConfidence, + recommendation: buildFilenameRecommendation( + tokenStats.dominantPrefixes, + userNSubjects + ), + }; + + // subject count decision logic: + const finalSubjectCount = + userOverrides?.nSubjects ?? // user wins + subjectAnalysis.subject_count ?? + tokenStats.dominantPrefixes.length ?? + null; + + const participantEvidence = buildParticipantMetadataEvidence( + allFiles, + // pass the already-built documents array + files + .filter( + (f) => f.source === "user" && f.content && f.content.trim().length > 0 + ) + .map((f) => ({ + relpath: f.sourcePath || f.name, + filename: f.name, + content: f.content || "", + })) + ); + + return { + root: baseDirectoryPath, + counts_by_ext: counts, + samples, + all_files: allFiles, + filename_analysis: filenameAnalysis, // NEW + participant_metadata_evidence: participantEvidence, // NEW + subject_detection: { + method: "hybrid_analysis", + path_based_count: subjectAnalysis.subject_count, + path_based_confidence: subjectAnalysis.success ? 
"medium" : "none", + filename_based_count: tokenStats.dominantPrefixes.length, + filename_based_confidence: filenameConfidence, + final_count: finalSubjectCount, + count_source: + userOverrides?.nSubjects != null + ? "user_provided" + : subjectAnalysis.success + ? subjectAnalysis.method + : "filename_based", + best_pattern: subjectAnalysis.subject_records[0]?.pattern_name || "none", + }, + documents: files + .filter((f) => { + if (f.source !== "user") return false; // exclude AI files + if (!f.content || f.content.trim().length === 0) return false; + + // ✅ Text files - primary source + if (["text", "office", "meta"].includes(f.fileType || "")) return true; + + // ✅ NIfTI headers - useful for LLM to understand scan parameters + if (f.fileType === "nifti" && f.contentType === "nifti") return true; + + // ✅ HDF5/SNIRF structure - useful for fNIRS datasets + if (f.fileType === "hdf5" && f.contentType === "hdf5") return true; + + // ✅ NeuroJSON - already JSON text + if (f.fileType === "neurojsonText") return true; + + // ✅ Catch undefined fileType but has content (your current bug) + if (f.fileType === undefined && f.content) return true; + + return false; + }) + .map((f) => ({ + relpath: f.sourcePath || f.name, + filename: f.name, + type: f.fileType || "unknown", + content: f.content || "", + purpose: "experimental_protocol_or_metadata", + })), + user_hints: { + user_text: userText, + modality_hint: userOverrides?.modalityHint || detectModality(files), + n_subjects: finalSubjectCount, + }, + // subject_analysis: subjectAnalysis, + trio_found: { + "dataset_description.json": files.some( + (f) => f.source === "user" && f.name === "dataset_description.json" + ), + "README.md": files.some( + (f) => + f.source === "user" && + (f.name === "README.md" || + f.name === "README.txt" || + f.name === "README.rst" || + f.name === "readme.md") + ), + "participants.tsv": files.some( + (f) => f.source === "user" && f.name === "participants.tsv" + ), + }, + }; +}; + +const 
buildFilenameRecommendation = ( + dominantPrefixes: { prefix: string; count: number; percentage: number }[], + userNSubjects: number | null +): string => { + if (dominantPrefixes.length === 0) + return "No clear filename patterns detected. Recommend user describe subject identification."; + if (userNSubjects && dominantPrefixes.length === userNSubjects) { + const prefixStr = dominantPrefixes.map((p) => p.prefix).join(", "); + return `HIGH CONFIDENCE: Detected ${dominantPrefixes.length} dominant prefixes (${prefixStr}) matching user hint of ${userNSubjects} subjects.`; + } + if (dominantPrefixes.length >= 2 && dominantPrefixes.length <= 5) + return `MEDIUM CONFIDENCE: Detected ${dominantPrefixes.length} potential subject groups. Will send to LLM for validation.`; + return `LOW CONFIDENCE: Found ${dominantPrefixes.length} prefix patterns, which may or may not represent subjects. LLM will analyze.`; +}; + +const buildParticipantMetadataEvidence = ( + allFiles: string[], + documents: { relpath: string; filename: string; content: string }[] +): Record => { + const evidence: Record = {}; + + // Evidence 1: explicit metadata files + const metadataPatterns = [ + "participants", + "subjects", + "metadata", + "demographics", + "phenotype", + "participant_data", + "subject_info", + ]; + const metadataExts = [".csv", ".tsv", ".json", ".txt", ".xlsx"]; + const metadataFiles = allFiles.filter((f) => { + const fname = f.split("/").pop()!.toLowerCase(); + const ext = "." + fname.split(".").pop()!; + return ( + metadataPatterns.some((p) => fname.startsWith(p)) && + metadataExts.includes(ext) + ); + }); + evidence.explicit_metadata_files = + metadataFiles.length > 0 + ? 
{ + found: true, + count: metadataFiles.length, + files: metadataFiles.map((f) => ({ + filename: f.split("/").pop(), + path: f, + })), + } + : { found: false }; + + // Evidence 2: DICOM headers (already extracted into documents content) + // Skip re-reading — not feasible client-side + + // Evidence 3: filename semantic patterns + const genderKws = [ + "male", + "female", + "_m_", + "_f_", + "_m.", + "_f.", + "VHM", + "VHF", + ]; + const groupKws = [ + "patient", + "control", + "healthy", + "hc", + "pt", + "ctrl", + "case", + ]; + const ageRegexes = [/\d{2}yo/, /\d{2}y\b/, /age\d{2}/, /y\d{2}/]; + + const genderHits: any[] = [], + groupHits: any[] = [], + ageHits: any[] = []; + for (const f of allFiles.slice(0, 200)) { + const fn = (f.split("/").pop() || "").toLowerCase(); + for (const kw of genderKws) { + if (fn.includes(kw.toLowerCase())) { + genderHits.push({ keyword: kw, filename: fn }); + break; + } + } + for (const kw of groupKws) { + if (fn.includes(kw)) { + groupHits.push({ keyword: kw, filename: fn }); + break; + } + } + for (const rx of ageRegexes) { + if (rx.test(fn)) { + ageHits.push({ pattern: rx.source, filename: fn }); + break; + } + } + } + const totalSemanticHints = + genderHits.length + groupHits.length + ageHits.length; + evidence.filename_semantic_patterns = + totalSemanticHints > 0 + ? 
{ + found: true, + patterns: { + gender_keywords: genderHits.slice(0, 10), + group_keywords: groupHits.slice(0, 10), + age_patterns: ageHits.slice(0, 10), + }, + } + : { found: false }; + + // Evidence 4: demographic keywords in documents + const demoTerms = [ + "male", + "female", + "sex", + "gender", + "age", + "years old", + "patient", + "control", + "healthy", + "diagnosis", + "participants", + "subjects", + "volunteers", + "cohort", + "cadaver", + "adult", + "child", + ]; + const demoHits: any[] = []; + for (const doc of documents) { + const content = (doc.content || "").toLowerCase(); + const found: any[] = []; + for (const term of demoTerms) { + const idx = content.indexOf(term); + if (idx !== -1) { + const snippet = content + .slice(Math.max(0, idx - 100), idx + 100) + .trim() + .replace(/\s+/g, " "); + found.push({ term, context_snippet: snippet.slice(0, 200) }); + } + } + if (found.length > 0) + demoHits.push({ document: doc.filename, found_terms: found.slice(0, 5) }); + } + evidence.document_demographic_keywords = + demoHits.length > 0 + ? { + found: true, + documents_with_keywords: demoHits.length, + details: demoHits.slice(0, 5), + } + : { found: false }; + + // Evidence 5: balanced prefix distribution + const justFilenames = allFiles.map((f) => + f.includes("/") ? f.split("/").pop()! 
: f + ); + const tokenStats = analyzeTokenStatistics(justFilenames); + const dominant = tokenStats.dominantPrefixes; + if (dominant.length === 2) { + const [p1, p2] = dominant; + const ratio = + Math.min(p1.percentage, p2.percentage) / + Math.max(p1.percentage, p2.percentage); + if (ratio > 0.8) { + evidence.balanced_prefix_distribution = { + found: true, + prefix_1: p1.prefix, + prefix_1_percentage: p1.percentage, + prefix_1_count: p1.count, + prefix_2: p2.prefix, + prefix_2_percentage: p2.percentage, + prefix_2_count: p2.count, + distribution_ratio: Math.round(ratio * 100) / 100, + note: "Two balanced groups may indicate gender/group split", + }; + } else { + evidence.balanced_prefix_distribution = { found: false }; + } + } else { + evidence.balanced_prefix_distribution = { found: false }; + } + + const evidenceCount = Object.values(evidence).filter( + (v) => typeof v === "object" && v?.found === true + ).length; + evidence.summary = { + total_evidence_types_found: evidenceCount, + evidence_types: Object.entries(evidence) + .filter(([, v]) => typeof v === "object" && v?.found === true) + .map(([k]) => k), + }; + + return evidence; +}; + +/** + * Extract subject identifiers from file list + * Mirrors autobidsify's _extract_subjects_from_flat_filenames() + */ +export const extractSubjectsFromFiles = ( + files: FileItem[] +): { + subjects: { originalId: string; bidsId: string }[]; + strategy: string; +} => { + const dataFiles = files.filter( + (f) => f.source === "user" && f.type === "file" + ); + + // Count occurrences of each base identifier + const identifierCounts: Record = {}; + // dataFiles.forEach((f) => { + // const nameNoExt = f.name.replace(/\.[^/.]+$/, "").replace(/\.nii$/, ""); + // const match = nameNoExt.match(/^([A-Za-z0-9\-]+)/); + // if (match) { + // const id = match[1]; + // identifierCounts[id] = (identifierCounts[id] || 0) + 1; + // } + // }); + dataFiles.forEach((f) => { + const nameNoExt = f.name + .replace(/\.nii\.gz$/i, "") + 
.replace(/\.[^/.]+$/, "") + .replace(/\s*\([^)]*\)/, ""); // remove (309) etc. + + // Split on first digit sequence or underscore — take prefix only + // VHMCT1mm → VHMCT, sub-01 → sub-01, BZZ003 → BZZ + const match = nameNoExt.match(/^([A-Za-z]+(?:-[A-Za-z]+)*)/); + if (match) { + const id = match[1]; + identifierCounts[id] = (identifierCounts[id] || 0) + 1; + } + }); + + // Sort by frequency — most common identifiers are likely subjects + // const sorted = Object.entries(identifierCounts).sort((a, b) => b[1] - a[1]); + + // Step 2: Keep only identifiers that appear in multiple files + // (single-file identifiers are likely body parts, not subjects) + const totalFiles = dataFiles.length; + const threshold = Math.max(2, Math.floor(totalFiles * 0.05)); // at least 5% of files + + const filtered = Object.entries(identifierCounts) + .filter(([, count]) => count >= threshold) + .sort((a, b) => b[1] - a[1]); + + // If filtering leaves nothing, fall back to all identifiers + const candidates = + filtered.length > 0 + ? filtered + : Object.entries(identifierCounts).sort((a, b) => b[1] - a[1]); + // Step 3: Use numeric strategy for >10 subjects + const strategy = candidates.length > 10 ? "numeric" : "numeric"; + // const strategy = sorted.length > 10 ? "numeric" : "semantic"; + + // const subjects = sorted.map(([originalId], i) => ({ + // originalId, + // bidsId: + // strategy === "numeric" + // ? 
String(i + 1) + // : originalId.replace(/[^a-zA-Z0-9]/g, ""), + // })); + const subjects = candidates.map(([originalId], i) => ({ + originalId, + bidsId: String(i + 1), + })); + + return { subjects, strategy }; +}; + +export const buildIngestInfo = ( + baseDirectoryPath: string + // outputDir: string +): object => { + // Remove trailing slash if any + const cleanPath = baseDirectoryPath.replace(/\/$/, ""); + + // Get parent directory + const parentDir = cleanPath.substring(0, cleanPath.lastIndexOf("/")); + + // Append outputs: "/home/.../test3-web/outputs" + const outputDir = `${parentDir}/outputs`; + + return { + step: "ingest", + timestamp: new Date().toISOString(), + input_path: baseDirectoryPath, + input_type: "directory", + output_dir: outputDir, + staging_dir: null, + actual_data_path: baseDirectoryPath, // ← the key field executor uses + status: "complete", + }; +}; diff --git a/src/components/User/Dashboard/DatasetOrganizer/utils/llmPrompts.ts b/src/components/User/Dashboard/DatasetOrganizer/utils/llmPrompts.ts new file mode 100644 index 0000000..6d6a796 --- /dev/null +++ b/src/components/User/Dashboard/DatasetOrganizer/utils/llmPrompts.ts @@ -0,0 +1,580 @@ +// src/components/DatasetOrganizer/utils/llmPrompts.ts + +/** + * Prompt for dataset_description.json generation + * Based on auto-bidsify's PROMPT_TRIO_DATASET_DESC + */ +export const getDatasetDescriptionPrompt = ( + userText: string, + evidenceBundle?: any +): string => { + const documentsContext = + evidenceBundle?.documents + ?.map((d: any) => `[${d.filename}]:\n${d.content}`) + .join("\n\n") || ""; + return `You are a BIDS dataset_description.json generator. + + CRITICAL: Use the following user-provided content to extract dataset information! 
+ + USER-PROVIDED CONTENT: + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + ${userText || "(no readme/instructions provided)"} + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + ALL UPLOADED DOCUMENTS (search these for dataset name, authors, etc.): + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + ${documentsContext || "(no documents)"} + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + Also consider the dataset folder name for clues about the dataset name: + File paths start with: ${evidenceBundle?.root || ""} + + CRITICAL RULES: + - Authors MUST be array: ["Name 1", "Name 2", "Name 3"] + - DO NOT include empty strings "" or empty arrays [] + - DO NOT use placeholders like "Extract" or "Dataset Name" + - Extract ACTUAL dataset name from content + - License: use "PD" if not specified, normalize "CC BY 4.0" to "CC-BY-4.0" + + Extract from user-provided content: + - Dataset name (look for study title, project name, experiment name) + - Authors/institutions mentioned + - Funding sources (if mentioned) + - License information + + Output ONLY valid JSON (no markdown fences, no explanations): + { + "Name": "Actual Dataset Name Here", + "BIDSVersion": "1.10.0", + "DatasetType": "raw", + "License": "PD", + "Authors": ["Actual Author Name"] + }`; +}; + +/** + * Prompt for README.md generation + * Based on auto-bidsify's PROMPT_TRIO_README + */ +export const getReadmePrompt = (userText: string): string => { + return `Generate a comprehensive BIDS README.md file. + + CRITICAL: Use the following user-provided content as the PRIMARY source! 
+ + USER-PROVIDED CONTENT: + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + ${userText} + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + Create a comprehensive README with these sections: + - ## Overview (extract from user content) + - ## Dataset Description (expand on user content) + - ## Data Acquisition (if information available) + - ## File Organization (describe BIDS structure) + - ## Usage Notes + - ## References (if mentioned in user content) + + Use the user-provided content to inform ALL sections. + Expand and structure the information, but stay true to the original content. + + OUTPUT: Direct Markdown text only (no JSON wrapper, no code fences)`; +}; + +/** + * Prompt for participants.tsv generation + * Based on auto-bidsify's PROMPT_TRIO_PARTICIPANTS + */ +export const getParticipantsPrompt = (userText: string): string => { + return `You are a BIDS participants.tsv column schema generator. + +USER-PROVIDED CONTENT: +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +${userText} +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + +YOUR JOB: Decide which columns belong in participants.tsv based ONLY on what is explicitly stated in the user content above. 
+ +STRICT RULES: +- participant_id is ALWAYS required +- ONLY add columns for demographics EXPLICITLY mentioned in the content +- DO NOT invent age, sex, handedness, or any column not directly stated +- If no demographic info is mentioned, return ONLY participant_id + +Output ONLY valid JSON (no markdown fences, no explanation): +{ + "columns": [ + {"name": "participant_id", "required": true} + ] +} + +Examples: +- Content mentions "1 male, 1 female" → add {"name": "sex", "levels": ["M", "F"]} +- Content mentions "patients and controls" → add {"name": "group", "levels": ["patient", "control"]} +- Content mentions nothing about demographics → return only participant_id +`; +}; + +// export const getParticipantsPrompt = (userText: string): string => { +// return `Generate a BIDS participants.tsv file. + +// CRITICAL: Extract participant metadata from the following user-provided content! + +// USER-PROVIDED CONTENT: +// ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +// ${userText} +// ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + +// STRICT RULES: +// - First column MUST be "participant_id" +// - Use tab (\\t) as delimiter +// - ONLY include columns for data EXPLICITLY mentioned in the user content above +// - DO NOT invent or assume age, sex, handedness, or any other column unless it is directly stated in the content +// - If no demographic data is mentioned, output ONLY participant_id column +// - If only subject IDs are known, output the minimal form below + +// MINIMAL FORM (use this when no demographics are mentioned): +// participant_id +// sub-01 +// sub-02 + +// Extract participant information: +// - Subject IDs (look for "sub-01", "2 subjects", "participants: sub-01 and sub-02", etc.) 
+// - Demographics if available: +// - "1 male, 1 female" → sex column: M, F +// - "ages 25-65" → age column +// - "patients and controls" → group column +// - "right-handed" → handedness column + +// Rules: +// - First column MUST be "participant_id" +// - Use tab (\\t) as delimiter +// - Include only columns with actual data (no empty columns) +// - If only subject IDs known, output: participant_id\\nsub-01\\nsub-02 + +// Examples: +// - If text says "2 subjects: sub-01 and sub-02" with no demographics: +// participant_id +// sub-01 +// sub-02 + +// - If text says "sub-01 (25y, male), sub-02 (30y, female)": +// participant_id\\tage\\tsex +// sub-01\\t25\\tM +// sub-02\\t30\\tF + +// OUTPUT: Direct TSV text only (no JSON, no code fences, no markdown)`; +// }; + +/** + * Main prompt for BIDS conversion script generation + */ +export const getConversionScriptPrompt = ( + baseDirectoryPath: string, + fileSummary: string, + filePatterns: string, + userContext: string, + annotations: string +): string => { + return `You are a BIDS conversion expert specializing in neuroimaging data. + + ╔════════════════════════════════════════════════════════════════╗ + ║ TASK: Generate Python script to convert dataset to BIDS ║ + ╚════════════════════════════════════════════════════════════════╝ + + BASE DIRECTORY: ${baseDirectoryPath} + + ${fileSummary} + + ${filePatterns} + + ${userContext} + + ${annotations} + + CRITICAL FILE CATEGORIZATION RULES: + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + Files are marked with categories. 
YOU MUST respect these categories: + + - [anatomical-T1w] → Goes to sub-XX/anat/ folder, rename to sub-XX_T1w.nii.gz + - [anatomical-T2w] → Goes to sub-XX/anat/ folder, rename to sub-XX_T2w.nii.gz + - [functional-bold] → Goes to sub-XX/func/ folder, rename to sub-XX_task-_run-XX_bold.nii.gz + - [functional-nirs] → Goes to sub-XX/func/ folder, rename to sub-XX_task-_nirs.snirf + - [anatomical-dicom] → Convert to NIfTI using dcm2niix, then goes to sub-XX/anat/ + - [diffusion] → Goes to sub-XX/dwi/ folder + - [fieldmap] → Goes to sub-XX/fmap/ folder + + FORMAT CONVERSION RULES: + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + Some files require conversion before copying to BIDS: + + - + → Run: subprocess.run(['dcm2niix', '-o', dest_dir, '-f', bids_filename, src_file]) + → Output goes to sub-XX/anat/ + + - + → Use MNE-Python: mne.export.export_raw(dst, raw, fmt='snirf') + → OR note in script that manual conversion is needed + → Output goes to sub-XX/nirs/ + + - + → Same as MATLAB conversion above + → Output goes to sub-XX/nirs/ + + - + → Direct copy, no conversion needed + + - + → Direct copy, no conversion needed + + FILENAME-BASED DETECTION (if category unclear): + - Contains "task-" AND "bold" → ALWAYS functional (func/ folder) + - Contains "T1w" → ALWAYS anatomical (anat/ folder) + - Contains "T2w" OR "inplaneT2" → ALWAYS anatomical (anat/ folder) + - Ends with ".snirf" → ALWAYS functional (func/ folder) + - Ends with ".dcm" → ALWAYS needs dcm2niix conversion → anat/ folder + - Ends with ".mat" → ALWAYS needs snirf conversion → nirs/ folder + - Ends with ".nirs" → ALWAYS needs snirf conversion → nirs/ folder + + ⚠️ CRITICAL: NEVER put task-based files in anat/ folder! + ⚠️ CRITICAL: NEVER put T1w/T2w files in func/ folder! + + CRITICAL INSTRUCTIONS: + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + 1. The BIDS metadata files (dataset_description.json, README.md, participants.tsv) + have ALREADY been generated above. 
Your script MUST: + ✓ Use the EXACT content from dataset_description.json (copy it verbatim) + ✓ Use the EXACT participant IDs from participants.tsv + ✓ Write these files as-is to the BIDS directory + + 2. All file paths are RELATIVE to base directory: ${baseDirectoryPath} + When accessing files: os.path.join(base_dir, relative_path) + + Example: + File shown as: "Balloon Analog Risk-taking Task/sub-01_T1w.nii.gz" + Full path: os.path.join('${baseDirectoryPath}', 'Balloon Analog Risk-taking Task', 'sub-01_T1w.nii.gz') + + 3. BIDS directory structure to create: + bids_dataset/ + ├── dataset_description.json ← Write exact content from above + ├── README.md ← Write exact content from above + ├── participants.tsv ← Write exact content from above + └── sub-XX/ + ├── anat/ ← Anatomical scans only! + │ ├── sub-XX_T1w.nii.gz + │ └── sub-XX_T2w.nii.gz + └── func/ ← Functional scans only! + └── sub-XX_task-_run-XX_bold.nii.gz + + 4. For EACH data file, you must: + a) Extract subject ID from filename (e.g., "sub-01" from "sub-01_T1w.nii.gz") + b) Determine modality from file category: + - [anatomical-*] → modality = 'anat' + - [functional-*] → modality = 'func' + - [diffusion] → modality = 'dwi' + c) Construct source path including any parent folders + d) Create destination path: bids_dir/sub-XX/modality/new_filename + e) Copy the file + f) Create JSON sidecar (for imaging files) + + 5. Handle run numbers correctly: + - Functional scans often have run-01, run-02, run-03 + - Extract run number ONLY from files that have "_run-" in filename + - Anatomical scans typically don't have run numbers + + 6. 
Error handling: + - Wrap file operations in try-except + - Print progress messages + - Print errors but continue processing + + OUTPUT REQUIREMENTS: + ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + Generate a complete, runnable Python script that: + - Imports: os, shutil, json, pathlib + - Defines base_dir and bids_dir + - Creates BIDS directory structure (based on participants.tsv) + - Writes the three metadata files (exact content from above) + - Loops through data files and processes each one + - Includes clear comments explaining each step + - Has error handling and progress messages + + OUTPUT ONLY THE PYTHON SCRIPT (no markdown code fences, no explanations before or after).`; +}; + +/** + * Prompt for BIDSPlan.yaml generation + * Based on autobidsify's PROMPT_BIDS_PLAN + */ +export const getBIDSPlanPrompt = ( + fileSummary: string, + filePatterns: string, + userContext: string, + subjectInfo: { + subjects: { originalId: string; bidsId: string }[]; + strategy: string; + }, + countsByExt: Record, + sampleFiles: string, + evidenceBundle: any +): string => { + const subjectAnalysis = evidenceBundle.subject_analysis; + const assignmentRules = subjectInfo.subjects + .slice(0, 50) // cap at 50 + .map( + (s) => + `- match:\n - '*${s.originalId}*'\n original: ${s.originalId}\n subject: '${s.bidsId}'` + ) + .join("\n"); + + const subjectLabels = subjectInfo.subjects + .slice(0, 50) + .map((s) => ` - '${s.bidsId}'`) + .join("\n"); + + const participantMetadata = subjectInfo.subjects + .slice(0, 50) + .map((s) => ` '${s.bidsId}':\n original_id: ${s.originalId}`) + .join("\n"); + + const countsText = Object.entries(countsByExt) + .map(([ext, count]) => ` ${ext}: ${count} files`) + .join("\n"); + + const pythonSubjectAnalysisText = subjectAnalysis + ? 
`\nPYTHON SUBJECT ANALYSIS (for context only — do NOT re-detect subjects):\n${JSON.stringify( + { + method: subjectAnalysis.method, + subject_count: subjectAnalysis.subject_count, + subject_examples: (subjectAnalysis.subject_records || []) + .slice(0, 5) + .map((r: any) => ({ + original: r.original_id, + file_count: r.file_count, + })), + }, + null, + 2 + )}\n` + : ""; + + return `You are a BIDS dataset architect. Generate a BIDSPlan.yaml file. + +${fileSummary} + +${filePatterns} + +${userContext} + +${pythonSubjectAnalysisText} + +CONVERSION RULES (CRITICAL): +- .dcm → format_ready: false, convert_to: nifti, modality: mri +- .nii/.nii.gz → format_ready: true, convert_to: none, modality: mri +- .jnii/.bnii → format_ready: false, convert_to: nifti, modality: mri +- .mat → format_ready: false, convert_to: snirf, modality: nirs +- .nirs → format_ready: false, convert_to: snirf, modality: nirs +- .snirf → format_ready: true, convert_to: none, modality: nirs + +YOUR ONLY JOB: Generate the mappings section based on the file types present. +Copy assignment_rules, participant_metadata, and subjects sections EXACTLY as shown in the OUTPUT below. 
+ +OUTPUT (Raw YAML only, no markdown, no explanation): + +assignment_rules: +${assignmentRules} + +FILE EXTENSION COUNTS (use these to determine which mappings to generate): +${countsText} + +SAMPLE FILENAMES (use these to determine correct bids_template and match_pattern): +${sampleFiles} + +MAPPINGS FORMAT (ONE entry per file extension, use glob patterns NOT individual filenames): + +Example 1 - DICOM: + mappings: + - modality: mri + match: ['*.dcm', '**/*.dcm'] + format_ready: false + convert_to: nifti + filename_rules: + - match_pattern: '.*' + bids_template: 'sub-X_T1w.nii.gz' + +Example 2 - fNIRS .mat: + mappings: + - modality: nirs + match: ['*.mat', '**/*.mat'] + format_ready: false + convert_to: snirf + filename_rules: + - match_pattern: '.*' + bids_template: 'sub-X_task-rest_nirs.snirf' + +Example 3 - Mixed: + mappings: + - modality: mri + match: ['*.nii.gz'] + format_ready: true + convert_to: none + filename_rules: + - match_pattern: '.*T1.*' + bids_template: 'sub-X_T1w.nii.gz' + - modality: nirs + match: ['*.mat'] + format_ready: false + convert_to: snirf + filename_rules: + - match_pattern: '.*' + bids_template: 'sub-X_task-rest_nirs.snirf' + +participant_metadata: +${participantMetadata} + +subjects: + count: ${subjectInfo.subjects.length} + id_strategy: ${subjectInfo.strategy} + labels: +${subjectLabels} + source: python_extracted`; +}; + +/** + * Prompt for BIDSPlan.yaml generation + * Mirrors autobidsify's PROMPT_BIDS_PLAN + build_bids_plan()'s optimized_bundle + */ +// export const getBIDSPlanPrompt = (evidenceBundle: any): string => { +// // ── Pull subject analysis from evidence bundle (generated by extractSubjectAnalysis) +// const subjectAnalysis = evidenceBundle.subject_analysis; +// const idMapping: Record = +// subjectAnalysis?.id_mapping?.id_mapping || {}; +// const subjectRecords: any[] = subjectAnalysis?.subject_records || []; + +// // ── Build assignment_rules (mirrors planner.py's _apply_python_rules_to_plan) +// const assignmentRules 
= subjectRecords +// .slice(0, 50) +// .map( +// (r) => +// `- match:\n - '*${r.original_id}*'\n original: ${ +// r.original_id +// }\n subject: '${idMapping[r.original_id] ?? r.numeric_id}'` +// ) +// .join("\n"); + +// // ── Build subjects section +// const subjectLabels = subjectRecords +// .slice(0, 50) +// .map((r) => ` - '${idMapping[r.original_id] ?? r.numeric_id}'`) +// .join("\n"); + +// // ── Build participant_metadata section +// const participantMetadata = subjectRecords +// .slice(0, 50) +// .map( +// (r) => +// ` '${idMapping[r.original_id] ?? r.numeric_id}':\n original_id: ${ +// r.original_id +// }` +// ) +// .join("\n"); + +// // ── Build file extension counts +// const countsText = Object.entries( +// evidenceBundle.counts_by_ext as Record +// ) +// .map(([ext, count]) => ` ${ext}: ${count} files`) +// .join("\n"); + +// // ── Build sample files (mirrors optimized_bundle.sample_files) +// const sampleFiles = +// (evidenceBundle.sample as Array<{ relpath: string }>) +// ?.map((s) => ` - ${s.relpath}`) +// .join("\n") ?? ""; + +// // ── Build python_subject_analysis block (mirrors planner.py's optimized_bundle) +// const subjectExamples = subjectRecords.slice(0, 10).map((r) => ({ +// original: r.original_id, +// bids_id: idMapping[r.original_id] ?? r.numeric_id, +// })); + +// const pythonSubjectAnalysis = JSON.stringify( +// { +// success: subjectAnalysis?.success ?? false, +// method: subjectAnalysis?.method ?? "none", +// subject_count: subjectAnalysis?.subject_count ?? 0, +// subject_examples: subjectExamples, +// id_mapping: subjectAnalysis?.id_mapping ?? {}, +// }, +// null, +// 2 +// ); + +// return `You are a BIDS dataset architect with complete decision-making authority. 
+ +// ═══════════════════════════════════════════════════════════════════════ +// SUPPORTED FORMATS AND CONVERSION RULES (v10 - CRITICAL) +// ═══════════════════════════════════════════════════════════════════════ + +// MRI FORMATS (modality: mri): +// Input formats: +// • DICOM (.dcm) → Convert to NIfTI using dcm2niix +// • NIfTI (.nii, .nii.gz) → Already BIDS-ready, copy directly +// • JNIfTI (.jnii, .bnii) → Convert to NIfTI using jnifti_converter +// BIDS output: .nii.gz files only + +// fNIRS FORMATS (modality: nirs): +// Input formats: +// • SNIRF (.snirf) → Already BIDS-ready, copy directly +// • Homer3 (.nirs) → Convert to SNIRF +// • MATLAB (.mat) → Convert to SNIRF +// BIDS output: .snirf files only + +// FORMAT_READY AND CONVERT_TO RULES: +// format_ready: true → .nii/.nii.gz (MRI) or .snirf (fNIRS) — just copy +// format_ready: false → .dcm (convert_to: nifti), .jnii/.bnii (convert_to: nifti), +// .mat (convert_to: snirf), .nirs (convert_to: snirf) + +// CRITICAL: assignment_rules subject values must be BARE IDs (no 'sub-' prefix). +// ✓ subject: '1' ← correct +// ✗ subject: 'sub-1' ← wrong, executor adds sub- automatically + +// YOUR ONLY JOB: Generate the mappings section based on the file types present. +// Copy assignment_rules, participant_metadata, and subjects sections EXACTLY as shown below. 
+ +// ═══════════════════════════════════════════════════════════════════════ +// PYTHON SUBJECT ANALYSIS (use this — do NOT re-detect subjects yourself) +// ═══════════════════════════════════════════════════════════════════════ +// ${pythonSubjectAnalysis} + +// FILE EXTENSION COUNTS: +// ${countsText} + +// SAMPLE FILE PATHS (use these for match patterns and bids_template): +// ${sampleFiles} + +// ═══════════════════════════════════════════════════════════════════════ +// OUTPUT (Raw YAML only, no markdown, no explanation) +// ═══════════════════════════════════════════════════════════════════════ + +// assignment_rules: +// ${assignmentRules} + +// mappings: +// - modality: mri # example — generate based on file types present +// match: ['*.dcm', '**/*.dcm'] +// format_ready: false +// convert_to: nifti +// filename_rules: +// - match_pattern: '.*' +// bids_template: 'sub-X_T1w.nii.gz' + +// participant_metadata: +// ${participantMetadata} + +// subjects: +// count: ${subjectAnalysis?.subject_count ?? 0} +// id_strategy: ${subjectAnalysis?.id_mapping?.strategy_used ?? 
"numeric"} +// labels: +// ${subjectLabels} +// source: python_extracted`; +// }; diff --git a/src/components/User/Dashboard/ProfileTab.tsx b/src/components/User/Dashboard/ProfileTab.tsx index 03debf2..d912eac 100644 --- a/src/components/User/Dashboard/ProfileTab.tsx +++ b/src/components/User/Dashboard/ProfileTab.tsx @@ -4,9 +4,28 @@ import { Business, CalendarToday, Verified, + Edit, + Save, + Cancel, } from "@mui/icons-material"; -import { Box, Typography, Grid, Paper, Chip, Divider } from "@mui/material"; -import React from "react"; +import { + Box, + Typography, + Grid, + Paper, + Chip, + Divider, + Button, + TextField, + CircularProgress, + Alert, +} from "@mui/material"; +import { Colors } from "design/theme"; +import { useAppDispatch } from "hooks/useAppDispatch"; +import { useAppSelector } from "hooks/useAppSelector"; +import React, { useState } from "react"; +import { updateProfile } from "redux/auth/auth.action"; +import { AuthSelector } from "redux/auth/auth.selector"; interface User { id: number; @@ -26,6 +45,18 @@ interface ProfileTabProps { } const ProfileTab: React.FC = ({ user }) => { + const dispatch = useAppDispatch(); + const { loading, error } = useAppSelector(AuthSelector); + + const [isEditing, setIsEditing] = useState(false); + const [editData, setEditData] = useState({ + firstName: user.firstName || "", + lastName: user.lastName || "", + company: user.company || "", + interests: user.interests || "", + }); + const [successMessage, setSuccessMessage] = useState(""); + const formatDate = (dateString?: string) => { if (!dateString) return "N/A"; return new Date(dateString).toLocaleDateString("en-US", { @@ -35,11 +66,121 @@ const ProfileTab: React.FC = ({ user }) => { }); }; + const handleEdit = () => { + setIsEditing(true); + setSuccessMessage(""); + }; + + const handleCancel = () => { + setIsEditing(false); + setEditData({ + firstName: user.firstName || "", + lastName: user.lastName || "", + company: user.company || "", + interests: 
user.interests || "", + }); + setSuccessMessage(""); + }; + + const handleChange = (e: React.ChangeEvent) => { + setEditData({ + ...editData, + [e.target.name]: e.target.value, + }); + }; + + const handleSave = async () => { + try { + await dispatch(updateProfile(editData)).unwrap(); + setIsEditing(false); + setSuccessMessage("Profile updated successfully!"); + setTimeout(() => setSuccessMessage(""), 3000); + } catch (error) { + console.error("Error updating profile:", error); + } + }; + return ( - - Profile Information - + {/* Header with Edit Button */} + + Profile Information + {!isEditing ? ( + + ) : ( + + + + + )} + + + {/* Success Message */} + {successMessage && ( + + {successMessage} + + )} + + {/* Error Message */} + {error && ( + + {error} + + )} @@ -85,52 +226,119 @@ const ProfileTab: React.FC = ({ user }) => { {/* First Name */} - {user.firstName && ( - - - First Name - + + + First Name + + {isEditing ? ( + + ) : ( {user.firstName} - - )} - + )} + {/* Last Name */} - {user.lastName && ( - - - Last Name - + + + Last Name + + {isEditing ? ( + + ) : ( {user.lastName} - - )} + )} + {/* Company */} - {user.company && ( - - - - - - Company/Institution - + + + + + + Company/Institution + + {isEditing ? ( + + ) : ( {user.company} - + )} - - )} + + {/* Interests */} - {user.interests && ( - - - Research Interests - + + + Research Interests + + {isEditing ? ( + + ) : ( - {user.interests} + {user.interests || "Not specified"} - - )} - + )} + @@ -163,15 +371,6 @@ const ProfileTab: React.FC = ({ user }) => { )} - - {/* Note about editing */} - - To update your profile information, please contact support. 
- ); }; diff --git a/src/components/User/Dashboard/ProjectsTab.tsx b/src/components/User/Dashboard/ProjectsTab.tsx new file mode 100644 index 0000000..6095c9b --- /dev/null +++ b/src/components/User/Dashboard/ProjectsTab.tsx @@ -0,0 +1,612 @@ +// src/components/Dashboard/ProjectsTab.tsx +import { FolderOpen, Add, Delete, Edit, Visibility } from "@mui/icons-material"; +import { + Box, + Typography, + Paper, + CircularProgress, + Alert, + List, + ListItem, + ListItemText, + Divider, + Button, + Chip, + IconButton, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + TextField, +} from "@mui/material"; +import { Colors } from "design/theme"; +import { useAppDispatch } from "hooks/useAppDispatch"; +import { useAppSelector } from "hooks/useAppSelector"; +import React, { useEffect, useState } from "react"; +import { useNavigate } from "react-router-dom"; +import { + getUserProjects, + createProject, + deleteProject, + updateProject, +} from "redux/projects/projects.action"; +import { + selectUserProjects, + selectProjectsLoading, + selectProjectsError, + selectIsCreatingProject, +} from "redux/projects/projects.selector"; + +interface ProjectsTabProps { + userId: number; +} + +const ProjectsTab: React.FC = ({ userId }) => { + const dispatch = useAppDispatch(); + const navigate = useNavigate(); + + const projects = useAppSelector(selectUserProjects); + const loading = useAppSelector(selectProjectsLoading); + const error = useAppSelector(selectProjectsError); + const isCreating = useAppSelector(selectIsCreatingProject); + + const [createDialogOpen, setCreateDialogOpen] = useState(false); + const [newProjectName, setNewProjectName] = useState(""); + const [newProjectDescription, setNewProjectDescription] = useState(""); + const [deleteDialogOpen, setDeleteDialogOpen] = useState(false); + const [projectToDelete, setProjectToDelete] = useState<{ + id: string; // ← was number + name: string; + } | null>(null); + const [editDialogOpen, setEditDialogOpen] = 
useState(false); + const [editingProject, setEditingProject] = useState<{ + id: string; // ← was number + name: string; + description: string; + } | null>(null); + + useEffect(() => { + dispatch(getUserProjects()); + }, [dispatch]); + + const handleViewProject = (publicId: string) => { + // ← was number + navigate(`/projects/${publicId}`); + }; + + const handleCreateOpen = () => { + setCreateDialogOpen(true); + }; + + const handleCreateClose = () => { + setNewProjectName(""); + setNewProjectDescription(""); + setCreateDialogOpen(false); + }; + + const handleCreateSubmit = async () => { + if (!newProjectName.trim()) return; + + try { + await dispatch( + createProject({ + name: newProjectName.trim(), + description: newProjectDescription.trim() || undefined, + }) + ).unwrap(); + + handleCreateClose(); + // Refetch projects after create + dispatch(getUserProjects()); + } catch (error) { + console.error("Error creating project:", error); + } + }; + + const handleDeleteClick = (publicId: string, projectName: string) => { + // ← was number + setProjectToDelete({ id: publicId, name: projectName }); + setDeleteDialogOpen(true); + }; + + const handleDeleteConfirm = async () => { + if (!projectToDelete) return; + + try { + await dispatch(deleteProject({ projectId: projectToDelete.id })).unwrap(); + setDeleteDialogOpen(false); + setProjectToDelete(null); + + // Refetch projects after delete + dispatch(getUserProjects()); + } catch (error) { + console.error("Error deleting project:", error); + } + }; + + const handleDeleteCancel = () => { + setDeleteDialogOpen(false); + setProjectToDelete(null); + }; + + const handleEditClick = (project: any) => { + setEditingProject({ + id: project.public_id, // ← was project.id + name: project.name, + description: project.description || "", + }); + setEditDialogOpen(true); + }; + + const handleEditSubmit = async () => { + if (!editingProject || !editingProject.name.trim()) return; + + try { + await dispatch( + updateProject({ + projectId: 
editingProject.id, + name: editingProject.name.trim(), + description: editingProject.description.trim() || undefined, + }) + ).unwrap(); + + // Refetch projects + dispatch(getUserProjects()); + + handleEditClose(); + } catch (error) { + console.error("Error updating project:", error); + } + }; + + const handleEditClose = () => { + setEditDialogOpen(false); + setEditingProject(null); + }; + + const formatDate = (dateString: string) => { + return new Date(dateString).toLocaleDateString("en-US", { + year: "numeric", + month: "short", + day: "numeric", + }); + }; + + if (loading && projects.length === 0) { + return ( + + + + ); + } + + if (error) { + return ( + + {error} + + ); + } + + return ( + + {/* Header with Create Button */} + + + + Dataset Organizer Projects + + + Organize and convert your neuroimaging datasets to BIDS format + + + + + + {/* Empty State */} + {projects.length === 0 ? ( + + + + No Projects Yet + + + Create a project to start organizing your neuroimaging datasets + + + + ) : ( + // Projects List + + + {projects.map((project, index) => ( + + {index > 0 && } + + + + + + {project.name} + + + + } + secondary={ + <> + {project.description && ( + + {project.description} + + )} + + Created {formatDate(project.created_at)} + + + } + /> + + + handleEditClick(project)} + sx={{ + color: Colors.purple, + "&:hover": { + backgroundColor: "rgba(128, 90, 213, 0.1)", + }, + }} + > + + + + handleDeleteClick(project.public_id, project.name) // ← was project.id + } + sx={{ + color: Colors.rose, + "&:hover": { + backgroundColor: "rgba(211, 47, 47, 0.1)", + }, + }} + > + + + + + + ))} + + + )} + + {/* Create Project Dialog */} + + + Create New Project + + + setNewProjectName(e.target.value)} + sx={{ + mb: 2, + mt: 1, + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + 
{ + borderColor: Colors.purple, + }, + }} + /> + setNewProjectDescription(e.target.value)} + sx={{ + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + + + + + + + {/* Delete Confirmation Dialog */} + + + Delete Project? + + + + Are you sure you want to delete "{projectToDelete?.name}"? + + + This will permanently delete the project and all its data. + + + + + + + + + {/* Edit Project Dialog */} + + + Edit Project + + + + setEditingProject( + editingProject + ? { ...editingProject, name: e.target.value } + : null + ) + } + sx={{ + mb: 2, + mt: 1, + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + setEditingProject( + editingProject + ? 
{ ...editingProject, description: e.target.value } + : null + ) + } + sx={{ + mb: 2, + "& .MuiInputLabel-root.Mui-focused": { + color: Colors.purple, + }, + "& .MuiOutlinedInput-root.Mui-focused .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + "& .MuiOutlinedInput-root:hover .MuiOutlinedInput-notchedOutline": + { + borderColor: Colors.purple, + }, + }} + /> + + + + + + + + ); +}; + +export default ProjectsTab; diff --git a/src/components/User/Dashboard/SavedDatasetsTab.tsx b/src/components/User/Dashboard/SavedDatasetsTab.tsx new file mode 100644 index 0000000..df819fe --- /dev/null +++ b/src/components/User/Dashboard/SavedDatasetsTab.tsx @@ -0,0 +1,153 @@ +import { Bookmark, Visibility } from "@mui/icons-material"; +import { + Box, + Typography, + Paper, + CircularProgress, + Alert, + List, + ListItem, + ListItemText, + Divider, + Button, + Chip, +} from "@mui/material"; +import { Colors } from "design/theme"; +import { useAppDispatch } from "hooks/useAppDispatch"; +import { useAppSelector } from "hooks/useAppSelector"; +import React, { useEffect } from "react"; +import { useNavigate } from "react-router-dom"; +import { getUserSavedDatasets } from "redux/activities/activities.action"; +import { + selectUserSavedDatasets, + selectActivitiesLoading, + selectActivitiesError, +} from "redux/activities/activities.selector"; + +interface SavedDatasetsTabProps { + userId: number; +} + +const SavedDatasetsTab: React.FC = ({ userId }) => { + const dispatch = useAppDispatch(); + const navigate = useNavigate(); + + // Get real data from Redux + const savedDatasets = useAppSelector(selectUserSavedDatasets); + const loading = useAppSelector(selectActivitiesLoading); + const error = useAppSelector(selectActivitiesError); + + useEffect(() => { + dispatch(getUserSavedDatasets()); + }, [dispatch]); + + const handleViewDataset = (dbName: string, datasetId: string) => { + navigate(`/db/${dbName}/${datasetId}`); + }; + + const formatDate = (dateString: 
string) => { + return new Date(dateString).toLocaleDateString("en-US", { + year: "numeric", + month: "short", + day: "numeric", + }); + }; + + if (loading) { + return ( + + + + ); + } + + if (error) { + return ( + + {error} + + ); + } + + if (savedDatasets.length === 0) { + return ( + + + + No Saved Datasets + + + Datasets you save will appear here + + + ); + } + + return ( + + + Saved Datasets + + + You have {savedDatasets.length} saved{" "} + {savedDatasets.length === 1 ? "dataset" : "datasets"} + + + + + {savedDatasets.map((dataset, index) => ( + + {index > 0 && } + + + + + {dataset.ds_id} + + + + } + secondary={`Saved on ${formatDate(dataset.saved_at)}`} + /> + + + + + ))} + + + + ); +}; + +export default SavedDatasetsTab; diff --git a/src/components/User/Dashboard/likedDatasetsTab.tsx b/src/components/User/Dashboard/likedDatasetsTab.tsx new file mode 100644 index 0000000..566fa0a --- /dev/null +++ b/src/components/User/Dashboard/likedDatasetsTab.tsx @@ -0,0 +1,153 @@ +import { Favorite, Visibility } from "@mui/icons-material"; +import { + Box, + Typography, + Paper, + CircularProgress, + Alert, + List, + ListItem, + ListItemText, + Divider, + Button, + Chip, +} from "@mui/material"; +import { Colors } from "design/theme"; +import { useAppDispatch } from "hooks/useAppDispatch"; +import { useAppSelector } from "hooks/useAppSelector"; +import React, { useEffect } from "react"; +import { useNavigate } from "react-router-dom"; +import { getUserLikedDatasets } from "redux/activities/activities.action"; +import { + selectUserLikedDatasets, + selectActivitiesLoading, + selectActivitiesError, +} from "redux/activities/activities.selector"; + +interface LikedDatasetsTabProps { + userId: number; +} + +const LikedDatasetsTab: React.FC = ({ userId }) => { + const dispatch = useAppDispatch(); + const navigate = useNavigate(); + + // Get real data from Redux + const likedDatasets = useAppSelector(selectUserLikedDatasets); + const loading = 
useAppSelector(selectActivitiesLoading); + const error = useAppSelector(selectActivitiesError); + + useEffect(() => { + dispatch(getUserLikedDatasets()); + }, [dispatch]); + + const handleViewDataset = (dbName: string, datasetId: string) => { + navigate(`/db/${dbName}/${datasetId}`); + }; + + const formatDate = (dateString: string) => { + return new Date(dateString).toLocaleDateString("en-US", { + year: "numeric", + month: "short", + day: "numeric", + }); + }; + + if (loading) { + return ( + + + + ); + } + + if (error) { + return ( + + {error} + + ); + } + + if (likedDatasets.length === 0) { + return ( + + + + No Liked Datasets + + + Datasets you like will appear here + + + ); + } + + return ( + + + Liked Datasets + + + You have liked {likedDatasets.length}{" "} + {likedDatasets.length === 1 ? "dataset" : "datasets"} + + + + + {likedDatasets.map((dataset, index) => ( + + {index > 0 && } + + + + + {dataset.ds_id} + + + + } + secondary={`Liked on ${formatDate(dataset.liked_at)}`} + /> + + + + + ))} + + + + ); +}; + +export default LikedDatasetsTab; diff --git a/src/components/User/UserDashboard.tsx b/src/components/User/UserDashboard.tsx index 6759fb5..21d4e53 100644 --- a/src/components/User/UserDashboard.tsx +++ b/src/components/User/UserDashboard.tsx @@ -1,6 +1,17 @@ +import CollectionsTab from "./Dashboard/CollectionsTab"; import ProfileTab from "./Dashboard/ProfileTab"; +import ProjectsTab from "./Dashboard/ProjectsTab"; +import SavedDatasetsTab from "./Dashboard/SavedDatasetsTab"; import SecurityTab from "./Dashboard/SecurityTab"; -import { AccountCircle, Lock, Settings } from "@mui/icons-material"; +import LikedDatasetsTab from "./Dashboard/likedDatasetsTab"; +import { + AccountCircle, + Lock, + Settings, + Bookmark, + Favorite, + FolderOpen, +} from "@mui/icons-material"; import { Box, Container, @@ -96,6 +107,8 @@ const UserDashboard: React.FC = () => { value={tabValue} onChange={handleTabChange} aria-label="dashboard tabs" + variant="scrollable" + 
scrollButtons="auto" sx={{ borderBottom: 1, borderColor: "divider", @@ -122,6 +135,24 @@ const UserDashboard: React.FC = () => { id="dashboard-tab-1" aria-controls="dashboard-tabpanel-1" /> + } + label="Collections" + id="dashboard-tab-2" + aria-controls="dashboard-tabpanel-2" + /> + } + label="Liked" + id="dashboard-tab-3" + aria-controls="dashboard-tabpanel-3" + /> + } + label="Projects" + id="dashboard-tab-3" + aria-controls="dashboard-tabpanel-3" + /> } label="Settings" @@ -136,6 +167,15 @@ const UserDashboard: React.FC = () => { + + + + + + + + + ); diff --git a/src/design/theme.ts b/src/design/theme.ts index 465682b..5a02123 100644 --- a/src/design/theme.ts +++ b/src/design/theme.ts @@ -1,5 +1,3 @@ -// import { orange, purple } from "@mui/material/colors"; -import { lightGreen } from "@mui/material/colors"; import { createTheme } from "@mui/material/styles"; const primary = { @@ -37,6 +35,7 @@ export const Colors = { blue: "#1976d2", lightBlue: "#e8f0fe", pink: "#FF69B4", + rose: "#EC4899", purpleGrey: "#99aff0", primary, secondary, diff --git a/src/hooks/useSessionPoller.ts b/src/hooks/useSessionPoller.ts new file mode 100644 index 0000000..81f9cb6 --- /dev/null +++ b/src/hooks/useSessionPoller.ts @@ -0,0 +1,43 @@ +import { useAppDispatch } from "./useAppDispatch"; +import { useAppSelector } from "./useAppSelector"; +import { useEffect, useRef } from "react"; +import { getCurrentUser } from "redux/auth/auth.action"; + +const POLL_INTERVAL_MS = 5 * 60 * 1000; // 5 minutes + +export const useSessionPoller = () => { + const dispatch = useAppDispatch(); + const isLoggedIn = useAppSelector((state) => state.auth.isLoggedIn); + const intervalRef = useRef | null>(null); + + // Interval polling + useEffect(() => { + if (!isLoggedIn) { + if (intervalRef.current) clearInterval(intervalRef.current); + return; + } + + intervalRef.current = setInterval(() => { + dispatch(getCurrentUser()); + }, POLL_INTERVAL_MS); + + return () => { + if (intervalRef.current) 
clearInterval(intervalRef.current); + }; + }, [isLoggedIn, dispatch]); + + // Immediate check when user returns to the tab + useEffect(() => { + if (!isLoggedIn) return; + + const handleVisibilityChange = () => { + if (document.visibilityState === "visible") { + dispatch(getCurrentUser()); + } + }; + + document.addEventListener("visibilitychange", handleVisibilityChange); + return () => + document.removeEventListener("visibilitychange", handleVisibilityChange); + }, [isLoggedIn, dispatch]); +}; diff --git a/src/pages/UpdatedDatasetDetailPage.tsx b/src/pages/UpdatedDatasetDetailPage.tsx index 82768bd..52f87ca 100644 --- a/src/pages/UpdatedDatasetDetailPage.tsx +++ b/src/pages/UpdatedDatasetDetailPage.tsx @@ -18,6 +18,7 @@ import { Tooltip, IconButton, } from "@mui/material"; +import DatasetActions from "components/DatasetDetailPage/DatasetAction"; import FileTree from "components/DatasetDetailPage/FileTree/FileTree"; import { buildTreeFromDoc, @@ -30,14 +31,44 @@ import { Colors } from "design/theme"; import { useAppDispatch } from "hooks/useAppDispatch"; import { useAppSelector } from "hooks/useAppSelector"; import React, { useEffect, useMemo, useState, useRef } from "react"; -// import ReactJson from "react-json-view"; import { useParams, useNavigate, useSearchParams } from "react-router-dom"; +import { + likeDataset, + unlikeDataset, + saveDataset, + unsaveDataset, + getDatasetStats, + checkUserActivity, +} from "redux/activities/activities.action"; +import { + selectIsDatasetLiked, + selectIsDatasetSaved, + selectDatasetLikesCount, + selectDatasetViewsCount, + selectIsLikeLoading, + selectIsSaveLoading, +} from "redux/activities/activities.selector"; +import { AuthSelector } from "redux/auth/auth.selector"; +import { + getUserCollections, + createCollection, + addDatasetToCollection, + removeDatasetFromCollection, + getDatasetCollections, +} from "redux/collections/collections.action"; +import { + selectUserCollections, + selectDatasetCollections, + 
selectIsCreatingCollection, + selectIsAddingToCollection, + selectCollectionsLoading, +} from "redux/collections/collections.selector"; import { fetchDocumentDetails, fetchDbInfoByDatasetId, } from "redux/neurojson/neurojson.action"; import { NeurojsonSelector } from "redux/neurojson/neurojson.selector"; -import { NeurojsonService } from "services/neurojson.service"; +// import { NeurojsonService } from "services/neurojson.service"; import RoutesEnum from "types/routes.enum"; interface ExternalDataLink { @@ -66,6 +97,167 @@ const UpdatedDatasetDetailPage: React.FC = () => { error, datasetViewInfo: dbViewInfo, } = useAppSelector(NeurojsonSelector); + // user auth state + const { isLoggedIn: isAuthenticated } = useAppSelector(AuthSelector); + + // Collections state + const userCollections = useAppSelector(selectUserCollections); + const datasetCollections = useAppSelector(selectDatasetCollections); + const isCreatingCollection = useAppSelector(selectIsCreatingCollection); + const isAddingToCollection = useAppSelector(selectIsAddingToCollection); + const isLoadingCollections = useAppSelector(selectCollectionsLoading); + + // activities state - with safe defaults + const isLiked = useAppSelector((state) => + dbName && docId ? selectIsDatasetLiked(state, dbName, docId) : false + ); + const isSaved = useAppSelector((state) => + dbName && docId ? selectIsDatasetSaved(state, dbName, docId) : false + ); + const likesCount = useAppSelector((state) => + dbName && docId ? selectDatasetLikesCount(state, dbName, docId) : 0 + ); + const viewsCount = useAppSelector((state) => + dbName && docId ? selectDatasetViewsCount(state, dbName, docId) : 0 + ); + const isLikeLoading = useAppSelector((state) => + dbName && docId ? selectIsLikeLoading(state, dbName, docId) : false + ); + const isSaveLoading = useAppSelector((state) => + dbName && docId ? 
selectIsSaveLoading(state, dbName, docId) : false + ); + + // Handle like/unlike + const handleLikeToggle = async () => { + if (!dbName || !docId) return; + + try { + if (isLiked) { + await dispatch(unlikeDataset({ dbName, datasetId: docId })).unwrap(); + } else { + await dispatch(likeDataset({ dbName, datasetId: docId })).unwrap(); + } + // Refresh stats after like/unlike + dispatch(getDatasetStats({ dbName, datasetId: docId })); + } catch (error) { + console.error("Error toggling like:", error); + } + }; + + // Handle save/unsave + // const handleSaveToggle = async () => { + // if (!dbName || !docId) return; + + // try { + // if (isSaved) { + // await dispatch(unsaveDataset({ dbName, datasetId: docId })).unwrap(); + // } else { + // await dispatch(saveDataset({ dbName, datasetId: docId })).unwrap(); + // } + // } catch (error) { + // console.error("Error toggling save:", error); + // } + // }; + + // useEffect to load user activity status and stats + useEffect(() => { + if (!dbName || !docId) return; + + // Fetch stats (views count, likes count) + dispatch(getDatasetStats({ dbName, datasetId: docId })); + + // Check if user has liked/saved this dataset (only if authenticated) + if (isAuthenticated) { + dispatch(checkUserActivity({ dbName, datasetId: docId })); + // Load user's collections + dispatch(getUserCollections()); + + // Check which collections contain this dataset + dispatch(getDatasetCollections({ dbName, datasetId: docId })); + } + }, [dbName, docId, isAuthenticated, dispatch]); + + // Merge user collections with dataset collections to show checkmark + const collectionsForMenu = userCollections.map((collection) => ({ + id: collection.id, + name: collection.name, + isInCollection: datasetCollections.some((dc) => dc.id === collection.id), // if true --> has checkmark + })); + + // Check if dataset is in any collection + const isInAnyCollection = datasetCollections.length > 0; + + const handleCreateCollection = async (name: string, description?: string) 
=> { + if (!dbName || !docId) return; + + try { + const newCollection = await dispatch( + createCollection({ name, description }) + ).unwrap(); + + // After creating, add this dataset to the new collection + await dispatch( + addDatasetToCollection({ + collectionId: newCollection.id, + dbName, + datasetId: docId, + }) + ).unwrap(); + + // Refetch collections + dispatch(getUserCollections()); + dispatch(getDatasetCollections({ dbName, datasetId: docId })); + } catch (error) { + console.error("Error creating collection:", error); + } + }; + + const handleAddToCollection = async (collectionId: number) => { + if (!dbName || !docId) return; + + try { + await dispatch( + addDatasetToCollection({ + collectionId, + dbName, + datasetId: docId, + }) + ).unwrap(); + + // Refetch to update menu + dispatch(getDatasetCollections({ dbName, datasetId: docId })); + } catch (error) { + console.error("Error adding to collection:", error); + } + }; + + // const handleRemoveFromCollection = async (collectionId: number) => { + // if (!dbName || !docId) return; + + // try { + // // Find the dataset's database ID from the collections + // const collection = datasetCollections.find((c) => c.id === collectionId); + // if (!collection || !collection.datasets) return; + + // const dataset = collection.datasets.find( + // (ds) => ds.couch_db === dbName && ds.ds_id === docId + // ); + // if (!dataset) return; + + // await dispatch( + // removeDatasetFromCollection({ + // collectionId, + // datasetId: dataset.id, + // }) + // ).unwrap(); + + // // Refetch to update menu + // dispatch(getDatasetCollections({ dbName, datasetId: docId })); + // } catch (error) { + // console.error("Error removing from collection:", error); + // } + // }; + // get params from url const [searchParams, setSearchParams] = useSearchParams(); const focus = searchParams.get("focus") || undefined; // get highlight from url @@ -82,17 +274,15 @@ const UpdatedDatasetDetailPage: React.FC = () => { const [jsonSize, setJsonSize] 
= useState(0); const [previewIndex, setPreviewIndex] = useState(0); const [isPreviewLoading, setIsPreviewLoading] = useState(false); - // const [copiedToast, setCopiedToast] = useState<{ - // open: boolean; - // text: string; - // }>({ - // open: false, - // text: "", - // }); - // const [copiedUrlOpen, setCopiedUrlOpen] = useState(false); const [copiedKey, setCopiedKey] = useState(null); const copyTimer = useRef(null); - const aiSummary = datasetDocument?.[".datainfo"]?.AISummary ?? ""; + // const aiSummary = datasetDocument?.[".datainfo"]?.AISummary ?? ""; + const rawSummary = datasetDocument?.[".datainfo"]?.AISummary; + const aiSummary: string = !rawSummary + ? "" + : typeof rawSummary === "string" + ? rawSummary + : Object.values(rawSummary).filter(Boolean).join("\n\n"); const readme = datasetDocument?.["README"] ?? ""; const handleSelectRevision = (newRev?: string | null) => { setSearchParams((prev) => { @@ -111,17 +301,6 @@ const UpdatedDatasetDetailPage: React.FC = () => { ); const treeTitle = "Files"; - // const filesCount = externalLinks.length; - // const totalBytes = useMemo(() => { - // let bytes = 0; - // for (const l of externalLinks) { - // const m = l.url.match(/size=(\d+)/); - // if (m) bytes += parseInt(m[1], 10); - // } - // return bytes; - // }, [externalLinks]); - - // add spinner const formatSize = (sizeInBytes: number): string => { if (sizeInBytes < 1024) { @@ -283,12 +462,6 @@ const UpdatedDatasetDetailPage: React.FC = () => { } }; - // const handleUrlCopyClick = async (e: React.MouseEvent, path: string) => { - // await copyPreviewUrl(path); - // setCopiedUrlOpen(true); - // setTimeout(() => setCopiedUrlOpen(false), 2500); - // }; - const handleUrlCopyClick = async ( e: React.MouseEvent, path: string @@ -684,22 +857,6 @@ const UpdatedDatasetDetailPage: React.FC = () => { return ( <> - {/* */} - {/* Breadcrumb Navigation (Home → Database → Dataset) */} { : datasetDocument["dataset_description.json"].Authors} )} - + {/* user actions component 
*/} + {/* ai summary */} {aiSummary ? ( <> diff --git a/src/redux/activities/activities.action.ts b/src/redux/activities/activities.action.ts new file mode 100644 index 0000000..e308d61 --- /dev/null +++ b/src/redux/activities/activities.action.ts @@ -0,0 +1,224 @@ +import { + LikeDatasetPayload, + SaveDatasetPayload, + AddCommentPayload, + UpdateCommentPayload, + DeleteCommentPayload, + GetCommentsPayload, + GetDatasetStatsPayload, + GetMostViewedDatasetsPayload, + CheckUserActivityPayload, +} from "./types/activities.interface"; +import { createAsyncThunk } from "@reduxjs/toolkit"; +import { ActivitiesService } from "services/activities.service"; + +// Like a dataset +export const likeDataset = createAsyncThunk( + "activities/likeDataset", + async (payload: LikeDatasetPayload, { rejectWithValue }) => { + try { + await ActivitiesService.likeDataset(payload.dbName, payload.datasetId); + return payload; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to like dataset"); + } + } +); + +// Unlike a dataset +export const unlikeDataset = createAsyncThunk( + "activities/unlikeDataset", + async (payload: LikeDatasetPayload, { rejectWithValue }) => { + try { + await ActivitiesService.unlikeDataset(payload.dbName, payload.datasetId); + return payload; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to unlike dataset"); + } + } +); + +// Save a dataset +export const saveDataset = createAsyncThunk( + "activities/saveDataset", + async (payload: SaveDatasetPayload, { rejectWithValue }) => { + try { + await ActivitiesService.saveDataset(payload.dbName, payload.datasetId); + return payload; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to save dataset"); + } + } +); + +// Unsave a dataset +export const unsaveDataset = createAsyncThunk( + "activities/unsaveDataset", + async (payload: SaveDatasetPayload, { rejectWithValue }) => { + try { + await ActivitiesService.unsaveDataset(payload.dbName, 
payload.datasetId); + return payload; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to unsave dataset"); + } + } +); + +// Get comments +export const getComments = createAsyncThunk( + "activities/getComments", + async (payload: GetCommentsPayload, { rejectWithValue }) => { + try { + const response = await ActivitiesService.getComments( + payload.dbName, + payload.datasetId + ); + return { + ...payload, + comments: response.comments, + }; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch comments"); + } + } +); + +// Add a comment +export const addComment = createAsyncThunk( + "activities/addComment", + async (payload: AddCommentPayload, { rejectWithValue }) => { + try { + const response = await ActivitiesService.addComment( + payload.dbName, + payload.datasetId, + payload.body + ); + return { + dbName: payload.dbName, + datasetId: payload.datasetId, + comment: response.comment, + }; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to add comment"); + } + } +); + +// Update a comment +export const updateComment = createAsyncThunk( + "activities/updateComment", + async (payload: UpdateCommentPayload, { rejectWithValue }) => { + try { + const response = await ActivitiesService.updateComment( + payload.commentId, + payload.body + ); + return { + commentId: payload.commentId, + comment: response.comment, + }; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to update comment"); + } + } +); + +// Delete a comment +export const deleteComment = createAsyncThunk( + "activities/deleteComment", + async (payload: DeleteCommentPayload, { rejectWithValue }) => { + try { + await ActivitiesService.deleteComment(payload.commentId); + return payload; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to delete comment"); + } + } +); + +// Get dataset statistics (views and likes count) +export const getDatasetStats = createAsyncThunk( + 
"activities/getDatasetStats", + async (payload: GetDatasetStatsPayload, { rejectWithValue }) => { + try { + const response = await ActivitiesService.getDatasetStats( + payload.dbName, + payload.datasetId + ); + return { + dbName: payload.dbName, + datasetId: payload.datasetId, + viewsCount: response.viewsCount, + likesCount: response.likesCount, + }; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch dataset stats"); + } + } +); + +// Get most viewed datasets +export const getMostViewedDatasets = createAsyncThunk( + "activities/getMostViewedDatasets", + async (payload: GetMostViewedDatasetsPayload = {}, { rejectWithValue }) => { + try { + const response = await ActivitiesService.getMostViewedDatasets( + payload.limit || 10 + ); + return { + mostViewed: response.mostViewed, + datasetsCount: response.datasetsCount, + }; + } catch (error: any) { + return rejectWithValue( + error.message || "Failed to fetch most viewed datasets" + ); + } + } +); + +export const checkUserActivity = createAsyncThunk( + "activities/checkUserActivity", + async (payload: CheckUserActivityPayload, { rejectWithValue }) => { + try { + const response = await ActivitiesService.checkUserActivity( + payload.dbName, + payload.datasetId + ); + return { + dbName: payload.dbName, + datasetId: payload.datasetId, + isLiked: response.isLiked, + isSaved: response.isSaved, + }; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to check user activity"); + } + } +); + +// Get user's saved datasets +export const getUserSavedDatasets = createAsyncThunk( + "activities/getUserSavedDatasets", + async (_, { rejectWithValue }) => { + try { + const response = await ActivitiesService.getUserSavedDatasets(); + return response.savedDatasets; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch saved datasets"); + } + } +); + +// Get user's liked datasets +export const getUserLikedDatasets = createAsyncThunk( + 
"activities/getUserLikedDatasets", + async (_, { rejectWithValue }) => { + try { + const response = await ActivitiesService.getUserLikedDatasets(); + return response.likedDatasets; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch liked datasets"); + } + } +); diff --git a/src/redux/activities/activities.selector.ts b/src/redux/activities/activities.selector.ts new file mode 100644 index 0000000..210d496 --- /dev/null +++ b/src/redux/activities/activities.selector.ts @@ -0,0 +1,136 @@ +import { RootState } from "../store"; +import { DatasetActivityStatus } from "./types/activities.interface"; + +// Main selector +export const ActivitiesSelector = (state: RootState) => state.activities; + +// Helper to get dataset key +const getDatasetKey = (dbName: string, datasetId: string): string => { + return `${dbName}:${datasetId}`; +}; + +// Get activity status for a specific dataset +export const selectDatasetActivityStatus = ( + state: RootState, + dbName: string, + datasetId: string +): DatasetActivityStatus | undefined => { + const key = getDatasetKey(dbName, datasetId); + return state.activities.datasetActivities[key]; +}; + +// Check if dataset is liked +export const selectIsDatasetLiked = ( + state: RootState, + dbName: string, + datasetId: string +): boolean => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.isLiked || false; +}; + +// Check if dataset is saved +export const selectIsDatasetSaved = ( + state: RootState, + dbName: string, + datasetId: string +): boolean => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.isSaved || false; +}; + +// Get comments for a dataset +export const selectDatasetComments = ( + state: RootState, + dbName: string, + datasetId: string +) => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.comments || []; +}; + +// Get views count for a dataset +export const 
selectDatasetViewsCount = ( + state: RootState, + dbName: string, + datasetId: string +): number => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.viewsCount || 0; +}; + +// Get likes count for a dataset +export const selectDatasetLikesCount = ( + state: RootState, + dbName: string, + datasetId: string +): number => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.likesCount || 0; +}; + +// Get loading states +export const selectIsLikeLoading = ( + state: RootState, + dbName: string, + datasetId: string +): boolean => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.isLoadingLike || false; +}; + +export const selectIsSaveLoading = ( + state: RootState, + dbName: string, + datasetId: string +): boolean => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.isLoadingSave || false; +}; + +export const selectAreCommentsLoading = ( + state: RootState, + dbName: string, + datasetId: string +): boolean => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.isLoadingComments || false; +}; + +export const selectAreStatsLoading = ( + state: RootState, + dbName: string, + datasetId: string +): boolean => { + const status = selectDatasetActivityStatus(state, dbName, datasetId); + return status?.isLoadingStats || false; +}; + +// Get most viewed datasets +export const selectMostViewedDatasets = (state: RootState) => { + return state.activities.mostViewedDatasets; +}; + +// Get user's saved datasets +export const selectUserSavedDatasets = (state: RootState) => { + return state.activities.userSavedDatasets; +}; + +// Get user's liked datasets +export const selectUserLikedDatasets = (state: RootState) => { + return state.activities.userLikedDatasets; +}; + +// Get error +export const selectActivitiesError = (state: RootState): string | null => { + return state.activities.error; 
+}; + +// Get global loading state +export const selectActivitiesLoading = (state: RootState): boolean => { + return state.activities.loading; +}; + +// import { RootState } from "../store"; + +// export const ActivitiesSelector = (state: RootState) => state.activities; diff --git a/src/redux/activities/activities.slice.ts b/src/redux/activities/activities.slice.ts new file mode 100644 index 0000000..97887e5 --- /dev/null +++ b/src/redux/activities/activities.slice.ts @@ -0,0 +1,327 @@ +import { + likeDataset, + unlikeDataset, + saveDataset, + unsaveDataset, + getComments, + addComment, + updateComment, + deleteComment, + getDatasetStats, + getMostViewedDatasets, + checkUserActivity, + getUserLikedDatasets, + getUserSavedDatasets, +} from "./activities.action"; +import { ActivitiesState } from "./types/activities.interface"; +import { createSlice, PayloadAction } from "@reduxjs/toolkit"; + +// Helper function to get dataset key +const getDatasetKey = (dbName: string, datasetId: string): string => { + return `${dbName}:${datasetId}`; +}; + +// Helper function to get or initialize dataset activity status +const getOrInitStatus = (state: ActivitiesState, key: string) => { + if (!state.datasetActivities[key]) { + state.datasetActivities[key] = { + isLiked: false, + isSaved: false, + comments: [], + viewsCount: 0, + likesCount: 0, + isLoadingLike: false, + isLoadingSave: false, + isLoadingComments: false, + isLoadingStats: false, + }; + } + return state.datasetActivities[key]; +}; + +const initialState: ActivitiesState = { + datasetActivities: {}, + mostViewedDatasets: [], + userSavedDatasets: [], + userLikedDatasets: [], + error: null, + loading: false, +}; + +const activitiesSlice = createSlice({ + name: "activities", + initialState, + reducers: { + clearError: (state) => { + state.error = null; + }, + // Initialize dataset activity status (useful when navigating to a dataset page) + initializeDatasetStatus: ( + state, + action: PayloadAction<{ dbName: string; 
datasetId: string }> + ) => { + const { dbName, datasetId } = action.payload; + const key = getDatasetKey(dbName, datasetId); + getOrInitStatus(state, key); + }, + }, + extraReducers: (builder) => { + builder + // Like Dataset + .addCase(likeDataset.pending, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingLike = true; + state.error = null; + }) + .addCase(likeDataset.fulfilled, (state, action) => { + const { dbName, datasetId } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLiked = true; + status.isLoadingLike = false; + }) + .addCase(likeDataset.rejected, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingLike = false; + state.error = action.payload as string; + }) + // Unlike Dataset + .addCase(unlikeDataset.pending, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingLike = true; + state.error = null; + }) + .addCase(unlikeDataset.fulfilled, (state, action) => { + const { dbName, datasetId } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLiked = false; + status.isLoadingLike = false; + }) + .addCase(unlikeDataset.rejected, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingLike = false; + state.error = action.payload as string; + }) + // Save Dataset + .addCase(saveDataset.pending, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); 
+ const status = getOrInitStatus(state, key); + status.isLoadingSave = true; + state.error = null; + }) + .addCase(saveDataset.fulfilled, (state, action) => { + const { dbName, datasetId } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isSaved = true; + status.isLoadingSave = false; + }) + .addCase(saveDataset.rejected, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingSave = false; + state.error = action.payload as string; + }) + // Unsave Dataset + .addCase(unsaveDataset.pending, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingSave = true; + state.error = null; + }) + .addCase(unsaveDataset.fulfilled, (state, action) => { + const { dbName, datasetId } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isSaved = false; + status.isLoadingSave = false; + }) + .addCase(unsaveDataset.rejected, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingSave = false; + state.error = action.payload as string; + }) + // Get Comments + .addCase(getComments.pending, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingComments = true; + state.error = null; + }) + .addCase(getComments.fulfilled, (state, action) => { + const { dbName, datasetId, comments } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.comments = comments; + status.isLoadingComments = 
false; + }) + .addCase(getComments.rejected, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingComments = false; + state.error = action.payload as string; + }) + // Add Comment + .addCase(addComment.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(addComment.fulfilled, (state, action) => { + const { dbName, datasetId, comment } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.comments = [comment, ...status.comments]; + state.loading = false; + }) + .addCase(addComment.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + // Update Comment + .addCase(updateComment.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(updateComment.fulfilled, (state, action) => { + const { comment } = action.payload; + // Find and update the comment in all datasets + Object.values(state.datasetActivities).forEach((status) => { + const index = status.comments.findIndex((c) => c.id === comment.id); + if (index !== -1) { + status.comments[index] = comment; + } + }); + state.loading = false; + }) + .addCase(updateComment.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + // Delete Comment + .addCase(deleteComment.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(deleteComment.fulfilled, (state, action) => { + const { commentId } = action.payload; + // Remove the comment from all datasets + Object.values(state.datasetActivities).forEach((status) => { + status.comments = status.comments.filter((c) => c.id !== commentId); + }); + state.loading = false; + }) + .addCase(deleteComment.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + // 
Get Dataset Stats + .addCase(getDatasetStats.pending, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingStats = true; + state.error = null; + }) + .addCase(getDatasetStats.fulfilled, (state, action) => { + const { dbName, datasetId, viewsCount, likesCount } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.viewsCount = viewsCount; + status.likesCount = likesCount; + status.isLoadingStats = false; + }) + .addCase(getDatasetStats.rejected, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingStats = false; + state.error = action.payload as string; + }) + // Get Most Viewed Datasets + .addCase(getMostViewedDatasets.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getMostViewedDatasets.fulfilled, (state, action) => { + state.mostViewedDatasets = action.payload.mostViewed; + state.loading = false; + }) + .addCase(getMostViewedDatasets.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + // Check User Activity + .addCase(checkUserActivity.pending, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingLike = true; + status.isLoadingSave = true; + state.error = null; + }) + .addCase(checkUserActivity.fulfilled, (state, action) => { + const { dbName, datasetId, isLiked, isSaved } = action.payload; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLiked = isLiked; + status.isSaved = isSaved; + status.isLoadingLike = false; + status.isLoadingSave = false; + }) + 
.addCase(checkUserActivity.rejected, (state, action) => { + const { dbName, datasetId } = action.meta.arg; + const key = getDatasetKey(dbName, datasetId); + const status = getOrInitStatus(state, key); + status.isLoadingLike = false; + status.isLoadingSave = false; + state.error = action.payload as string; + }) + // Get User Saved Datasets + .addCase(getUserSavedDatasets.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getUserSavedDatasets.fulfilled, (state, action) => { + state.userSavedDatasets = action.payload; + state.loading = false; + }) + .addCase(getUserSavedDatasets.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Get User Liked Datasets + .addCase(getUserLikedDatasets.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getUserLikedDatasets.fulfilled, (state, action) => { + state.userLikedDatasets = action.payload; + state.loading = false; + }) + .addCase(getUserLikedDatasets.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }); + }, +}); + +export const { clearError, initializeDatasetStatus } = activitiesSlice.actions; + +export default activitiesSlice.reducer; diff --git a/src/redux/activities/types/activities.interface.ts b/src/redux/activities/types/activities.interface.ts new file mode 100644 index 0000000..7776035 --- /dev/null +++ b/src/redux/activities/types/activities.interface.ts @@ -0,0 +1,200 @@ +// Comment from the database +export interface Comment { + id: number; + user_id: number; + dataset_id: number; + body: string; + created_at: string; + updated_at: string; + // When comments are fetched with user info included + User?: { + id: number; + username: string; + }; +} + +// Dataset from the database +export interface Dataset { + id: number; + couch_db: string; + ds_id: string; + views_count: number; +} + +// Like relationship +export interface DatasetLike { + id: 
number; + user_id: number; + dataset_id: number; + created_at: string; +} + +// Saved dataset relationship +export interface SavedDataset { + id: number; + user_id: number; + dataset_id: number; + created_at: string; +} + +// View history +export interface ViewHistory { + id: number; + user_id: number; + dataset_id: number; + viewed_at: string; +} + +// Activity status for a specific dataset (frontend state) +export interface DatasetActivityStatus { + isLiked: boolean; + isSaved: boolean; + comments: Comment[]; + viewsCount: number; + likesCount: number; + isLoadingLike: boolean; + isLoadingSave: boolean; + isLoadingComments: boolean; + isLoadingStats: boolean; +} + +// Redux state (for initial state and update) +export interface ActivitiesState { + // Key format: "dbName:datasetId" + datasetActivities: Record<string, DatasetActivityStatus>; + mostViewedDatasets: MostViewedDataset[]; + userSavedDatasets: UserSavedDataset[]; + userLikedDatasets: UserLikedDataset[]; + error: string | null; + loading: boolean; +} + +// Action payloads +export interface LikeDatasetPayload { + dbName: string; + datasetId: string; +} + +export interface SaveDatasetPayload { + dbName: string; + datasetId: string; +} + +export interface AddCommentPayload { + dbName: string; + datasetId: string; + body: string; +} + +export interface UpdateCommentPayload { + commentId: number; + body: string; +} + +export interface DeleteCommentPayload { + commentId: number; +} + +export interface GetCommentsPayload { + dbName: string; + datasetId: string; +} + +export interface GetDatasetStatsPayload { + dbName: string; + datasetId: string; +} + +export interface GetMostViewedDatasetsPayload { + limit?: number; +} + +// API Response interfaces +export interface GetCommentsResponse { + comments: Comment[]; +} + +export interface AddCommentResponse { + message: string; + comment: Comment; +} + +export interface UpdateCommentResponse { + message: string; + comment: Comment; +} + +export interface DeleteCommentResponse { + message: string; 
+} + +export interface LikeResponse { + message: string; +} + +export interface UnlikeResponse { + message: string; +} + +export interface SaveResponse { + message: string; +} + +export interface UnsaveResponse { + message: string; +} + +export interface GetDatasetStatsResponse { + viewsCount: number; + likesCount: number; + dataset: Dataset | null; +} + +export interface MostViewedDataset { + id: number; + couch_db: string; + ds_id: string; + views_count: number; +} + +export interface GetMostViewedDatasetsResponse { + mostViewed: MostViewedDataset[]; + datasetsCount: number; +} + +// Add +export interface CheckUserActivityPayload { + dbName: string; + datasetId: string; +} + +export interface CheckUserActivityResponse { + isLiked: boolean; + isSaved: boolean; +} + +export interface UserSavedDataset { + id: number; + couch_db: string; + ds_id: string; + views_count: number; + saved_at: string; +} + +export interface UserLikedDataset { + id: number; + couch_db: string; + ds_id: string; + views_count: number; + liked_at: string; +} + +export interface GetUserSavedDatasetsResponse { + savedDatasets: UserSavedDataset[]; + count: number; +} + +export interface GetUserLikedDatasetsResponse { + likedDatasets: UserLikedDataset[]; + count: number; +} diff --git a/src/redux/auth/auth.action.ts b/src/redux/auth/auth.action.ts index dac57a3..c8241f6 100644 --- a/src/redux/auth/auth.action.ts +++ b/src/redux/auth/auth.action.ts @@ -4,6 +4,7 @@ import { ChangePasswordData, ForgotPasswordData, ResetPasswordData, + UpdateProfileData, } from "./types/auth.interface"; import { createAsyncThunk } from "@reduxjs/toolkit"; import { AuthService } from "services/auth.service"; @@ -96,3 +97,15 @@ export const resetPassword = createAsyncThunk( } } ); + +export const updateProfile = createAsyncThunk( + "auth/updateProfile", + async (profileData: UpdateProfileData, { rejectWithValue }) => { + try { + const response = await AuthService.updateProfile(profileData); + return response.user; + } 
catch (error: any) { + return rejectWithValue(error.message || "Failed to update profile"); + } + } +); diff --git a/src/redux/auth/auth.slice.ts b/src/redux/auth/auth.slice.ts index 51373e7..b06f7cd 100644 --- a/src/redux/auth/auth.slice.ts +++ b/src/redux/auth/auth.slice.ts @@ -6,6 +6,7 @@ import { changePassword, forgotPassword, resetPassword, + updateProfile, } from "./auth.action"; import { IAuthState, @@ -175,6 +176,18 @@ const authSlice = createSlice({ .addCase(resetPassword.rejected, (state, action) => { state.loading = false; state.error = action.payload as string; + }) + .addCase(updateProfile.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(updateProfile.fulfilled, (state, action) => { + state.user = action.payload; + state.loading = false; + }) + .addCase(updateProfile.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; }); }, }); diff --git a/src/redux/auth/types/auth.interface.ts b/src/redux/auth/types/auth.interface.ts index f4e4e65..3abc7e4 100644 --- a/src/redux/auth/types/auth.interface.ts +++ b/src/redux/auth/types/auth.interface.ts @@ -76,3 +76,15 @@ export interface ResetPasswordData { export interface ResetPasswordResponse { message: string; } + +export interface UpdateProfileData { + firstName: string; + lastName: string; + company: string; + interests?: string; +} + +export interface UpdateProfileResponse { + message: string; + user: User; +} diff --git a/src/redux/collections/collections.action.ts b/src/redux/collections/collections.action.ts new file mode 100644 index 0000000..29f5e17 --- /dev/null +++ b/src/redux/collections/collections.action.ts @@ -0,0 +1,123 @@ +import { + CreateCollectionPayload, + UpdateCollectionPayload, + DeleteCollectionPayload, + GetCollectionPayload, + AddDatasetToCollectionPayload, + RemoveDatasetFromCollectionPayload, + GetDatasetCollectionsPayload, +} from "./types/collections.interface"; +import { createAsyncThunk } from 
"@reduxjs/toolkit"; +import { CollectionsService } from "services/collections.service"; + +// Get all user's collections +export const getUserCollections = createAsyncThunk( + "collections/getUserCollections", + async (_, { rejectWithValue }) => { + try { + const response = await CollectionsService.getUserCollections(); + return response.collections; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch collections"); + } + } +); + +// Create new collection +export const createCollection = createAsyncThunk( + "collections/createCollection", + async (payload: CreateCollectionPayload, { rejectWithValue }) => { + try { + const response = await CollectionsService.createCollection(payload); + return response.collection; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to create collection"); + } + } +); + +// Get specific collection +export const getCollection = createAsyncThunk( + "collections/getCollection", + async (payload: GetCollectionPayload, { rejectWithValue }) => { + try { + const response = await CollectionsService.getCollection( + payload.collectionId + ); + return response.collection; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch collection"); + } + } +); + +// Add dataset to collection +export const addDatasetToCollection = createAsyncThunk( + "collections/addDatasetToCollection", + async (payload: AddDatasetToCollectionPayload, { rejectWithValue }) => { + try { + await CollectionsService.addDatasetToCollection(payload); + return payload; + } catch (error: any) { + return rejectWithValue( + error.message || "Failed to add dataset to collection" + ); + } + } +); + +// Remove dataset from collection +export const removeDatasetFromCollection = createAsyncThunk( + "collections/removeDatasetFromCollection", + async (payload: RemoveDatasetFromCollectionPayload, { rejectWithValue }) => { + try { + await CollectionsService.removeDatasetFromCollection(payload); + return 
payload; + } catch (error: any) { + return rejectWithValue( + error.message || "Failed to remove dataset from collection" + ); + } + } +); + +// Update collection +export const updateCollection = createAsyncThunk( + "collections/updateCollection", + async (payload: UpdateCollectionPayload, { rejectWithValue }) => { + try { + const response = await CollectionsService.updateCollection(payload); + return response.collection; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to update collection"); + } + } +); + +// Delete collection +export const deleteCollection = createAsyncThunk( + "collections/deleteCollection", + async (payload: DeleteCollectionPayload, { rejectWithValue }) => { + try { + await CollectionsService.deleteCollection(payload.collectionId); + return payload.collectionId; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to delete collection"); + } + } +); + +// Get which collections contain a specific dataset (for "Add to Collection" menu) +export const getDatasetCollections = createAsyncThunk( + "collections/getDatasetCollections", + async (payload: GetDatasetCollectionsPayload, { rejectWithValue }) => { + try { + const response = await CollectionsService.getDatasetCollections(payload); + return response.collections; + } catch (error: any) { + return rejectWithValue( + error.message || "Failed to fetch dataset collections" + ); + } + } +); diff --git a/src/redux/collections/collections.selector.ts b/src/redux/collections/collections.selector.ts new file mode 100644 index 0000000..b6fd05f --- /dev/null +++ b/src/redux/collections/collections.selector.ts @@ -0,0 +1,45 @@ +import { RootState } from "../store"; + +// Main selector +export const CollectionsSelector = (state: RootState) => state.collections; + +// Get all user's collections +export const selectUserCollections = (state: RootState) => { + return state.collections.collections; +}; + +// Get current collection being viewed +export const 
selectCurrentCollection = (state: RootState) => { + return state.collections.currentCollection; +}; + +// Get collections that contain a specific dataset (for menu) +export const selectDatasetCollections = (state: RootState) => { + return state.collections.datasetCollections; +}; + +// Get loading states +export const selectCollectionsLoading = (state: RootState): boolean => { + return state.collections.loading; +}; + +export const selectIsCreatingCollection = (state: RootState): boolean => { + return state.collections.isCreating; +}; + +export const selectIsAddingToCollection = (state: RootState): boolean => { + return state.collections.isAdding; +}; + +// Get error +export const selectCollectionsError = (state: RootState): string | null => { + return state.collections.error; +}; + +// Get collection by ID (from cached list) +export const selectCollectionById = ( + state: RootState, + collectionId: number +) => { + return state.collections.collections.find((c) => c.id === collectionId); +}; diff --git a/src/redux/collections/collections.slice.ts b/src/redux/collections/collections.slice.ts new file mode 100644 index 0000000..1e6324f --- /dev/null +++ b/src/redux/collections/collections.slice.ts @@ -0,0 +1,153 @@ +import { + getUserCollections, + createCollection, + getCollection, + addDatasetToCollection, + removeDatasetFromCollection, + updateCollection, + deleteCollection, + getDatasetCollections, +} from "./collections.action"; +import { CollectionsState } from "./types/collections.interface"; +import { createSlice } from "@reduxjs/toolkit"; + +const initialState: CollectionsState = { + collections: [], + currentCollection: null, + datasetCollections: [], + error: null, + loading: false, + isCreating: false, + isAdding: false, +}; + +const collectionsSlice = createSlice({ + name: "collections", + initialState, + reducers: { + clearError: (state) => { + state.error = null; + }, + clearCurrentCollection: (state) => { + state.currentCollection = null; + }, + }, + 
extraReducers: (builder) => { + builder + // Get User Collections + .addCase(getUserCollections.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getUserCollections.fulfilled, (state, action) => { + state.collections = action.payload; + state.loading = false; + }) + .addCase(getUserCollections.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Create Collection + .addCase(createCollection.pending, (state) => { + state.isCreating = true; + state.error = null; + }) + .addCase(createCollection.fulfilled, (state, action) => { + state.collections = [action.payload, ...state.collections]; + state.isCreating = false; + }) + .addCase(createCollection.rejected, (state, action) => { + state.isCreating = false; + state.error = action.payload as string; + }) + + // Get Collection + .addCase(getCollection.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getCollection.fulfilled, (state, action) => { + state.currentCollection = action.payload; + state.loading = false; + }) + .addCase(getCollection.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Add Dataset to Collection + .addCase(addDatasetToCollection.pending, (state) => { + state.isAdding = true; + state.error = null; + }) + .addCase(addDatasetToCollection.fulfilled, (state, action) => { + state.isAdding = false; + // Component will refetch collections list + }) + .addCase(addDatasetToCollection.rejected, (state, action) => { + state.isAdding = false; + state.error = action.payload as string; + }) + + // Remove Dataset from Collection + .addCase(removeDatasetFromCollection.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(removeDatasetFromCollection.fulfilled, (state) => { + state.loading = false; + // Component will refetch collections list + }) + .addCase(removeDatasetFromCollection.rejected, (state, 
action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Update Collection + .addCase(updateCollection.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(updateCollection.fulfilled, (state) => { + state.loading = false; + // Component will refetch collections list + }) + .addCase(updateCollection.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Delete Collection + .addCase(deleteCollection.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(deleteCollection.fulfilled, (state) => { + state.loading = false; + // Component will refetch or navigate away + }) + .addCase(deleteCollection.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Get Dataset Collections (for "Add to Collection" menu) + .addCase(getDatasetCollections.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getDatasetCollections.fulfilled, (state, action) => { + state.datasetCollections = action.payload; + state.loading = false; + }) + .addCase(getDatasetCollections.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }); + }, +}); + +export const { clearError, clearCurrentCollection } = collectionsSlice.actions; + +export default collectionsSlice.reducer; diff --git a/src/redux/collections/types/collections.interface.ts b/src/redux/collections/types/collections.interface.ts new file mode 100644 index 0000000..1475ecc --- /dev/null +++ b/src/redux/collections/types/collections.interface.ts @@ -0,0 +1,109 @@ +// Collection from the database +export interface Collection { + id: number; + user_id: number; + name: string; + description: string | null; + is_public: boolean; + created_at: string; + updated_at: string; + datasets_count?: number; + datasets?: CollectionDataset[]; +} + +// Dataset within a collection 
(with junction table data) +export interface CollectionDataset { + id: number; + couch_db: string; + ds_id: string; + views_count: number; + CollectionDataset?: { + created_at: string; // When added to collection + }; +} + +// Redux state +export interface CollectionsState { + collections: Collection[]; // All user's collections + currentCollection: Collection | null; // Currently viewing collection + datasetCollections: Collection[]; // Collections containing a specific dataset (for menu) + error: string | null; + loading: boolean; + isCreating: boolean; + isAdding: boolean; +} + +// Action payloads +export interface CreateCollectionPayload { + name: string; + description?: string; + is_public?: boolean; +} + +export interface UpdateCollectionPayload { + collectionId: number; + name?: string; + description?: string; + is_public?: boolean; +} + +export interface DeleteCollectionPayload { + collectionId: number; +} + +export interface GetCollectionPayload { + collectionId: number; +} + +export interface AddDatasetToCollectionPayload { + collectionId: number; + dbName: string; + datasetId: string; +} + +export interface RemoveDatasetFromCollectionPayload { + collectionId: number; + datasetId: number; // Dataset.id (not ds_id) +} + +export interface GetDatasetCollectionsPayload { + dbName: string; + datasetId: string; +} + +// API Response interfaces +export interface GetUserCollectionsResponse { + collections: Collection[]; + count: number; +} + +export interface CreateCollectionResponse { + message: string; + collection: Collection; +} + +export interface GetCollectionResponse { + collection: Collection; +} + +export interface AddDatasetResponse { + message: string; +} + +export interface RemoveDatasetResponse { + message: string; +} + +export interface UpdateCollectionResponse { + message: string; + collection: Collection; +} + +export interface DeleteCollectionResponse { + message: string; +} + +export interface GetDatasetCollectionsResponse { + collections: 
Collection[]; + count: number; +} diff --git a/src/redux/projects/projects.action.ts b/src/redux/projects/projects.action.ts new file mode 100644 index 0000000..6ab5226 --- /dev/null +++ b/src/redux/projects/projects.action.ts @@ -0,0 +1,73 @@ +import { + CreateProjectPayload, + UpdateProjectPayload, + DeleteProjectPayload, + GetProjectPayload, +} from "./types/projects.interface"; +import { createAsyncThunk } from "@reduxjs/toolkit"; +import { ProjectsService } from "services/projects.service"; + +// Get all user's projects +export const getUserProjects = createAsyncThunk( + "projects/getUserProjects", + async (_, { rejectWithValue }) => { + try { + const response = await ProjectsService.getUserProjects(); + return response.projects; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch projects"); + } + } +); + +// Create new project +export const createProject = createAsyncThunk( + "projects/createProject", + async (payload: CreateProjectPayload, { rejectWithValue }) => { + try { + const response = await ProjectsService.createProject(payload); + return response.project; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to create project"); + } + } +); + +// Get specific project +export const getProject = createAsyncThunk( + "projects/getProject", + async (payload: GetProjectPayload, { rejectWithValue }) => { + try { + const response = await ProjectsService.getProject(payload.projectId); + return response.project; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to fetch project"); + } + } +); + +// Update project +export const updateProject = createAsyncThunk( + "projects/updateProject", + async (payload: UpdateProjectPayload, { rejectWithValue }) => { + try { + const response = await ProjectsService.updateProject(payload); + return response.project; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to update project"); + } + } +); + +// Delete project 
+export const deleteProject = createAsyncThunk( + "projects/deleteProject", + async (payload: DeleteProjectPayload, { rejectWithValue }) => { + try { + await ProjectsService.deleteProject(payload.projectId); + return payload.projectId; + } catch (error: any) { + return rejectWithValue(error.message || "Failed to delete project"); + } + } +); diff --git a/src/redux/projects/projects.selector.ts b/src/redux/projects/projects.selector.ts new file mode 100644 index 0000000..74ac593 --- /dev/null +++ b/src/redux/projects/projects.selector.ts @@ -0,0 +1,37 @@ +import { RootState } from "../store"; + +// Main selector +export const ProjectsSelector = (state: RootState) => state.projects; + +// Get all user's projects +export const selectUserProjects = (state: RootState) => { + return state.projects.projects; +}; + +// Get current project being viewed +export const selectCurrentProject = (state: RootState) => { + return state.projects.currentProject; +}; + +// Get loading states +export const selectProjectsLoading = (state: RootState): boolean => { + return state.projects.loading; +}; + +export const selectIsCreatingProject = (state: RootState): boolean => { + return state.projects.isCreating; +}; + +export const selectIsUpdatingProject = (state: RootState): boolean => { + return state.projects.isUpdating; +}; + +// Get error +export const selectProjectsError = (state: RootState): string | null => { + return state.projects.error; +}; + +// Get project by ID (from cached list) +export const selectProjectById = (state: RootState, projectId: string) => { + return state.projects.projects.find((p) => p.public_id === projectId); +}; diff --git a/src/redux/projects/projects.slice.ts b/src/redux/projects/projects.slice.ts new file mode 100644 index 0000000..4e7ed3a --- /dev/null +++ b/src/redux/projects/projects.slice.ts @@ -0,0 +1,109 @@ +// src/redux/projects/projects.slice.ts +import { + getUserProjects, + createProject, + getProject, + updateProject, + deleteProject, +} from 
"./projects.action"; +import { ProjectsState } from "./types/projects.interface"; +import { createSlice } from "@reduxjs/toolkit"; + +const initialState: ProjectsState = { + projects: [], + currentProject: null, + error: null, + loading: false, + isCreating: false, + isUpdating: false, +}; + +const projectsSlice = createSlice({ + name: "projects", + initialState, + reducers: { + clearError: (state) => { + state.error = null; + }, + clearCurrentProject: (state) => { + state.currentProject = null; + }, + }, + extraReducers: (builder) => { + builder + // Get User Projects + .addCase(getUserProjects.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getUserProjects.fulfilled, (state, action) => { + state.projects = action.payload; + state.loading = false; + }) + .addCase(getUserProjects.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Create Project + .addCase(createProject.pending, (state) => { + state.isCreating = true; + state.error = null; + }) + .addCase(createProject.fulfilled, (state, action) => { + state.projects = [action.payload, ...state.projects]; + // state.currentProject = action.payload; + state.isCreating = false; + }) + .addCase(createProject.rejected, (state, action) => { + state.isCreating = false; + state.error = action.payload as string; + }) + + // Get Project + .addCase(getProject.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(getProject.fulfilled, (state, action) => { + state.currentProject = action.payload; + state.loading = false; + }) + .addCase(getProject.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }) + + // Update Project + .addCase(updateProject.pending, (state) => { + state.isUpdating = true; + state.error = null; + }) + .addCase(updateProject.fulfilled, (state) => { + state.isUpdating = false; + // Component will refetch projects list + }) + 
.addCase(updateProject.rejected, (state, action) => { + state.isUpdating = false; + state.error = action.payload as string; + }) + + // Delete Project + .addCase(deleteProject.pending, (state) => { + state.loading = true; + state.error = null; + }) + .addCase(deleteProject.fulfilled, (state) => { + state.loading = false; + // Component will refetch or navigate away + }) + .addCase(deleteProject.rejected, (state, action) => { + state.loading = false; + state.error = action.payload as string; + }); + }, +}); + +export const { clearError, clearCurrentProject } = projectsSlice.actions; + +export default projectsSlice.reducer; diff --git a/src/redux/projects/types/projects.interface.ts b/src/redux/projects/types/projects.interface.ts new file mode 100644 index 0000000..af9f319 --- /dev/null +++ b/src/redux/projects/types/projects.interface.ts @@ -0,0 +1,133 @@ +export interface FileItem { + id: string; + name: string; + type: "file" | "folder" | "zip"; + parentId: string | null; + fileType?: + | "text" + | "nifti" + | "hdf5" + | "neurojsonText" + | "neurojsonBinary" + | "office" + | "meta" + | "matlab" + | "dicom" + | "nirs" + | "array" + | "other"; + content?: string; + contentType?: string; + sourcePath?: string; + isUserMeta?: boolean; + source?: "user" | "ai" | "output"; + generatedAt?: string; + note?: string; + loading?: boolean; +} + +// Extractor State +export interface ExtractorState { + files: FileItem[]; + selectedIds: string[]; + expandedIds: string[]; + baseDirectoryPath?: string; + evidenceBundle?: any; // ✅ Add this + trioGenerated?: boolean; // ✅ Add this +} + +// add +export interface EvidenceBundle { + root: string; + counts_by_ext: Record<string, number>; + all_files: string[]; + documents: Array<{ + relpath: string; + filename: string; + type: string; + content: string; + }>; + user_hints: { + user_text: string; + modality_hint: string; + n_subjects: number | null; + }; +} + +// Project Interface +export interface Project { + id: number; + public_id: string; // ← 
ADD + user_id: number; + name: string; + description: string | null; + extractor_state: ExtractorState; + created_at: string; + updated_at: string; + file_count?: number; // Added by backend (not included in database) +} + +// Redux State +export interface ProjectsState { + projects: Project[]; + currentProject: Project | null; + error: string | null; + loading: boolean; + isCreating: boolean; + isUpdating: boolean; +} + +// API Response Types +export interface GetUserProjectsResponse { + projects: Project[]; + count: number; +} + +export interface CreateProjectResponse { + message: string; + project: Project; +} + +export interface GetProjectResponse { + project: Project; +} + +export interface UpdateProjectResponse { + message: string; + project: Project; +} + +export interface DeleteProjectResponse { + message: string; +} + +// Payload Types +export interface CreateProjectPayload { + name?: string; + description?: string; +} + +export interface GetProjectPayload { + // projectId: number; + projectId: string; +} + +export interface UpdateProjectPayload { + projectId: string; // ← was number + name?: string; + description?: string; + extractor_state?: ExtractorState; +} + +export interface DeleteProjectPayload { + projectId: string; // ← was number +} + +// export interface LLMProvider { +// name: string; +// baseUrl: string; +// models: Array<{ id: string; name: string }>; +// noApiKey?: boolean; +// customUrl?: boolean; +// isAnthropic?: boolean; +// } diff --git a/src/redux/store.ts b/src/redux/store.ts index fa6f436..f0a53da 100644 --- a/src/redux/store.ts +++ b/src/redux/store.ts @@ -1,5 +1,8 @@ +import activitiesReducer from "./activities/activities.slice"; import authReducer from "./auth/auth.slice"; +import collectionsReducer from "./collections/collections.slice"; import neurojsonReducer from "./neurojson/neurojson.slice"; +import projectsReducer from "./projects/projects.slice"; import { configureStore, combineReducers, @@ -10,6 +13,9 @@ import { const 
appReducer = combineReducers({ neurojson: neurojsonReducer, // Add other slices here as needed auth: authReducer, + activities: activitiesReducer, + collections: collectionsReducer, + projects: projectsReducer, }); export const rootReducer = ( diff --git a/src/services/activities.service.ts b/src/services/activities.service.ts new file mode 100644 index 0000000..9d406d2 --- /dev/null +++ b/src/services/activities.service.ts @@ -0,0 +1,305 @@ +import { + Comment, + Dataset, + GetCommentsResponse, + AddCommentResponse, + UpdateCommentResponse, + DeleteCommentResponse, + LikeResponse, + UnlikeResponse, + SaveResponse, + UnsaveResponse, + GetDatasetStatsResponse, + GetMostViewedDatasetsResponse, + CheckUserActivityResponse, + GetUserSavedDatasetsResponse, + GetUserLikedDatasetsResponse, +} from "../redux/activities/types/activities.interface"; + +const API_URL = process.env.REACT_APP_API_URL || "http://localhost:5000/api/v1"; + +export const ActivitiesService = { + // Like a dataset + likeDataset: async ( + dbName: string, + datasetId: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/like`, + { + method: "POST", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to like dataset"); + } + + return data; + }, + + // Unlike a dataset + unlikeDataset: async ( + dbName: string, + datasetId: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/like`, + { + method: "DELETE", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to unlike dataset"); + } + + return data; + }, + + // Save a dataset + saveDataset: async ( + dbName: string, + datasetId: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/save`, + { + 
method: "POST", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to save dataset"); + } + + return data; + }, + + // Unsave a dataset + unsaveDataset: async ( + dbName: string, + datasetId: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/save`, + { + method: "DELETE", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to unsave dataset"); + } + + return data; + }, + + // Get comments for a dataset + getComments: async ( + dbName: string, + datasetId: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/comments`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch comments"); + } + + return data; + }, + + // Add a comment + addComment: async ( + dbName: string, + datasetId: string, + body: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/comments`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify({ body }), + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to add comment"); + } + + return data; + }, + + // Update a comment + updateComment: async ( + commentId: number, + body: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/comments/${commentId}`, + { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify({ body }), + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to update 
comment"); + } + + return data; + }, + + // Delete a comment + deleteComment: async (commentId: number): Promise => { + const response = await fetch( + `${API_URL}/activities/comments/${commentId}`, + { + method: "DELETE", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to delete comment"); + } + + return data; + }, + + // Get dataset statistics (views count and likes count) + getDatasetStats: async ( + dbName: string, + datasetId: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/stats`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch dataset statistics"); + } + + return data; + }, + + // Get most viewed datasets + getMostViewedDatasets: async ( + limit: number = 10 + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/most-viewed?limit=${limit}`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch most viewed datasets"); + } + + return data; + }, + // check user liked or saved a dataset already or not + checkUserActivity: async ( + dbName: string, + datasetId: string + ): Promise => { + const response = await fetch( + `${API_URL}/activities/datasets/${dbName}/${datasetId}/user-activity`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to check user activity"); + } + + return data; + }, + + // Get current user's saved datasets + getUserSavedDatasets: async (): Promise => { + const response = await fetch( + `${API_URL}/activities/users/me/saved-datasets`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await 
response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch saved datasets"); + } + + return data; + }, + + // Get current user's liked datasets + getUserLikedDatasets: async (): Promise => { + const response = await fetch( + `${API_URL}/activities/users/me/liked-datasets`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch liked datasets"); + } + + return data; + }, +}; diff --git a/src/services/auth.service.ts b/src/services/auth.service.ts index 524b49c..bdd90b7 100644 --- a/src/services/auth.service.ts +++ b/src/services/auth.service.ts @@ -10,6 +10,8 @@ import { ForgotPasswordResponse, ResetPasswordData, ResetPasswordResponse, + UpdateProfileData, + UpdateProfileResponse, } from "redux/auth/types/auth.interface"; const API_URL = process.env.REACT_APP_API_URL || "http://localhost:5000/api/v1"; @@ -147,4 +149,24 @@ export const AuthService = { return responseData; }, + updateProfile: async ( + profileData: UpdateProfileData + ): Promise => { + const response = await fetch(`${API_URL}/auth/update-profile`, { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify(profileData), + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to update profile"); + } + + return data; + }, }; diff --git a/src/services/collections.service.ts b/src/services/collections.service.ts new file mode 100644 index 0000000..be7bb47 --- /dev/null +++ b/src/services/collections.service.ts @@ -0,0 +1,198 @@ +import { + GetUserCollectionsResponse, + CreateCollectionResponse, + CreateCollectionPayload, + GetCollectionResponse, + AddDatasetResponse, + AddDatasetToCollectionPayload, + RemoveDatasetResponse, + UpdateCollectionResponse, + UpdateCollectionPayload, + DeleteCollectionResponse, + GetDatasetCollectionsResponse, + 
RemoveDatasetFromCollectionPayload, + GetDatasetCollectionsPayload, +} from "../redux/collections/types/collections.interface"; + +const API_URL = process.env.REACT_APP_API_URL || "http://localhost:5000/api/v1"; + +export const CollectionsService = { + // Get all user's collections + getUserCollections: async (): Promise => { + const response = await fetch(`${API_URL}/collections/me/collections`, { + method: "GET", + credentials: "include", + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch collections"); + } + + return data; + }, + + // Create new collection + createCollection: async ( + payload: CreateCollectionPayload + ): Promise => { + const response = await fetch(`${API_URL}/collections/collections`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify(payload), + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to create collection"); + } + + return data; + }, + + // Get specific collection with datasets + getCollection: async ( + collectionId: number + ): Promise => { + const response = await fetch( + `${API_URL}/collections/collections/${collectionId}`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch collection"); + } + + return data; + }, + + // Add dataset to collection + addDatasetToCollection: async ( + payload: AddDatasetToCollectionPayload + ): Promise => { + const response = await fetch( + `${API_URL}/collections/collections/${payload.collectionId}/datasets`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify({ + dbName: payload.dbName, + datasetId: payload.datasetId, + }), + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw 
new Error(data.message || "Failed to add dataset to collection"); + } + + return data; + }, + + // Remove dataset from collection + removeDatasetFromCollection: async ( + payload: RemoveDatasetFromCollectionPayload + ): Promise => { + const response = await fetch( + `${API_URL}/collections/collections/${payload.collectionId}/datasets/${payload.datasetId}`, + { + method: "DELETE", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error( + data.message || "Failed to remove dataset from collection" + ); + } + + return data; + }, + + // Update collection + updateCollection: async ( + payload: UpdateCollectionPayload + ): Promise => { + const { collectionId, ...updates } = payload; + + const response = await fetch( + `${API_URL}/collections/collections/${collectionId}`, + { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify(updates), + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to update collection"); + } + + return data; + }, + + // Delete collection + deleteCollection: async ( + collectionId: number + ): Promise => { + const response = await fetch( + `${API_URL}/collections/collections/${collectionId}`, + { + method: "DELETE", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to delete collection"); + } + + return data; + }, + + // Get which collections contain a specific dataset + getDatasetCollections: async ( + payload: GetDatasetCollectionsPayload + ): Promise => { + const response = await fetch( + `${API_URL}/collections/datasets/${payload.dbName}/${payload.datasetId}/collections`, + { + method: "GET", + credentials: "include", + } + ); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch dataset collections"); + } 
+ + return data; + }, +}; diff --git a/src/services/ollama.service.ts b/src/services/ollama.service.ts new file mode 100644 index 0000000..9d8b368 --- /dev/null +++ b/src/services/ollama.service.ts @@ -0,0 +1,48 @@ +const API_URL = process.env.REACT_APP_API_URL || "http://localhost:5000/api/v1"; + +const getQwenTemperature = (modelName: string): number => { + if (modelName.includes("next") || modelName.includes("fast")) return 0.4; + if (modelName.includes("careful") || modelName.includes("think")) return 0.15; + return 0.3; +}; + +export const OllamaService = { + chat: async ( + model: string, + messages: { role: string; content: string }[] + ): Promise => { + const temperature = getQwenTemperature(model); + const response = await fetch(`${API_URL}/ollama/chat`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + model, + messages, + stream: false, + options: { + temperature, // ← pass to Ollama + }, + }), + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.error || "Failed to call Ollama"); + } + + return data; + }, + + getTags: async (): Promise => { + const response = await fetch(`${API_URL}/ollama/tags`); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.error || "Failed to fetch Ollama models"); + } + + return data; + }, +}; diff --git a/src/services/projects.service.ts b/src/services/projects.service.ts new file mode 100644 index 0000000..6b11cda --- /dev/null +++ b/src/services/projects.service.ts @@ -0,0 +1,109 @@ +import { + GetUserProjectsResponse, + CreateProjectResponse, + CreateProjectPayload, + GetProjectResponse, + UpdateProjectResponse, + UpdateProjectPayload, + DeleteProjectResponse, +} from "../redux/projects/types/projects.interface"; + +const API_URL = process.env.REACT_APP_API_URL || "http://localhost:5000/api/v1"; + +export const ProjectsService = { + // Get all user's projects + getUserProjects: async (): Promise => { + 
const response = await fetch(`${API_URL}/projects/me/projects`, { + method: "GET", + credentials: "include", + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch projects"); + } + + return data; + }, + + // Create new project + createProject: async ( + payload: CreateProjectPayload + ): Promise => { + const response = await fetch(`${API_URL}/projects`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify(payload), + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to create project"); + } + + return data; + }, + + // Get specific project + getProject: async (projectId: string): Promise => { + // ← was number + const response = await fetch(`${API_URL}/projects/${projectId}`, { + method: "GET", + credentials: "include", + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to fetch project"); + } + + return data; + }, + + // Update project + updateProject: async ( + payload: UpdateProjectPayload + ): Promise => { + const { projectId, ...updates } = payload; + + const response = await fetch(`${API_URL}/projects/${projectId}`, { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + credentials: "include", + body: JSON.stringify(updates), + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to update project"); + } + + return data; + }, + + // Delete project + deleteProject: async (projectId: string): Promise => { + // ← was number + const response = await fetch(`${API_URL}/projects/${projectId}`, { + method: "DELETE", + credentials: "include", + }); + + const data = await response.json(); + + if (!response.ok) { + throw new Error(data.message || "Failed to delete project"); + } + + return data; + }, +}; diff --git 
a/src/types/jsfive.d.ts b/src/types/jsfive.d.ts new file mode 100644 index 0000000..585e835 --- /dev/null +++ b/src/types/jsfive.d.ts @@ -0,0 +1,23 @@ +declare module "jsfive" { + export class File { + constructor(buffer: ArrayBuffer); + keys?: string[] | (() => string[]); + attrs?: Record; + get(key: string): any; + } + + export interface Dataset { + shape?: number[]; + dtype?: string; + value?: any; + attrs?: Record; + keys?: string[] | (() => string[]); + get?(key: string): any; + } + + export interface Group { + keys?: string[] | (() => string[]); + attrs?: Record; + get(key: string): any; + } +} diff --git a/yarn.lock b/yarn.lock index ec7f415..b946d6d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2268,6 +2268,21 @@ resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz#4fc56c15c580b9adb7dc3c333a134e540b44bfb1" integrity sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw== +"@mapbox/node-pre-gyp@^1.0.0": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz#417db42b7f5323d79e93b34a6d7a2a12c0df43fa" + integrity sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ== + dependencies: + detect-libc "^2.0.0" + https-proxy-agent "^5.0.0" + make-dir "^3.1.0" + node-fetch "^2.6.7" + nopt "^5.0.0" + npmlog "^5.0.1" + rimraf "^3.0.2" + semver "^7.3.5" + tar "^6.1.11" + "@mui/core-downloads-tracker@^5.17.1": version "5.17.1" resolved "https://registry.yarnpkg.com/@mui/core-downloads-tracker/-/core-downloads-tracker-5.17.1.tgz#49b88ecb68b800431b5c2f2bfb71372d1f1478fa" @@ -3446,6 +3461,11 @@ resolved "https://registry.yarnpkg.com/@webgpu/types/-/types-0.1.61.tgz#60ac1756bbeeae778b5357a94d4e6e160592d6f1" integrity sha512-w2HbBvH+qO19SB5pJOJFKs533CdZqxl3fcGonqL321VHkW7W/iBo6H8bjDy6pr/+pbMwIu5dnuaAxH7NxBqUrQ== +"@xmldom/xmldom@^0.8.6": + version "0.8.11" + resolved 
"https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.8.11.tgz#b79de2d67389734c57c52595f7a7305e30c2d608" + integrity sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw== + "@xtuc/ieee754@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" @@ -3461,6 +3481,11 @@ abab@^2.0.3, abab@^2.0.5: resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== + accepts@~1.3.4, accepts@~1.3.8: version "1.3.8" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" @@ -3522,6 +3547,11 @@ adjust-sourcemap-loader@^4.0.0: loader-utils "^2.0.0" regex-parser "^2.2.11" +adler-32@~1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/adler-32/-/adler-32-1.3.1.tgz#1dbf0b36dda0012189a32b3679061932df1821e2" + integrity sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A== + agent-base@6: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" @@ -3656,6 +3686,19 @@ aproba@^1.0.3: resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== +"aproba@^1.0.3 || ^2.0.0": + version "2.1.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.1.0.tgz#75500a190313d95c64e871e7e4284c6ac219f0b1" + integrity 
sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew== + +are-we-there-yet@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz#372e0e7bd279d8e94c653aaa1f67200884bf3e1c" + integrity sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw== + dependencies: + delegates "^1.0.0" + readable-stream "^3.6.0" + are-we-there-yet@~1.1.2: version "1.1.7" resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz#b15474a932adab4ff8a50d9adfa7e4e926f21146" @@ -3674,7 +3717,7 @@ arg@^5.0.2: resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== -argparse@^1.0.7: +argparse@^1.0.7, argparse@~1.0.3: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== @@ -4046,7 +4089,7 @@ base64-js@0.0.7: resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-0.0.7.tgz#54400dc91d696cec32a8a47902f971522fee8f48" integrity sha512-0nMfGOwe+glKQmfi9trLwlSMeLuTkupKQ6scwrlRP4TdfZR87kwZwMBNYOz8xdtXqefa2uI7rQy6n8GxxtYFvw== -base64-js@^1.3.1: +base64-js@^1.3.1, base64-js@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== @@ -4115,6 +4158,11 @@ bluebird@^3.7.2: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== +bluebird@~3.4.0: + version "3.4.7" + resolved 
"https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3" + integrity sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA== + body-parser@1.20.3: version "1.20.3" resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" @@ -4304,6 +4352,15 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001702, caniuse-lite@^1.0.30001718: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001723.tgz#c4f3174f02089720736e1887eab345e09bb10944" integrity sha512-1R/elMjtehrFejxwmexeXAtae5UO9iSyFn6G/I806CYC/BLyyBk1EPhrKBkWhy6wM6Xnm47dSJQec+tLJ39WHw== +canvas@^2.11.0: + version "2.11.2" + resolved "https://registry.yarnpkg.com/canvas/-/canvas-2.11.2.tgz#553d87b1e0228c7ac0fc72887c3adbac4abbd860" + integrity sha512-ItanGBMrmRV7Py2Z+Xhs7cT+FNt5K0vPL4p9EZ/UX/Mu7hFbkxSjKF2KVtPwX7UYWp7dRKnrTvReflgrItJbdw== + dependencies: + "@mapbox/node-pre-gyp" "^1.0.0" + nan "^2.17.0" + simple-get "^3.0.3" + case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" @@ -4317,6 +4374,14 @@ center-align@^0.1.1: align-text "^0.1.3" lazy-cache "^1.0.3" +cfb@~1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cfb/-/cfb-1.2.2.tgz#94e687628c700e5155436dac05f74e08df23bc44" + integrity sha512-KfdUZsSOw19/ObEWasvBP/Ac4reZvAGauZhs6S/gqNhXhI7cKwvlH7ulj+dOEYnca4bm4SGo8C1bTAQvnTjgQA== + dependencies: + adler-32 "~1.3.0" + crc-32 "~1.2.0" + chalk-template@^1.1.0: version "1.1.2" resolved "https://registry.yarnpkg.com/chalk-template/-/chalk-template-1.1.2.tgz#88ff13e75a333d232304e13abc48c5b5be15f1ce" @@ -4404,6 +4469,11 @@ chownr@^1.1.1: resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity 
sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== +chownr@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" + integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== + chrome-trace-event@^1.0.2: version "1.0.4" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.4.tgz#05bffd7ff928465093314708c93bdfa9bd1f0f5b" @@ -4485,6 +4555,11 @@ code-point-at@^1.0.0: resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA== +codepage@~1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/codepage/-/codepage-1.15.0.tgz#2e00519024b39424ec66eeb3ec07227e692618ab" + integrity sha512-3g6NUTPd/YtuuGrhMnOMRjFc+LJw/bnMp3+0r/Wcz3IXUuCosKRJvMphm5+Q+bvTVGcJJuRvVLuYba+WojaFaA== + collect-v8-coverage@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" @@ -4522,6 +4597,11 @@ color-string@^1.6.0, color-string@^1.9.0: color-name "^1.0.0" simple-swizzle "^0.2.2" +color-support@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" + integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== + color@^3.1.3: version "3.2.1" resolved "https://registry.yarnpkg.com/color/-/color-3.2.1.tgz#3544dc198caf4490c3ecc9a790b54fe9ff45e164" @@ -4679,7 +4759,7 @@ connect-history-api-fallback@^2.0.0: resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" integrity 
sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== -console-control-strings@^1.0.0, console-control-strings@~1.1.0: +console-control-strings@^1.0.0, console-control-strings@^1.1.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ== @@ -4778,6 +4858,11 @@ cosmiconfig@^7, cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: path-type "^4.0.0" yaml "^1.10.0" +crc-32@~1.2.0, crc-32@~1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.2.tgz#3cad35a934b8bf71f25ca524b6da51fb7eace2ff" + integrity sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ== + create-require@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" @@ -5447,6 +5532,11 @@ detect-libc@^1.0.3: resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" integrity sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg== +detect-libc@^2.0.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.1.2.tgz#689c5dcdc1900ef5583a4cb9f6d7b473742074ad" + integrity sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ== + detect-libc@^2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.4.tgz#f04715b8ba815e53b4d8109655b6508a6865a7e8" @@ -5470,6 +5560,11 @@ detect-port-alt@^1.1.6: address "^1.0.1" debug "^2.6.0" +dicom-parser@^1.8.21: + version "1.8.21" + resolved "https://registry.yarnpkg.com/dicom-parser/-/dicom-parser-1.8.21.tgz#916fdc77776367976b8457cad462b5b7cf74eaea" + 
integrity sha512-lYCweHQDsC8UFpXErPlg86Px2A8bay0HiUY+wzoG3xv5GzgqVHU3lziwSc/Gzn7VV7y2KeP072SzCviuOoU02w== + didyoumean@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" @@ -5490,6 +5585,11 @@ diff@^4.0.1: resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== +dingbat-to-unicode@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/dingbat-to-unicode/-/dingbat-to-unicode-1.0.1.tgz#5091dd673241453e6b5865e26e5a4452cdef5c83" + integrity sha512-98l0sW87ZT58pU4i61wa2OHwxbiYSbuxsCBozaVnYX2iCnr3bLM3fIes1/ej7h1YdOKuKt/MLs706TVnALA65w== + dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -5626,6 +5726,13 @@ dotenv@^10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== +duck@^0.1.12: + version "0.1.12" + resolved "https://registry.yarnpkg.com/duck/-/duck-0.1.12.tgz#de7adf758421230b6d7aee799ce42670586b9efa" + integrity sha512-wkctla1O6VfP89gQ+J/yDesM0S7B7XLXjKGzXxMDVFg7uEn706niAtyYovKbyq1oT9YwDcly721/iUWoc8MVRg== + dependencies: + underscore "^1.13.1" + dunder-proto@^1.0.0, dunder-proto@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/dunder-proto/-/dunder-proto-1.0.1.tgz#d7ae667e1dc83482f8b70fd0f6eefc50da30f58a" @@ -6714,6 +6821,11 @@ forwarded@0.2.0: resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== +frac@~1.1.2: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/frac/-/frac-1.1.2.tgz#3d74f7f6478c88a1b5020306d747dc6313c74d0b" + integrity sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA== + fraction.js@^4.3.7: version "4.3.7" resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" @@ -6748,6 +6860,13 @@ fs-extra@^9.0.0, fs-extra@^9.0.1: jsonfile "^6.0.1" universalify "^2.0.0" +fs-minipass@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" + integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== + dependencies: + minipass "^3.0.0" + fs-monkey@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.6.tgz#8ead082953e88d992cf3ff844faa907b26756da2" @@ -6785,6 +6904,21 @@ functions-have-names@^1.2.3: resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== +gauge@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-3.0.2.tgz#03bf4441c044383908bcfa0656ad91803259b395" + integrity sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q== + dependencies: + aproba "^1.0.3 || ^2.0.0" + color-support "^1.1.2" + console-control-strings "^1.0.0" + has-unicode "^2.0.1" + object-assign "^4.1.1" + signal-exit "^3.0.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + wide-align "^1.1.2" + gauge@~2.7.3: version "2.7.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" @@ -7043,7 +7177,7 @@ has-tostringtag@^1.0.2: dependencies: has-symbols "^1.0.3" -has-unicode@^2.0.0: +has-unicode@^2.0.0, has-unicode@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ== @@ -7278,6 +7412,11 @@ ignore@^5.2.0: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.2.tgz#3cd40e729f3643fd87cb04e50bf0eb722bc596f5" integrity sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g== +immediate@~3.0.5: + version "3.0.6" + resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" + integrity sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ== + immer@^9.0.21, immer@^9.0.7: version "9.0.21" resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.21.tgz#1e025ea31a40f24fb064f1fef23e931496330176" @@ -8412,6 +8551,13 @@ jsesc@~3.0.2: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e" integrity sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g== +jsfive@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/jsfive/-/jsfive-0.4.0.tgz#b10683afbc7d6f6da2f188d7df6f74080b451209" + integrity sha512-3I3feGAFleVsChR4o4g9McAOa3bLlJ8af3oimjHt4nJNnmnbGROqRghb7m4VXNSZO8UbImN3UPc0uY2EZGIw6Q== + dependencies: + pako "^2.0.4" + json-buffer@3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" @@ -8508,6 +8654,16 @@ jsonpointer@^5.0.0, jsonpointer@^5.0.1: object.assign "^4.1.4" object.values "^1.1.6" +jszip@^3.10.1, jszip@^3.7.1: + version "3.10.1" + resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.10.1.tgz#34aee70eb18ea1faec2f589208a157d1feb091c2" + integrity sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g== + dependencies: + lie "~3.3.0" + pako "~1.0.2" + 
readable-stream "~2.3.6" + setimmediate "^1.0.5" + jwt-decode@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/jwt-decode/-/jwt-decode-3.1.2.tgz#3fb319f3675a2df0c2895c8f5e9fa4b67b04ed59" @@ -8595,6 +8751,13 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" +lie@~3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a" + integrity sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ== + dependencies: + immediate "~3.0.5" + lilconfig@^2.0.3: version "2.1.0" resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" @@ -8715,6 +8878,15 @@ loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.4.0: dependencies: js-tokens "^3.0.0 || ^4.0.0" +lop@^0.4.2: + version "0.4.2" + resolved "https://registry.yarnpkg.com/lop/-/lop-0.4.2.tgz#c9c2f958a39b9da1c2f36ca9ad66891a9fe84640" + integrity sha512-RefILVDQ4DKoRZsJ4Pj22TxE3omDO47yFpkIBoDKzkqPRISs5U1cnAdg/5583YPkWPaLIYHOKRMQSvjFsO26cw== + dependencies: + duck "^0.1.12" + option "~0.2.1" + underscore "^1.13.1" + lower-case@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" @@ -8785,6 +8957,22 @@ makeerror@1.0.12: dependencies: tmpl "1.0.5" +mammoth@^1.11.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/mammoth/-/mammoth-1.11.0.tgz#f6c68624eaffcf56728a792fcccd3495d688bac5" + integrity sha512-BcEqqY/BOwIcI1iR5tqyVlqc3KIaMRa4egSoK83YAVrBf6+yqdAAbtUcFDCWX8Zef8/fgNZ6rl4VUv+vVX8ddQ== + dependencies: + "@xmldom/xmldom" "^0.8.6" + argparse "~1.0.3" + base64-js "^1.5.1" + bluebird "~3.4.0" + dingbat-to-unicode "^1.0.1" + jszip "^3.7.1" + lop "^0.4.2" + path-is-absolute "^1.0.0" + underscore "^1.13.1" + xmlbuilder "^10.0.0" + markdown-to-jsx@^7.4.1: version "7.7.6" resolved 
"https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-7.7.6.tgz#db67500bbc381b2a03cc6485aecf916adc56fe7f" @@ -8936,16 +9124,41 @@ minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.6: resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== +minipass@^3.0.0: + version "3.3.6" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" + integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== + dependencies: + yallist "^4.0.0" + +minipass@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" + integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== + "minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: version "7.1.2" resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== +minizlib@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" + integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== + dependencies: + minipass "^3.0.0" + yallist "^4.0.0" + mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: version "0.5.3" resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== +mkdirp@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + 
integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + mkdirp@~0.5.1: version "0.5.6" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" @@ -8980,6 +9193,11 @@ mz@^2.7.0: object-assign "^4.0.1" thenify-all "^1.0.0" +nan@^2.17.0: + version "2.25.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.25.0.tgz#937ed345e63d9481362a7942d49c4860d27eeabd" + integrity sha512-0M90Ag7Xn5KMLLZ7zliPWP3rT90P6PN+IzVFS0VqmnPktBk3700xUVv8Ikm9EUaUE5SDWdp/BIxdENzVznpm1g== + nanoid@^3.3.11, nanoid@^3.3.7: version "3.3.11" resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.11.tgz#4f4f112cefbe303202f2199838128936266d185b" @@ -9102,7 +9320,7 @@ node-addon-api@^3.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161" integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A== -node-fetch@^2.7.0: +node-fetch@^2.6.7, node-fetch@^2.7.0: version "2.7.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== @@ -9124,6 +9342,13 @@ node-releases@^2.0.19: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.19.tgz#9e445a52950951ec4d177d843af370b411caf314" integrity sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw== +nopt@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-5.0.0.tgz#530942bb58a512fccafe53fe210f13a25355dc88" + integrity sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ== + dependencies: + abbrev "1" + normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -9156,6 +9381,16 @@ npmlog@^4.0.1, npmlog@^4.1.2: gauge "~2.7.3" set-blocking "~2.0.0" +npmlog@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-5.0.1.tgz#f06678e80e29419ad67ab964e0fa69959c1eb8b0" + integrity sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw== + dependencies: + are-we-there-yet "^2.0.0" + console-control-strings "^1.1.0" + gauge "^3.0.0" + set-blocking "^2.0.0" + nth-check@^1.0.2, nth-check@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" @@ -9330,6 +9565,11 @@ open@^8.0.9, open@^8.4.0: is-docker "^2.1.1" is-wsl "^2.2.0" +option@~0.2.1: + version "0.2.4" + resolved "https://registry.yarnpkg.com/option/-/option-0.2.4.tgz#fd475cdf98dcabb3cb397a3ba5284feb45edbfe4" + integrity sha512-pkEqbDyl8ou5cpq+VsnQbe/WlEy5qS7xPzMS1U55OCG9KPvwFD46zDbxQIj3egJSFc3D+XhYOPUzz49zQAVy7A== + optionator@^0.8.1: version "0.8.3" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" @@ -9430,11 +9670,16 @@ package-json-from-dist@^1.0.0: resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505" integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== -pako@1.0.11: +pako@1.0.11, pako@~1.0.2: version "1.0.11" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== +pako@^2.0.4: + version "2.1.0" + resolved "https://registry.yarnpkg.com/pako/-/pako-2.1.0.tgz#266cc37f98c7d883545d11335c00fbd4062c9a86" + integrity 
sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug== + param-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" @@ -9550,6 +9795,28 @@ path-type@^4.0.0: resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== +path2d-polyfill@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/path2d-polyfill/-/path2d-polyfill-2.1.1.tgz#6098b7bf2fc24c306c6377bcd558b17ba437ea27" + integrity sha512-4Rka5lN+rY/p0CdD8+E+BFv51lFaFvJOrlOhyQ+zjzyQrzyh3ozmxd1vVGGDdIbUFSBtIZLSnspxTgPT0iJhvA== + dependencies: + path2d "0.1.1" + +path2d@0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/path2d/-/path2d-0.1.1.tgz#d3c3886cd2252fb2a7830c27ea7bb9a862d937ea" + integrity sha512-/+S03c8AGsDYKKBtRDqieTJv2GlkMb0bWjnqOgtF6MkjdUQ9a8ARAtxWf9NgKLGm2+WQr6+/tqJdU8HNGsIDoA== + +pdfjs-dist@3.4.120: + version "3.4.120" + resolved "https://registry.yarnpkg.com/pdfjs-dist/-/pdfjs-dist-3.4.120.tgz#6f4222117157498f179c95dc4569fad6336a8fdd" + integrity sha512-B1hw9ilLG4m/jNeFA0C2A0PZydjxslP8ylU+I4XM7Bzh/xWETo9EiBV848lh0O0hLut7T6lK1V7cpAXv5BhxWw== + dependencies: + path2d-polyfill "^2.0.1" + web-streams-polyfill "^3.2.1" + optionalDependencies: + canvas "^2.11.0" + performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" @@ -10641,7 +10908,7 @@ read-cache@^1.0.0: dependencies: pify "^2.3.0" -readable-stream@^2.0.1, readable-stream@^2.0.6, readable-stream@^2.2.2: +readable-stream@^2.0.1, readable-stream@^2.0.6, readable-stream@^2.2.2, readable-stream@~2.3.6: version "2.3.8" resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== @@ -10654,7 +10921,7 @@ readable-stream@^2.0.1, readable-stream@^2.0.6, readable-stream@^2.2.2: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0: +readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== @@ -11139,7 +11406,7 @@ serve-static@1.16.2: parseurl "~1.3.3" send "0.19.0" -set-blocking@~2.0.0: +set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== @@ -11467,6 +11734,13 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== +ssf@~0.11.2: + version "0.11.2" + resolved "https://registry.yarnpkg.com/ssf/-/ssf-0.11.2.tgz#0b99698b237548d088fc43cdf2b70c1a7512c06c" + integrity sha512-+idbmIXoYET47hH+d7dfm2epdOMUDjqcB4648sTZ+t2JwoyBFL/insLfB/racrDmsKB3diwsDA696pZMieAC5g== + dependencies: + frac "~1.1.2" + stable@^0.1.8: version "0.1.8" resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" @@ -11582,7 +11856,7 @@ string-width@^1.0.1: is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -"string-width@^1.0.2 || 2 || 3 || 4", 
string-width@^4.1.0, string-width@^4.2.0: +"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -11934,6 +12208,18 @@ tar-stream@^2.1.4: inherits "^2.0.3" readable-stream "^3.1.1" +tar@^6.1.11: + version "6.2.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" + integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== + dependencies: + chownr "^2.0.0" + fs-minipass "^2.0.0" + minipass "^5.0.0" + minizlib "^2.1.1" + mkdirp "^1.0.3" + yallist "^4.0.0" + temp-dir@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" @@ -12337,6 +12623,11 @@ underscore@1.12.1: resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.12.1.tgz#7bb8cc9b3d397e201cf8553336d262544ead829e" integrity sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw== +underscore@^1.13.1: + version "1.13.7" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.7.tgz#970e33963af9a7dda228f17ebe8399e5fbe63a10" + integrity sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g== + undici-types@~6.21.0: version "6.21.0" resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.21.0.tgz#691d00af3909be93a7faa13be61b3a5b50ef12cb" @@ -12589,6 +12880,11 @@ wbuf@^1.1.0, wbuf@^1.7.3: dependencies: minimalistic-assert "^1.0.0" +web-streams-polyfill@^3.2.1: + version "3.3.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz#2073b91a2fdb1fbfbd401e7de0ac9f8214cecb4b" + integrity 
sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw== + web-vitals@^2.1.0: version "2.1.4" resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" @@ -12853,7 +13149,7 @@ which@^2.0.1: dependencies: isexe "^2.0.0" -wide-align@^1.1.0: +wide-align@^1.1.0, wide-align@^1.1.2: version "1.1.5" resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3" integrity sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg== @@ -12870,11 +13166,21 @@ window-size@0.1.0: resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" integrity sha512-1pTPQDKTdd61ozlKGNCjhNRd+KPmgLSGa3mZTHoOliaGcESD8G1PXhh7c1fgiPjVbNVfgy2Faw4BI8/m0cC8Mg== +wmf@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wmf/-/wmf-1.0.2.tgz#7d19d621071a08c2bdc6b7e688a9c435298cc2da" + integrity sha512-/p9K7bEh0Dj6WbXg4JG0xvLQmIadrner1bi45VMJTfnbVHsc7yIajZyoSoK60/dtVBs12Fm6WkUI5/3WAVsNMw== + word-wrap@^1.2.5, word-wrap@~1.2.3: version "1.2.5" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== +word@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/word/-/word-0.3.0.tgz#8542157e4f8e849f4a363a288992d47612db9961" + integrity sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA== + wordwrap@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" @@ -13106,11 +13412,29 @@ xdg-basedir@^5.0.1: resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-5.1.0.tgz#1efba19425e73be1bc6f2a6ceb52a3d2c884c0c9" integrity 
sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ== +xlsx@^0.18.5: + version "0.18.5" + resolved "https://registry.yarnpkg.com/xlsx/-/xlsx-0.18.5.tgz#16711b9113c848076b8a177022799ad356eba7d0" + integrity sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ== + dependencies: + adler-32 "~1.3.0" + cfb "~1.2.1" + codepage "~1.15.0" + crc-32 "~1.2.1" + ssf "~0.11.2" + wmf "~1.0.1" + word "~0.3.0" + xml-name-validator@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== +xmlbuilder@^10.0.0: + version "10.1.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-10.1.1.tgz#8cae6688cc9b38d850b7c8d3c0a4161dcaf475b0" + integrity sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg== + xmlchars@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" @@ -13138,6 +13462,11 @@ yallist@^3.0.2: resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: version "1.10.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b"