From f161790387f00c5c63628f66683800b0fae9e201 Mon Sep 17 00:00:00 2001 From: Davide Silvestri <75379892+silvestrid@users.noreply.github.com> Date: Fri, 16 Jan 2026 11:34:19 +0100 Subject: [PATCH] chore: switch to ruff (#4387) * Switch to ruff * Fix flaky test * Remove frontend-lint changes * Fix all-in-one image creation in CI * Update saas target * Add changelog entry * Fix PYTHONPATH for lint-migrations * Run check-migrations in the CI --- .github/workflows/ci.yml | 35 ++- .gitlab/ci_includes/jobs.yml | 4 +- backend/.flake8 | 22 -- backend/.isort.cfg | 6 - backend/docker/docker-entrypoint.sh | 3 + backend/flake8_plugins/__init__.py | 1 - .../flake8_plugins/flake8_baserow/__init__.py | 4 - .../flake8_baserow/docstring.py | 173 ----------- .../flake8_plugins/flake8_baserow/psycopg.py | 31 -- .../tests/test_flake8_baserow_docstring.py | 85 ------ .../tests/test_flake8_baserow_psycopg.py | 41 --- backend/justfile | 47 ++- backend/pyproject.toml | 56 ++-- backend/src/baserow/api/admin/views.py | 3 +- backend/src/baserow/api/decorators.py | 8 +- backend/src/baserow/api/extensions.py | 2 +- backend/src/baserow/api/jobs/serializers.py | 6 +- .../src/baserow/api/settings/serializers.py | 6 +- backend/src/baserow/api/user_sources/views.py | 3 +- backend/src/baserow/api/utils.py | 4 +- .../baserow/api/workspaces/users/errors.py | 3 +- .../src/baserow/api/workspaces/users/views.py | 3 +- backend/src/baserow/config/settings/base.py | 3 +- backend/src/baserow/config/settings/heroku.py | 7 +- backend/src/baserow/config/settings/test.py | 2 +- .../automation/automation_dispatch_context.py | 3 +- .../data_providers/data_provider_types.py | 3 +- .../contrib/automation/nodes/models.py | 51 ++-- .../contrib/automation/nodes/registries.py | 6 +- .../contrib/automation/workflows/handler.py | 11 +- .../builder/api/data_sources/serializers.py | 6 +- .../contrib/builder/api/data_sources/views.py | 3 +- .../builder/api/domains/public_views.py | 3 +- 
.../builder/api/workflow_actions/views.py | 4 +- .../contrib/builder/data_sources/handler.py | 6 +- .../contrib/builder/data_sources/service.py | 12 +- .../elements/collection_field_types.py | 6 +- .../contrib/builder/elements/element_types.py | 3 +- .../contrib/builder/elements/handler.py | 16 +- .../contrib/builder/elements/registries.py | 12 +- .../builder/theme/theme_config_block_types.py | 8 +- .../builder/workflow_actions/models.py | 21 +- .../workflow_actions/workflow_action_types.py | 13 +- .../dashboard/api/data_sources/views.py | 3 +- .../data_sources/dispatch_context.py | 3 +- .../contrib/dashboard/data_sources/handler.py | 6 +- .../airtable/airtable_column_types.py | 12 +- .../contrib/database/airtable/handler.py | 10 +- .../contrib/database/airtable/registry.py | 14 +- .../baserow/contrib/database/api/constants.py | 28 +- .../database/api/fields/serializers.py | 6 +- .../database/api/tokens/serializers.py | 2 +- .../database/api/views/form/serializers.py | 6 +- .../contrib/database/api/views/serializers.py | 2 +- .../contrib/database/api/views/views.py | 8 +- .../contrib/database/application_types.py | 22 +- backend/src/baserow/contrib/database/apps.py | 2 +- .../contrib/database/data_sync/handler.py | 15 +- .../circular_reference_checker.py | 2 +- .../database/fields/dependencies/handler.py | 8 +- .../contrib/database/fields/field_types.py | 73 ++--- .../database/fields/filter_support/base.py | 15 +- .../baserow/contrib/database/fields/models.py | 6 +- .../contrib/database/fields/registries.py | 8 +- .../contrib/database/fields/utils/duration.py | 2 +- .../database/formula/ast/function_defs.py | 4 +- .../contrib/database/formula/ast/tree.py | 2 +- .../django_expressions.py | 10 +- .../database/formula/types/formula_types.py | 11 +- .../management/commands/copy_tables.py | 7 +- .../management/commands/fill_row_history.py | 6 +- .../management/commands/fill_workspace.py | 2 +- .../commands/install_airtable_templates.py | 2 +- 
.../migrations/0028_fix_negative_date.py | 2 +- .../baserow/contrib/database/rows/handler.py | 29 +- .../baserow/contrib/database/rows/helpers.py | 6 +- .../database/rows/history_providers.py | 10 +- .../contrib/database/search/handler.py | 8 +- .../baserow/contrib/database/search_types.py | 6 +- .../baserow/contrib/database/table/cache.py | 3 +- .../baserow/contrib/database/table/models.py | 6 +- .../contrib/database/tokens/handler.py | 6 +- .../baserow/contrib/database/views/actions.py | 4 +- .../contrib/database/views/exceptions.py | 3 +- .../contrib/database/views/registries.py | 41 ++- .../contrib/database/views/view_filters.py | 12 +- .../contrib/database/views/view_types.py | 22 +- .../contrib/database/ws/views/rows/signals.py | 6 +- .../integrations/ai/integration_types.py | 6 +- .../integrations/core/service_types.py | 2 +- .../integrations/local_baserow/utils.py | 2 +- backend/src/baserow/core/action/handler.py | 1 - .../core/app_auth_providers/handler.py | 6 +- .../baserow/core/app_auth_providers/types.py | 3 +- .../src/baserow/core/auth_provider/models.py | 3 +- backend/src/baserow/core/constants.py | 1 + backend/src/baserow/core/handler.py | 4 +- backend/src/baserow/core/jobs/mixins.py | 16 +- backend/src/baserow/core/jobs/registries.py | 1 - .../core/management/backup/backup_runner.py | 6 +- .../commands/export_workspace_applications.py | 2 +- backend/src/baserow/core/management/utils.py | 7 +- backend/src/baserow/core/output_parsers.py | 7 +- backend/src/baserow/core/populate.py | 14 +- backend/src/baserow/core/psycopg.py | 7 +- backend/src/baserow/core/registry.py | 4 +- .../baserow/core/services/formula_importer.py | 3 +- .../core/telemetry/env_overrides_parser.py | 9 +- .../src/baserow/core/telemetry/telemetry.py | 2 +- backend/src/baserow/core/trash/handler.py | 7 +- .../core/two_factor_auth/exceptions.py | 18 +- backend/src/baserow/core/types.py | 6 +- .../src/baserow/core/user_files/handler.py | 2 +- backend/src/baserow/core/utils.py | 4 +- 
backend/src/baserow/manage.py | 2 +- .../baserow/test_utils/fixtures/element.py | 18 +- .../src/baserow/test_utils/fixtures/job.py | 3 +- backend/src/baserow/test_utils/helpers.py | 40 +-- .../src/baserow/test_utils/pytest_conftest.py | 42 +-- backend/src/baserow/throttling.py | 2 +- .../groups/test_workspaces_admin_views.py | 4 +- .../applications/test_application_views.py | 6 +- backend/tests/baserow/api/test_api_utils.py | 3 +- .../test_user_source_auth_views.py | 14 +- ...test_user_source_authentication_backend.py | 5 +- .../baserow/api/users/test_user_views.py | 4 +- .../test_data_provider_types.py | 8 +- .../data_sources/test_data_source_service.py | 30 +- .../builder/domains/test_domain_service.py | 15 +- .../test_collection_element_type_mixin.py | 1 + .../builder/elements/test_element_handler.py | 12 +- .../builder/elements/test_element_service.py | 30 +- .../builder/test_builder_application_type.py | 2 +- .../builder/test_element_formula_mixin.py | 7 +- .../builder/test_runtime_formula_results.py | 16 +- .../database/airtable/test_airtable_utils.py | 3 +- .../api/airtable/test_airtable_views.py | 9 +- .../api/rows/test_batch_rows_views.py | 14 +- .../database/api/rows/test_row_views.py | 26 +- .../database/api/tables/test_table_views.py | 7 +- .../api/views/form/test_form_view_views.py | 32 +- .../views/gallery/test_gallery_view_views.py | 54 ++-- .../api/views/grid/test_grid_view_views.py | 82 +++--- .../database/field/test_field_actions.py | 7 +- .../database/field/test_field_handler.py | 15 +- .../database/field/test_field_tasks.py | 9 +- .../database/field/test_field_types.py | 6 +- .../database/field/test_file_field_type.py | 6 +- .../database/field/test_lookup_field_type.py | 24 +- .../test_multiple_collaborators_field_type.py | 2 +- .../field/test_multiple_select_field_type.py | 42 ++- .../database/field/test_uuid_field_type.py | 4 +- .../formula/test_rename_field_references.py | 10 +- .../import_export/test_export_handler.py | 10 +- 
.../database/management/test_fill_table.py | 2 +- .../contrib/database/rows/test_row_history.py | 18 +- .../database/rows/test_rows_handler.py | 2 +- .../search/test_search_compatibility.py | 8 +- .../database/table/test_table_handler.py | 5 +- .../database/table/test_table_models.py | 10 +- .../test_database_application_type.py | 4 +- .../database/view/test_view_aggregations.py | 6 +- .../database/view/test_view_array_filters.py | 6 +- .../database/view/test_view_filters.py | 6 +- .../database/view/test_view_signals.py | 41 +-- .../database/webhooks/test_webhook_tasks.py | 26 +- .../core/test_smtp_email_service_type.py | 15 +- .../test_get_row_service_type.py | 5 +- .../test_list_rows_service_type.py | 5 +- .../local_baserow/test_migrations.py | 24 +- .../test_generative_ai_model_types.py | 6 +- .../integrations/test_integration_service.py | 25 +- .../baserow/core/jobs/test_jobs_handler.py | 6 +- .../baserow/core/service/test_service_type.py | 2 +- .../baserow/core/test_basic_permissions.py | 43 +-- backend/tests/baserow/core/test_core_utils.py | 2 +- .../user_sources/test_user_source_handler.py | 9 +- .../user_sources/test_user_source_service.py | 25 +- .../performance/test_formula_performance.py | 8 +- backend/uv.lock | 277 ++++++------------ ...and_formatting_toolchain_autopep8_bla.json | 9 + .../baserow_enterprise/api/audit_log/views.py | 2 +- .../api/builder/custom_code/views.py | 2 +- .../baserow_enterprise/api/data_sync/views.py | 2 +- .../api/field_permissions/views.py | 2 +- .../api/integrations/common/sso/saml/views.py | 6 +- .../src/baserow_enterprise/api/role/views.py | 2 +- .../api/secure_file_serve/views.py | 4 +- .../backend/src/baserow_enterprise/apps.py | 9 +- .../baserow_enterprise/assistant/assistant.py | 11 +- .../assistant/signatures.py | 8 +- .../assistant/tools/core/tools.py | 2 +- .../assistant/tools/database/tools.py | 2 +- .../assistant/tools/database/types/fields.py | 6 +- .../assistant/tools/database/types/views.py | 4 +- 
.../assistant/tools/database/utils.py | 7 +- .../assistant/tools/search_user_docs/tools.py | 2 +- .../src/baserow_enterprise/audit_log/utils.py | 2 +- .../builder/application_types.py | 6 +- .../custom_code/application_type_mixin.py | 3 +- .../builder/elements/element_types.py | 5 +- .../config/settings/settings.py | 9 +- .../data_sync/baserow_table_data_sync.py | 6 +- .../data_sync/github_issues_data_sync.py | 2 +- .../data_sync/gitlab_issues_data_sync.py | 2 +- .../baserow_enterprise/data_sync/handler.py | 5 +- .../data_sync/hubspot_contacts_data_sync.py | 2 +- .../data_sync/jira_issues_data_sync.py | 2 +- .../baserow_enterprise/data_sync/models.py | 2 +- .../data_sync/two_way_sync_strategy_types.py | 3 +- .../date_dependency/field_rule_types.py | 4 +- .../emails_context_types.py | 3 +- .../field_permissions/permission_manager.py | 5 +- .../sso/oauth2/app_auth_provider_types.py | 3 +- .../sso/saml/app_auth_provider_types.py | 3 +- .../local_baserow/user_source_types.py | 6 +- .../src/baserow_enterprise/license_types.py | 7 +- ...ame_auditlogentry_group_id_workspace_id.py | 1 - .../migrations/0044_migrate_app_labels.py | 4 +- ...dicdatasyncinterval_deactivation_reason.py | 5 +- .../migrations/0057_role_hidden.py | 2 +- .../baserow_enterprise/role/default_roles.py | 15 +- .../src/baserow_enterprise/role/handler.py | 15 +- .../role/member_data_types.py | 2 +- .../role/permission_manager.py | 3 +- .../role/seat_usage_calculator.py | 3 +- .../sso/oauth2/auth_provider_types.py | 6 +- .../src/baserow_enterprise/sso/utils.py | 3 +- .../src/baserow_enterprise/structure_types.py | 3 +- .../src/baserow_enterprise/teams/handler.py | 3 +- .../src/baserow_enterprise/teams/models.py | 2 +- .../view_ownership_types.py | 3 +- .../baserow_enterprise/webhook_event_types.py | 3 +- .../audit_log/test_audit_log_admin_views.py | 10 +- .../test_audit_log_workspace_views.py | 5 +- .../api/role/test_other_views_with_rbac.py | 2 +- .../api/sso/test_oauth_views.py | 15 +- 
.../api/teams/test_teams_views.py | 2 +- .../assistant/test_assistant.py | 9 +- .../test_assistant_database_table_tools.py | 6 +- ...t_assistant_database_view_filters_tools.py | 6 +- .../assistant/test_telemetry.py | 7 +- .../audit_log/test_audit_log_export_job.py | 2 +- .../baserow_enterprise_tests/conftest.py | 6 +- .../test_enterprise_data_sync_handler.py | 2 +- .../data_sync/test_github_issues_data_sync.py | 4 +- .../data_sync/test_gitlab_issues_data_sync.py | 4 +- .../test_hubspot_contacts_data_sync.py | 4 +- .../data_sync/test_jira_issues_data_sync.py | 4 +- ...test_local_baserow_table_data_sync_type.py | 6 +- .../data_sync/test_postgresql_data_sync.py | 4 +- ...est_realtime_push_two_way_sync_strategy.py | 2 +- .../test_date_dependency_handler.py | 4 +- .../enterprise/test_enterprise_license.py | 12 +- .../enterprise_fixtures.py | 2 +- .../fields/test_link_row_field_rbac.py | 1 + .../local_baserow/test_user_source_types.py | 6 +- .../role/test_role_permission_manager.py | 4 +- .../sso/oauth2/test_auth_provider_types.py | 5 +- .../views/test_restricted_view.py | 10 +- .../api/builder/serializers.py | 2 +- .../api/dashboard/widgets/serializers.py | 3 +- .../baserow_premium/api/fields/exceptions.py | 2 +- .../src/baserow_premium/api/fields/views.py | 12 +- .../integrations/local_baserow/serializers.py | 3 +- .../api/license/serializers.py | 5 +- .../src/baserow_premium/api/license/views.py | 24 +- .../api/row_comments/serializers.py | 8 +- .../baserow_premium/api/row_comments/views.py | 32 +- .../api/user/user_data_types.py | 3 +- .../api/views/calendar/serializers.py | 2 +- .../api/views/calendar/views.py | 28 +- .../api/views/kanban/serializers.py | 2 +- .../baserow_premium/api/views/kanban/views.py | 14 +- .../api/views/timeline/serializers.py | 3 +- .../api/views/timeline/views.py | 22 +- .../src/baserow_premium/api/views/views.py | 16 +- premium/backend/src/baserow_premium/apps.py | 24 +- .../dashboard/widgets/models.py | 3 +- 
.../dashboard/widgets/widget_types.py | 20 +- .../baserow_premium/export/exporter_types.py | 4 +- .../src/baserow_premium/fields/actions.py | 3 +- .../src/baserow_premium/fields/field_types.py | 12 +- .../src/baserow_premium/fields/handler.py | 5 +- .../src/baserow_premium/fields/job_types.py | 2 +- .../src/baserow_premium/fields/visitors.py | 2 +- .../backend/src/baserow_premium/ical_utils.py | 6 +- .../local_baserow/service_types.py | 40 +-- .../src/baserow_premium/license/handler.py | 12 +- .../baserow_premium/license/license_types.py | 3 +- .../src/baserow_premium/license/models.py | 3 +- .../src/baserow_premium/license/plugin.py | 5 +- .../src/baserow_premium/license/registries.py | 3 +- .../0016_rowcommentsnotificationmode.py | 3 +- .../src/baserow_premium/permission_manager.py | 7 +- .../backend/src/baserow_premium/plugins.py | 3 +- .../backend/src/baserow_premium/populate.py | 3 +- .../baserow_premium/row_comments/handler.py | 19 +- .../baserow_premium/row_comments/models.py | 2 +- .../row_comments/notification_types.py | 11 +- .../baserow_premium/row_comments/receivers.py | 3 +- .../row_comments/row_metadata_types.py | 8 +- .../row_comments/trash_types.py | 7 +- .../src/baserow_premium/usage/handler.py | 5 +- .../src/baserow_premium/usage/tasks.py | 3 +- .../src/baserow_premium/views/actions.py | 3 +- .../baserow_premium/views/decorator_types.py | 3 +- .../views/decorator_value_provider_types.py | 2 +- .../views/form_view_mode_types.py | 3 +- .../src/baserow_premium/views/handler.py | 6 +- .../views/view_ownership_types.py | 5 +- .../src/baserow_premium/views/view_types.py | 60 ++-- .../ws/row_comments/signals.py | 5 +- .../dashboard/test_chart_widget_type_views.py | 8 +- ...grouped_aggregate_rows_data_source_type.py | 12 +- .../test_pie_chart_widget_type_views.py | 8 +- .../fields/test_generate_formula_prompt.py | 3 +- .../api/license/test_premium_license_views.py | 3 +- .../test_row_comment_trashable_type.py | 6 +- 
.../row_comments/test_row_comments_views.py | 4 +- .../api/views/views/test_calendar_views.py | 24 +- .../api/views/views/test_kanban_views.py | 68 ++--- .../api/views/views/test_premium_views.py | 6 +- .../views/test_preview_public_view_export.py | 16 +- .../api/views/views/test_timeline_views.py | 54 ++-- .../tests/baserow_premium_tests/conftest.py | 6 +- .../dashboard/test_chart_widget_type.py | 6 +- ...test_dashboard_application_types_charts.py | 14 +- .../dashboard/test_pie_chart_widget_type.py | 16 +- .../export/test_premium_export_types.py | 2 +- .../fields/test_ai_field_filters.py | 12 +- .../fields/test_ai_field_handler.py | 4 +- .../fields/test_ai_field_output_types.py | 2 +- .../fields/test_ai_field_type.py | 4 +- .../fields/test_ai_field_visitors.py | 2 +- .../test_generate_ai_values_job_execution.py | 9 +- .../test_generate_ai_values_job_type.py | 3 +- .../tests/baserow_premium_tests/fixtures.py | 13 +- .../test_ai_parallel_execution.py | 2 +- .../generative_ai/test_managers.py | 2 +- ...est_grouped_aggregate_rows_service_type.py | 18 +- .../license/test_license_handler.py | 16 +- .../license/test_license_models.py | 3 +- .../license/test_license_tasks.py | 1 + .../row_comments/test_row_comments_actions.py | 2 +- .../row_comments/test_row_comments_handler.py | 8 +- .../test_row_comments_notification_types.py | 16 +- .../test_row_comments_trash_types.py | 2 +- .../baserow_premium_tests/test_ical_utils.py | 4 +- .../views/test_calendar_view_type.py | 18 +- .../views/test_kanban_view_type.py | 4 +- .../test_premium_form_view_mode_types.py | 2 +- .../test_premium_view_decorator_types.py | 2 +- ...ium_view_decorator_value_provider_types.py | 8 +- .../views/test_premium_view_handler.py | 4 +- .../views/test_premium_view_models.py | 1 + .../test_premium_view_notification_types.py | 2 +- .../views/test_premium_ws_view_signals.py | 2 +- .../views/test_timeline_view_type.py | 6 +- .../views/test_view_change_ownership_type.py | 2 +- 
..._view_ownership_type_permission_manager.py | 2 +- .../ws/test_ws_row_comments_signals.py | 8 +- 362 files changed, 1705 insertions(+), 2162 deletions(-) delete mode 100644 backend/.flake8 delete mode 100644 backend/.isort.cfg delete mode 100644 backend/flake8_plugins/__init__.py delete mode 100644 backend/flake8_plugins/flake8_baserow/__init__.py delete mode 100644 backend/flake8_plugins/flake8_baserow/docstring.py delete mode 100644 backend/flake8_plugins/flake8_baserow/psycopg.py delete mode 100644 backend/flake8_plugins/tests/test_flake8_baserow_docstring.py delete mode 100644 backend/flake8_plugins/tests/test_flake8_baserow_psycopg.py create mode 100644 changelog/entries/unreleased/refactor/replace_python_linting_and_formatting_toolchain_autopep8_bla.json diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0c41a05e02..6940125c3f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -200,22 +200,21 @@ jobs: name: Backend Lint runs-on: ubuntu-latest needs: - - build-backend - detect-changes if: needs.detect-changes.outputs.backend == 'true' || github.ref_name == 'develop' || github.ref_name == 'master' - permissions: - contents: read - packages: read steps: - - name: Log in to GitHub Container Registry - uses: docker/login-action@v3 + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v4 with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + version: "0.5.6" - - name: Run backend lint - run: docker run --rm ${{ needs.build-backend.outputs.image }} lint + - name: Run ruff + working-directory: backend + run: | + uvx ruff check src/ ../premium/backend/src/ ../enterprise/backend/src/ tests/ ../premium/backend/tests/ ../enterprise/backend/tests/ + uvx ruff format --check src/ ../premium/backend/src/ ../enterprise/backend/src/ tests/ ../premium/backend/tests/ ../enterprise/backend/tests/ frontend-lint: name: Web-Frontend Lint @@ -361,12 +360,22 @@ 
jobs: -e DATABASE_USER=baserow \ -e DATABASE_PASSWORD=baserow \ ${{ needs.build-backend.outputs.image }} ci-check-startup-oss-only + - name: Check backend migrations + run: | + docker run --rm --network="${{ job.services.db.network }}" \ + -e DATABASE_HOST=db \ + -e DATABASE_PORT=5432 \ + -e DATABASE_NAME=baserow \ + -e DATABASE_USER=baserow \ + -e DATABASE_PASSWORD=baserow \ + ${{ needs.build-backend.outputs.image }} ci-check-migrations test-backend: name: Backend Tests (Group ${{ matrix.group }}) runs-on: ubuntu-latest needs: - build-backend + - backend-lint - detect-changes if: needs.detect-changes.outputs.backend == 'true' || github.ref_name == 'develop' || github.ref_name == 'master' permissions: @@ -975,8 +984,8 @@ jobs: file: deploy/all-in-one/Dockerfile push: true build-args: | - FROM_BACKEND_IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_REPO }}/backend:ci-tested-${{ env.REAL_GITHUB_SHA }} - FROM_WEBFRONTEND_IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_REPO }}/web-frontend:ci-tested-${{ env.REAL_GITHUB_SHA }} + BACKEND_IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_REPO }}/backend:ci-tested-${{ env.REAL_GITHUB_SHA }} + WEBFRONTEND_IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_REPO }}/web-frontend:ci-tested-${{ env.REAL_GITHUB_SHA }} tags: | ${{ env.REGISTRY }}/${{ env.IMAGE_REPO }}/baserow:ci-tested-${{ env.REAL_GITHUB_SHA }} cache-from: type=gha,scope=all-in-one diff --git a/.gitlab/ci_includes/jobs.yml b/.gitlab/ci_includes/jobs.yml index 14686b0303..49a3518963 100644 --- a/.gitlab/ci_includes/jobs.yml +++ b/.gitlab/ci_includes/jobs.yml @@ -1,6 +1,6 @@ # ============== "Abstract" ci stages used by real stages ======================= -# Builds a dev version of a specific Dockerfile (--target dev) using a previous CI +# Builds a dev version of a specific Dockerfile (--target ci) using a previous CI # image or the latest develop image as a cache to speed up the build. Tags and pushes # the resulting dev image for later stages in the pipeline to use. 
# @@ -149,7 +149,7 @@ $EXTRA_BUILD_ARGS \ $IMAGE_LABELS \ --push \ - --target dev \ + --target ci \ --tag $CI_IMAGE_PATH \ -f $DOCKERFILE_PATH .; diff --git a/backend/.flake8 b/backend/.flake8 deleted file mode 100644 index eb50f5ee63..0000000000 --- a/backend/.flake8 +++ /dev/null @@ -1,22 +0,0 @@ -[flake8] -extend-ignore = E203, W503, F541, E501 -max-doc-length = 88 -per-file-ignores = - tests/*: F841 - ../premium/backend/tests/*: F841 - ../enterprise/backend/tests/*: F841 - src/baserow/contrib/database/migrations/*: BDC001 - src/baserow/core/migrations/*: BDC001 - src/baserow/core/psycopg.py: BPG001 -exclude = - .git, - __pycache__, - src/baserow/config/settings/local.py, - src/baserow/core/formula/parser/generated - - -[flake8:local-plugins] -extension = - BDC001 = flake8_baserow:DocstringPlugin - BPG001 = flake8_baserow:BaserowPsycopgChecker -paths = ./flake8_plugins diff --git a/backend/.isort.cfg b/backend/.isort.cfg deleted file mode 100644 index 2a2ecd8c67..0000000000 --- a/backend/.isort.cfg +++ /dev/null @@ -1,6 +0,0 @@ -[settings] -profile = black -default_section = THIRDPARTY -known_first_party = baserow,baserow_enterprise -known_django = django -sections = FUTURE,STDLIB,DJANGO,THIRDPARTY,FIRSTPARTY,LOCALFOLDER diff --git a/backend/docker/docker-entrypoint.sh b/backend/docker/docker-entrypoint.sh index f2cebd3849..1760e2f84c 100755 --- a/backend/docker/docker-entrypoint.sh +++ b/backend/docker/docker-entrypoint.sh @@ -363,6 +363,9 @@ case "$1" in ci-check-startup-oss-only) exec just ci-check-startup-oss-only ;; + ci-check-migrations) + exec just check-migrations + ;; celery-worker) if [[ -n "${BASEROW_RUN_MINIMAL}" && $BASEROW_AMOUNT_OF_WORKERS == "1" ]]; then export OTEL_SERVICE_NAME="celery-worker-combined" diff --git a/backend/flake8_plugins/__init__.py b/backend/flake8_plugins/__init__.py deleted file mode 100644 index 119c6443b7..0000000000 --- a/backend/flake8_plugins/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .flake8_baserow import 
DocstringPlugin, BaserowPsycopgChecker, BaserowAIImportsChecker diff --git a/backend/flake8_plugins/flake8_baserow/__init__.py b/backend/flake8_plugins/flake8_baserow/__init__.py deleted file mode 100644 index 1caa7e9514..0000000000 --- a/backend/flake8_plugins/flake8_baserow/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .docstring import Plugin as DocstringPlugin -from .psycopg import BaserowPsycopgChecker - -__all__ = ["DocstringPlugin", "BaserowPsycopgChecker"] diff --git a/backend/flake8_plugins/flake8_baserow/docstring.py b/backend/flake8_plugins/flake8_baserow/docstring.py deleted file mode 100644 index 342468b0b2..0000000000 --- a/backend/flake8_plugins/flake8_baserow/docstring.py +++ /dev/null @@ -1,173 +0,0 @@ -from functools import partial - -try: - from functools import cached_property # only present in python >= 3.8 -except ImportError: - from backports.cached_property import cached_property - -import ast -from tokenize import COMMENT, TokenInfo, generate_tokens -from typing import Any, Dict, Generator, Iterable, List, Optional, Tuple, Type, Union - -import pycodestyle - -try: - from flake8.engine import pep8 as stdin_utils -except ImportError: - from flake8 import utils as stdin_utils - - -DocstringType = Union[ast.Constant, ast.Str] -ERR_MSG = "BDC001 - Baserow plugin: missing empty line after docstring" - - -class Token: - def __init__(self, token: TokenInfo): - self.token = token - - @property # noqa: A003 - def type(self) -> int: - return self.token[0] - - @property - def start(self) -> Tuple[int, int]: - return self.token[2] - - @property - def start_row(self) -> int: - return self.start[0] - - @property - def col_offset(self) -> int: - return self.start[1] - - -class FunctionNodeHelper: - def __init__(self, node: ast.FunctionDef, comments: Dict[int, Token]): - self.function_node = node - self.comments = comments - - @cached_property - def docstring(self) -> Optional[DocstringType]: - if not self.function_node.body: - return None - - first_node 
= self.function_node.body[0] - if isinstance(first_node, ast.Expr) and isinstance( - first_node.value, (ast.Constant, ast.Str) - ): - return first_node.value - - return None - - @cached_property - def docstring_end_lineno(self) -> int: - docstring = self.docstring - return ( - docstring.end_lineno - if hasattr(docstring, "end_lineno") - else docstring.lineno - ) - - @cached_property - def element_after_docstring(self) -> Optional[Union[Token, ast.AST]]: - """ - Returns a node (comment or AST node) if it is in the line immediately after - the docstring, otherwise returns None. - """ - - dostring_end_lineno = self.docstring_end_lineno - comment = self.comments.get(dostring_end_lineno + 1, None) - if comment is not None: - return comment - - function_node = self.function_node - second_node = function_node.body[1] if len(function_node.body) > 1 else None - if second_node and second_node.lineno == dostring_end_lineno + 1: - return second_node - - return None - - -def missing_empty_line_after_docstring( - node: ast.FunctionDef, - comments: Dict[int, Token], -) -> List[Tuple[int, int, str]]: - """ - Check if there is at least one empty line after the docstring. - - NOTE: ast in python3.7 see docstrings as ast.Str and has no end_lineno attr, - while in python3.10 it has end_lineno attr and is a ast.Constant. - - :param node: The function node to check. - :return: A list of errors (if any) in the form [(line_no, column_no, error_msg)]. 
- """ - - function_helper = FunctionNodeHelper(node, comments) - if function_helper.docstring is None: - return [] - - elem = function_helper.element_after_docstring - if elem is None: - return [] - - return [(function_helper.docstring_end_lineno, elem.col_offset, ERR_MSG)] - - -class Visitor(ast.NodeVisitor): - def __init__(self, tokens) -> None: - self.tokens = tokens - self.errors: List[Tuple[int, int, str]] = [] - - def visit_FunctionDef(self, node: ast.FunctionDef) -> None: - self.errors += missing_empty_line_after_docstring(node, self.tokens) - self.generic_visit(node) - - -class Plugin: - name = "flake8_baserow_docstring" - version = "0.1.0" - - _tokens = None - - def __init__( - self, - tree: ast.AST, - filename: str = None, - lines: Iterable[str] = None, - file_tokens: Iterable[TokenInfo] = None, - ): - self._tree = tree - self.filename = "stdin" if filename in ("stdin", "-", None) else filename - if lines: - if isinstance(lines, str): - lines = lines.splitlines(True) - self.lines = tuple(lines) - self._tokens = file_tokens - - @cached_property - def lines(self) -> Tuple[str, ...]: - if self.filename == "stdin": - return stdin_utils.stdin_get_value().splitlines(True) - return pycodestyle.readlines(self.filename) - - @cached_property - def tokens(self) -> Dict[int, Token]: - if self._tokens is not None: - tokens = self._tokens - else: - getter = partial(next, iter(self.lines)) - tokens = generate_tokens(getter) # type: ignore - comments = [] - for tkn in tokens: - token = Token(tkn) - if token.type == COMMENT: - comments.append(token) - return {comment.start_row: comment for comment in comments} - - def run(self) -> Generator[Tuple[int, int, str, Type[Any]], None, None]: - visitor = Visitor(self.tokens) - visitor.visit(self._tree) - - for line, col, msg in visitor.errors: - yield line, col, msg, type(self) diff --git a/backend/flake8_plugins/flake8_baserow/psycopg.py b/backend/flake8_plugins/flake8_baserow/psycopg.py deleted file mode 100644 index 
08cadbdceb..0000000000 --- a/backend/flake8_plugins/flake8_baserow/psycopg.py +++ /dev/null @@ -1,31 +0,0 @@ -import ast -from typing import Iterator, Tuple, Any - - -class BaserowPsycopgChecker: - name = "flake8-baserow-psycopg" - version = "0.1.0" - - def __init__(self, tree: ast.AST, filename: str): - self.tree = tree - self.filename = filename - - def run(self) -> Iterator[Tuple[int, int, str, Any]]: - for node in ast.walk(self.tree): - if isinstance(node, ast.Import): - for alias in node.names: - if alias.name in ("psycopg", "psycopg2"): - yield ( - node.lineno, - node.col_offset, - "BPG001 Import psycopg/psycopg2 from baserow.core.psycopg instead", - type(self), - ) - elif isinstance(node, ast.ImportFrom): - if node.module in ("psycopg", "psycopg2"): - yield ( - node.lineno, - node.col_offset, - "BPG001 Import psycopg/psycopg2 from baserow.core.psycopg instead", - type(self), - ) diff --git a/backend/flake8_plugins/tests/test_flake8_baserow_docstring.py b/backend/flake8_plugins/tests/test_flake8_baserow_docstring.py deleted file mode 100644 index 6f9549e34c..0000000000 --- a/backend/flake8_plugins/tests/test_flake8_baserow_docstring.py +++ /dev/null @@ -1,85 +0,0 @@ -# Core Library -import ast -from typing import Set - -from flake8_baserow.docstring import ERR_MSG -from flake8_baserow.docstring import Plugin as DocstringPlugin - - -def _results(s: str) -> Set[str]: - tree = ast.parse(s) - plugin = DocstringPlugin(tree, lines=s) - return {f"{line}:{col} {msg}" for line, col, msg, _ in plugin.run()} - - -def test_trivial_case(): - assert _results("\n") == set() - - -def test_plugin_version(): - assert isinstance(DocstringPlugin.version, str) - assert "." 
in DocstringPlugin.version - - -def test_plugin_name(): - assert isinstance(DocstringPlugin.name, str) - - -ERR_FUNC = """ -def foo(): - ''' - foo - ''' - print("hello") -""" - -ERR_FUNC_2 = """ -def foo(): - '''foo''' - print("hello") -""" - -ERR_FUNC_3 = """ -def foo(): - '''foo''' - # print hello - print("hello") -""" - - -def test_missing_empty_line_after_docstring(): - for func, lineno in [(ERR_FUNC, 5), (ERR_FUNC_2, 3), (ERR_FUNC_3, 3)]: - assert _results(func) == {f"{lineno}:4 {ERR_MSG}"} - - -OK_FUNC = """ -def foo(): - ''' - foo - - ''' - - print("hello") -""" - -OK_FUNC_2 = """ -def foo(): - '''foo''' - - print("hello") -""" - -OK_FUNC_3 = """ -def foo(): - ''' - foo - ''' - - # print hello - print("hello") -""" - - -def test_noerrors_if_empty_line_after_docstring_is_present(): - for func in [OK_FUNC, OK_FUNC_2, OK_FUNC_3]: - assert _results(func) == set() diff --git a/backend/flake8_plugins/tests/test_flake8_baserow_psycopg.py b/backend/flake8_plugins/tests/test_flake8_baserow_psycopg.py deleted file mode 100644 index 725224739b..0000000000 --- a/backend/flake8_plugins/tests/test_flake8_baserow_psycopg.py +++ /dev/null @@ -1,41 +0,0 @@ -import ast -from flake8_baserow.psycopg import BaserowPsycopgChecker - - -def run_checker(code: str): - tree = ast.parse(code) - checker = BaserowPsycopgChecker(tree, "test.py") - return list(checker.run()) - - -def test_direct_import(): - code = """ -import psycopg -import psycopg2 -from psycopg import connect -from psycopg2 import connect as pg_connect - """ - errors = run_checker(code) - assert len(errors) == 4 - assert all(error[2].startswith("BPG001") for error in errors) - - -def test_allowed_import(): - code = """ -from baserow.core.psycopg import connect -from baserow.core.psycopg import psycopg2 - """ - errors = run_checker(code) - assert len(errors) == 0 - - -def test_mixed_imports(): - code = """ -import psycopg -from baserow.core.psycopg import connect -from psycopg2 import connect as pg_connect - """ - errors 
= run_checker(code) - assert len(errors) == 2 - assert errors[0][2].startswith("BPG001") - assert errors[1][2].startswith("BPG001") diff --git a/backend/justfile b/backend/justfile index 4b96dfbba7..e367a73cd6 100644 --- a/backend/justfile +++ b/backend/justfile @@ -59,8 +59,6 @@ backend *ARGS: alias b := backend # Common shortcuts -alias l := lint -alias f := fix alias t := test alias dev := run-dev-server alias serve := run-dev-server @@ -82,7 +80,7 @@ repo_root := clean(justfile_directory() / "..") # Helper to load .env.local if present and set PYTHONPATH with absolute paths # Include this at the start of bash recipes that need env vars -_load_env := 'if [ -f "../.env.local" ]; then set -a; source "../.env.local"; set +a; fi; export PYTHONPATH="' + repo_root / 'backend/src:' + repo_root / 'premium/backend/src:' + repo_root / 'enterprise/backend/src:' + repo_root / 'backend/flake8_plugins:' + repo_root / 'backend/tests:' + repo_root / 'premium/backend/tests:' + repo_root / 'enterprise/backend/tests${PYTHONPATH:+:$PYTHONPATH}"' +_load_env := 'if [ -f "../.env.local" ]; then set -a; source "../.env.local"; set +a; fi; export PYTHONPATH="' + repo_root / 'backend/src:' + repo_root / 'premium/backend/src:' + repo_root / 'enterprise/backend/src:' + repo_root / 'backend/tests:' + repo_root / 'premium/backend/tests:' + repo_root / 'enterprise/backend/tests${PYTHONPATH:+:$PYTHONPATH}"' # Source directories backend_source_dirs := "src/ ../premium/backend/src/ ../enterprise/backend/src/" @@ -201,36 +199,25 @@ uv *ARGS: # Run all lint checks [group('4 - code-quality')] -lint: _check-dev - #!/usr/bin/env bash - set -euo pipefail - {{ _load_env }} - {{ uv_run }} flake8 {{ backend_source_dirs }} {{ backend_tests_dirs }} - {{ uv_run }} black {{ backend_source_dirs }} {{ backend_tests_dirs }} --check --config=pyproject.toml - {{ uv_run }} isort --check --skip generated {{ backend_source_dirs }} {{ backend_tests_dirs }} - DJANGO_SETTINGS_MODULE={{ django_settings }} {{ uv_run }} 
baserow makemigrations --dry-run --check - {{ uv_run }} bandit -r --exclude src/baserow/test_utils,src/baserow/config/settings/local.py {{ backend_source_dirs }} - - -# Format code with black -[group('4 - code-quality')] -format: _check-dev +check: _check-dev #!/usr/bin/env bash set -euo pipefail {{ _load_env }} - {{ uv_run }} black --config=pyproject.toml {{ backend_source_dirs }} {{ backend_tests_dirs }} + + {{ uv_run }} ruff check {{ backend_source_dirs }} {{ backend_tests_dirs }} + {{ uv_run }} ruff format --check {{ backend_source_dirs }} {{ backend_tests_dirs }} -# Sort imports with isort -[group('4 - code-quality')] -sort: _check-dev - #!/usr/bin/env bash - set -euo pipefail - {{ _load_env }} - {{ uv_run }} isort --skip generated --overwrite-in-place {{ backend_source_dirs }} {{ backend_tests_dirs }} +alias lint := check +alias l := check # Fix code style (sort imports + format) [group('4 - code-quality')] -fix: sort format +fix: _check-dev + {{ uv_run }} ruff check --fix {{ backend_source_dirs }} {{ backend_tests_dirs }} + {{ uv_run }} ruff format {{ backend_source_dirs }} {{ backend_tests_dirs }} + +alias format := fix +alias f := fix # ============================================================================= # Testing @@ -344,6 +331,14 @@ ci-check-startup-oss-only: _check-dev {{ uv_run }} gunicorn --workers=1 -b 0.0.0.0:8002 \ -k uvicorn.workers.UvicornWorker baserow.config.asgi:application +[group('8 - ci')] +check-migrations: _check-dev + #!/usr/bin/env bash + set -euo pipefail + {{ _load_env }} + + DJANGO_SETTINGS_MODULE={{ django_settings }} {{ uv_run }} baserow makemigrations --dry-run --check + # ============================================================================= # Translations # ============================================================================= diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 91baef0fc7..f30d172570 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -117,7 +117,7 @@ baserow 
= "baserow.manage:main" [dependency-groups] dev = [ - "flake8==7.0.0", + "ruff>=0.8.0", "pytest==8.2.0", "pytest-django==4.8.0", "pytest-env==1.1.3", @@ -129,7 +129,6 @@ dev = [ "responses==0.25.0", "watchdog==4.0.0", "argh==0.31.2", - "black==23.3.0", "pyinstrument==4.6.2", "pyfakefs==5.4.1", "pytest-xdist==3.6.1", @@ -140,8 +139,6 @@ dev = [ "pytest-html==4.1.1", "coverage==7.5.1", "pytest-split==0.8.2", - "bandit==1.7.8", - "autopep8==2.1.0", "pytest-unordered==0.6.0", "debugpy==1.8.1", "backports.cached-property==1.0.2", @@ -152,7 +149,6 @@ dev = [ "django-stubs-ext==0.8.0", "mypy==1.10.0", "mypy-extensions==1.0.0", - "isort==5.13.2", "ipython", "fakeredis[lua]==2.23.2", "pytest-retry==1.7.0", @@ -172,19 +168,43 @@ pattern = "VERSION = \"(?P[^\"]+)\"" [tool.hatch.build.targets.wheel] packages = ["src/baserow"] -[tool.black] -exclude = """ -/( - | migrations - | generated -)/ -""" -target-version = ["py311"] -required-version = "23" - -[tool.isort] -profile = "black" -skip = ["migrations", "generated"] +[tool.ruff] +target-version = "py311" +line-length = 88 +exclude = ["**/migrations/**", "**/generated/**"] + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "S", # flake8-bandit (security) +] +ignore = [ + "E501", # line too long (handled by formatter) + "E731", # lambda assignment (common pattern) + "S101", # use of assert (fine in tests) + "S105", # possible hardcoded password (false positives on variable names) + "S110", # try-except-pass (intentional in some cases) + "S112", # try-except-continue (intentional in some cases) + "S311", # pseudo-random generators (not for crypto) + "S611", # use of exec (intentional in some places) + "F541", # f-string without placeholders (common pattern) +] + +[tool.ruff.lint.per-file-ignores] +"tests/**/*" = ["S101", "S105", "S106", "S608", "F841"] +"../**/tests/**/*" = ["S101", "S105", "S106", "S608", "F841"] +"src/baserow/test_utils/**/*" = ["S101", 
"S105", "S106", "S608", "F841"] +"src/baserow/config/settings/local.py" = ["S105"] + +[tool.ruff.lint.isort] +known-first-party = ["baserow", "baserow_premium", "baserow_enterprise"] +section-order = ["future", "standard-library", "django", "third-party", "first-party", "local-folder"] + +[tool.ruff.lint.isort.sections] +django = ["django"] [tool.uv] package = true diff --git a/backend/src/baserow/api/admin/views.py b/backend/src/baserow/api/admin/views.py index be9905c334..4ee8097977 100755 --- a/backend/src/baserow/api/admin/views.py +++ b/backend/src/baserow/api/admin/views.py @@ -132,8 +132,7 @@ def get_extend_schema_parameters( name="size", location=OpenApiParameter.QUERY, type=OpenApiTypes.INT, - description=f"Defines how many {name} should be returned per " - f"page.", + description=f"Defines how many {name} should be returned per page.", ), *(extra_parameters or []), ], diff --git a/backend/src/baserow/api/decorators.py b/backend/src/baserow/api/decorators.py index f7047a7d2f..3438895d8f 100755 --- a/backend/src/baserow/api/decorators.py +++ b/backend/src/baserow/api/decorators.py @@ -21,9 +21,13 @@ ) from .exceptions import QueryParameterValidationException -from .utils import ExceptionMappingType, get_request +from .utils import ( + ExceptionMappingType, + get_request, + validate_data, + validate_data_custom_fields, +) from .utils import map_exceptions as map_exceptions_utility -from .utils import validate_data, validate_data_custom_fields def map_exceptions(exceptions: ExceptionMappingType = None): diff --git a/backend/src/baserow/api/extensions.py b/backend/src/baserow/api/extensions.py index 1aa23493ef..ffa5299e98 100644 --- a/backend/src/baserow/api/extensions.py +++ b/backend/src/baserow/api/extensions.py @@ -72,7 +72,7 @@ def get_name(self): def map_serializer(self, auto_schema, direction): try: - base_ref_name = getattr(getattr(self.target.base_class, "Meta"), "ref_name") + base_ref_name = self.target.base_class.Meta.ref_name except AttributeError: 
base_ref_name = None diff --git a/backend/src/baserow/api/jobs/serializers.py b/backend/src/baserow/api/jobs/serializers.py index 2e0159670f..76d767c125 100644 --- a/backend/src/baserow/api/jobs/serializers.py +++ b/backend/src/baserow/api/jobs/serializers.py @@ -204,9 +204,9 @@ def map_serializer(self, auto_schema, direction): field_schema = auto_schema._map_serializer_field(field, direction) help_text = field_schema.get("description", "") - field_schema[ - "description" - ] = f"**[Only for type='{job_type.type}']** {help_text}" + field_schema["description"] = ( + f"**[Only for type='{job_type.type}']** {help_text}" + ) if field_name not in properties: properties[field_name] = field_schema diff --git a/backend/src/baserow/api/settings/serializers.py b/backend/src/baserow/api/settings/serializers.py index 902ce64c58..fce2ec6835 100644 --- a/backend/src/baserow/api/settings/serializers.py +++ b/backend/src/baserow/api/settings/serializers.py @@ -41,9 +41,9 @@ def to_representation(self, instance): representation = super().to_representation(instance) # TODO Remove in a future release once email_verification is null=False if representation["email_verification"] is None: - representation[ - "email_verification" - ] = Settings.EmailVerificationOptions.NO_VERIFICATION + representation["email_verification"] = ( + Settings.EmailVerificationOptions.NO_VERIFICATION + ) return representation diff --git a/backend/src/baserow/api/user_sources/views.py b/backend/src/baserow/api/user_sources/views.py index 2139c4d1a7..d9e16dffc9 100644 --- a/backend/src/baserow/api/user_sources/views.py +++ b/backend/src/baserow/api/user_sources/views.py @@ -576,8 +576,7 @@ class UserSourceForceObtainJSONWebToken(APIView): responses={ 200: authenticate_schema, 401: { - "description": "An active user with the provided ID " - "could not be found." + "description": "An active user with the provided ID could not be found." 
}, }, ) diff --git a/backend/src/baserow/api/utils.py b/backend/src/baserow/api/utils.py index d0a572379d..323945e93a 100644 --- a/backend/src/baserow/api/utils.py +++ b/backend/src/baserow/api/utils.py @@ -23,7 +23,7 @@ from baserow.core.exceptions import InstanceTypeDoesNotExist if TYPE_CHECKING: - from baserow.core.registry import Registry, Instance + from baserow.core.registry import Instance, Registry from .exceptions import RequestBodyValidationException @@ -366,7 +366,7 @@ def get_serializer_class( meta_extra_kwargs = meta_extra_kwargs or {} if hasattr(base_class, "Meta"): - extends_meta = getattr(base_class, "Meta") + extends_meta = base_class.Meta field_names = list(extends_meta.fields) + list(field_names) meta_extra_kwargs.update(getattr(extends_meta, "extra_kwargs", {})) diff --git a/backend/src/baserow/api/workspaces/users/errors.py b/backend/src/baserow/api/workspaces/users/errors.py index 30f757da66..4cb7e75467 100644 --- a/backend/src/baserow/api/workspaces/users/errors.py +++ b/backend/src/baserow/api/workspaces/users/errors.py @@ -13,6 +13,5 @@ ERROR_CANNOT_DELETE_YOURSELF_FROM_GROUP = ( "ERROR_CANNOT_DELETE_YOURSELF_FROM_GROUP", HTTP_400_BAD_REQUEST, - "You cannot delete yourself from the group. Use the leave group endpoint for " - "that.", + "You cannot delete yourself from the group. 
Use the leave group endpoint for that.", ) diff --git a/backend/src/baserow/api/workspaces/users/views.py b/backend/src/baserow/api/workspaces/users/views.py index 8bbf68578f..c38807308f 100755 --- a/backend/src/baserow/api/workspaces/users/views.py +++ b/backend/src/baserow/api/workspaces/users/views.py @@ -214,8 +214,7 @@ def patch(self, request, data, workspace_user_id): name="workspace_user_id", location=OpenApiParameter.PATH, type=OpenApiTypes.INT, - description="Deletes the workspace user related to the provided " - "value.", + description="Deletes the workspace user related to the provided value.", ) ], tags=["Workspaces"], diff --git a/backend/src/baserow/config/settings/base.py b/backend/src/baserow/config/settings/base.py index 8ccddc0b2d..c64116d69d 100644 --- a/backend/src/baserow/config/settings/base.py +++ b/backend/src/baserow/config/settings/base.py @@ -296,8 +296,7 @@ ) BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS = int( # Default TTL is 5 minutes - os.getenv("BASEROW_BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS") - or 300 + os.getenv("BASEROW_BUILDER_DISPATCH_ACTION_CACHE_TTL_SECONDS") or 300 ) diff --git a/backend/src/baserow/config/settings/heroku.py b/backend/src/baserow/config/settings/heroku.py index 76d5046dac..adbfdd39b5 100644 --- a/backend/src/baserow/config/settings/heroku.py +++ b/backend/src/baserow/config/settings/heroku.py @@ -1,3 +1,4 @@ +import os import ssl from typing import Optional from urllib.parse import urlparse @@ -63,10 +64,12 @@ def get(self): # started with a limit of 10, which is the default value. This is needed because the # `heroku-redis:mini` doesn't accept more than 20 connections. 
CELERY_BROKER_POOL_LIMIT = min( - 4 * int(os.getenv("BASEROW_AMOUNT_OF_WORKERS", "1")), 10 # noqa: F405 + 4 * int(os.getenv("BASEROW_AMOUNT_OF_WORKERS", "1")), # noqa: F405 + 10, ) CELERY_REDIS_MAX_CONNECTIONS = min( - 4 * int(os.getenv("BASEROW_AMOUNT_OF_WORKERS", "1")), 10 # noqa: F405 + 4 * int(os.getenv("BASEROW_AMOUNT_OF_WORKERS", "1")), # noqa: F405 + 10, ) HEROKU_ENABLED = True diff --git a/backend/src/baserow/config/settings/test.py b/backend/src/baserow/config/settings/test.py index 1c81786942..8245a298ce 100644 --- a/backend/src/baserow/config/settings/test.py +++ b/backend/src/baserow/config/settings/test.py @@ -79,8 +79,8 @@ def getenv_for_tests(key: str, default: str = "") -> str: # Make sure that we are not using the `MEDIA_URL` environment variable because that # could break the tests. They are expecting it to be 'http://localhost:8000/media/' # because that is default value in `base.py`. -MEDIA_ROOT = "/tmp/media-test/" # nosec MEDIA_URL = "http://localhost:8000/media/" +MEDIA_ROOT = "/tmp/media-test/" # noqa: S108 CACHES = { diff --git a/backend/src/baserow/contrib/automation/automation_dispatch_context.py b/backend/src/baserow/contrib/automation/automation_dispatch_context.py index a93237060b..a6e32ef28b 100644 --- a/backend/src/baserow/contrib/automation/automation_dispatch_context.py +++ b/backend/src/baserow/contrib/automation/automation_dispatch_context.py @@ -123,5 +123,4 @@ def searchable_fields(self): def validate_filter_search_sort_fields( self, fields: List[str], refinement: ServiceAdhocRefinements - ): - ... + ): ... 
diff --git a/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py b/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py index 448646ec11..49283c1e53 100644 --- a/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py +++ b/backend/src/baserow/contrib/automation/data_providers/data_provider_types.py @@ -13,8 +13,7 @@ SENTINEL = "__no_results__" -class AutomationDataProviderType(DataProviderType, ABC): - ... +class AutomationDataProviderType(DataProviderType, ABC): ... class PreviousNodeProviderType(AutomationDataProviderType): diff --git a/backend/src/baserow/contrib/automation/nodes/models.py b/backend/src/baserow/contrib/automation/nodes/models.py index 1e80bc5d10..84bd88b39e 100644 --- a/backend/src/baserow/contrib/automation/nodes/models.py +++ b/backend/src/baserow/contrib/automation/nodes/models.py @@ -166,69 +166,52 @@ class Meta: abstract = True -class LocalBaserowRowsCreatedTriggerNode(AutomationTriggerNode): - ... +class LocalBaserowRowsCreatedTriggerNode(AutomationTriggerNode): ... -class LocalBaserowRowsUpdatedTriggerNode(AutomationTriggerNode): - ... +class LocalBaserowRowsUpdatedTriggerNode(AutomationTriggerNode): ... -class LocalBaserowRowsDeletedTriggerNode(AutomationTriggerNode): - ... +class LocalBaserowRowsDeletedTriggerNode(AutomationTriggerNode): ... -class CorePeriodicTriggerNode(AutomationTriggerNode): - ... +class CorePeriodicTriggerNode(AutomationTriggerNode): ... -class CoreHTTPTriggerNode(AutomationTriggerNode): - ... +class CoreHTTPTriggerNode(AutomationTriggerNode): ... -class LocalBaserowCreateRowActionNode(AutomationActionNode): - ... +class LocalBaserowCreateRowActionNode(AutomationActionNode): ... -class LocalBaserowUpdateRowActionNode(AutomationActionNode): - ... +class LocalBaserowUpdateRowActionNode(AutomationActionNode): ... -class LocalBaserowDeleteRowActionNode(AutomationActionNode): - ... +class LocalBaserowDeleteRowActionNode(AutomationActionNode): ... 
-class LocalBaserowGetRowActionNode(AutomationActionNode): - ... +class LocalBaserowGetRowActionNode(AutomationActionNode): ... -class LocalBaserowListRowsActionNode(AutomationActionNode): - ... +class LocalBaserowListRowsActionNode(AutomationActionNode): ... -class LocalBaserowAggregateRowsActionNode(AutomationActionNode): - ... +class LocalBaserowAggregateRowsActionNode(AutomationActionNode): ... -class CoreHTTPRequestActionNode(AutomationActionNode): - ... +class CoreHTTPRequestActionNode(AutomationActionNode): ... -class CoreSMTPEmailActionNode(AutomationActionNode): - ... +class CoreSMTPEmailActionNode(AutomationActionNode): ... -class CoreRouterActionNode(AutomationActionNode): - ... +class CoreRouterActionNode(AutomationActionNode): ... -class CoreIteratorActionNode(AutomationActionNode): - ... +class CoreIteratorActionNode(AutomationActionNode): ... -class AIAgentActionNode(AutomationActionNode): - ... +class AIAgentActionNode(AutomationActionNode): ... -class SlackWriteMessageActionNode(AutomationActionNode): - ... +class SlackWriteMessageActionNode(AutomationActionNode): ... diff --git a/backend/src/baserow/contrib/automation/nodes/registries.py b/backend/src/baserow/contrib/automation/nodes/registries.py index 0c741a3950..0800daa126 100644 --- a/backend/src/baserow/contrib/automation/nodes/registries.py +++ b/backend/src/baserow/contrib/automation/nodes/registries.py @@ -46,8 +46,7 @@ class AutomationNodeType( is_container = False - class SerializedDict(AutomationNodeDict): - ... + class SerializedDict(AutomationNodeDict): ... @property def allowed_fields(self): @@ -282,8 +281,7 @@ def prepare_values( return values - def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]: - ... + def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]: ... 
def dispatch( self, diff --git a/backend/src/baserow/contrib/automation/workflows/handler.py b/backend/src/baserow/contrib/automation/workflows/handler.py index 96d3650e91..222c88ee5c 100644 --- a/backend/src/baserow/contrib/automation/workflows/handler.py +++ b/backend/src/baserow/contrib/automation/workflows/handler.py @@ -580,9 +580,9 @@ def import_workflow_only( graph=serialized_workflow.get("graph", {}), ) - id_mapping["automation_workflows"][ - serialized_workflow["id"] - ] = workflow_instance.id + id_mapping["automation_workflows"][serialized_workflow["id"]] = ( + workflow_instance.id + ) if progress is not None: progress.increment(state=IMPORT_SERIALIZED_IMPORTING) @@ -740,9 +740,8 @@ def _check_too_many_errors(self, workflow: AutomationWorkflow) -> None: max_errors = settings.AUTOMATION_WORKFLOW_MAX_CONSECUTIVE_ERRORS statuses = ( - AutomationWorkflowHistory.objects.filter(workflow=workflow).order_by( - "-started_on" - ) + AutomationWorkflowHistory.objects.filter(workflow=workflow) + .order_by("-started_on") # +1 because we will ignore the latest entry, since the workflow may # have just started. 
.values_list("status", flat=True)[: max_errors + 1] diff --git a/backend/src/baserow/contrib/builder/api/data_sources/serializers.py b/backend/src/baserow/contrib/builder/api/data_sources/serializers.py index 013a5fe813..e349c82e1a 100644 --- a/backend/src/baserow/contrib/builder/api/data_sources/serializers.py +++ b/backend/src/baserow/contrib/builder/api/data_sources/serializers.py @@ -175,9 +175,9 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) for data_provider_type in builder_data_provider_type_registry.get_all(): - self.fields[ - data_provider_type.type - ] = data_provider_type.get_request_serializer() + self.fields[data_provider_type.type] = ( + data_provider_type.get_request_serializer() + ) def to_internal_value(self, data): # Accept either a string or a dict diff --git a/backend/src/baserow/contrib/builder/api/data_sources/views.py b/backend/src/baserow/contrib/builder/api/data_sources/views.py index 2baf35824f..c0db804981 100644 --- a/backend/src/baserow/contrib/builder/api/data_sources/views.py +++ b/backend/src/baserow/contrib/builder/api/data_sources/views.py @@ -466,8 +466,7 @@ class DispatchDataSourceView(APIView): tags=["Builder data sources"], operation_id="dispatch_builder_page_data_source", description=( - "Dispatches the service of the related data_source and returns " - "the result." + "Dispatches the service of the related data_source and returns the result." 
), request=DispatchDataSourceRequestSerializer, responses={ diff --git a/backend/src/baserow/contrib/builder/api/domains/public_views.py b/backend/src/baserow/contrib/builder/api/domains/public_views.py index 411557923c..43ecf40d6b 100644 --- a/backend/src/baserow/contrib/builder/api/domains/public_views.py +++ b/backend/src/baserow/contrib/builder/api/domains/public_views.py @@ -463,8 +463,7 @@ class PublicDispatchDataSourceView(APIView): tags=["Builder data sources"], operation_id="dispatch_public_builder_page_data_source", description=( - "Dispatches the service of the related data_source and returns " - "the result." + "Dispatches the service of the related data_source and returns the result." ), request=DispatchDataSourceRequestSerializer, responses={ diff --git a/backend/src/baserow/contrib/builder/api/workflow_actions/views.py b/backend/src/baserow/contrib/builder/api/workflow_actions/views.py index 36fe4b334a..f35488a91d 100644 --- a/backend/src/baserow/contrib/builder/api/workflow_actions/views.py +++ b/backend/src/baserow/contrib/builder/api/workflow_actions/views.py @@ -410,7 +410,9 @@ def post(self, request, workflow_action_id: int): ) response = BuilderWorkflowActionService().dispatch_action( - request.user, workflow_action, dispatch_context # type: ignore + request.user, + workflow_action, + dispatch_context, # type: ignore ) return Response(response.data, status=response.status) diff --git a/backend/src/baserow/contrib/builder/data_sources/handler.py b/backend/src/baserow/contrib/builder/data_sources/handler.py index 0cd65a13d0..9de3b323b9 100644 --- a/backend/src/baserow/contrib/builder/data_sources/handler.py +++ b/backend/src/baserow/contrib/builder/data_sources/handler.py @@ -689,8 +689,8 @@ def import_data_source( name=serialized_data_source["name"], ) - id_mapping["builder_data_sources"][ - serialized_data_source["id"] - ] = data_source.id + id_mapping["builder_data_sources"][serialized_data_source["id"]] = ( + data_source.id + ) return 
data_source diff --git a/backend/src/baserow/contrib/builder/data_sources/service.py b/backend/src/baserow/contrib/builder/data_sources/service.py index e9d3cc82ac..37df31de7f 100644 --- a/backend/src/baserow/contrib/builder/data_sources/service.py +++ b/backend/src/baserow/contrib/builder/data_sources/service.py @@ -322,12 +322,12 @@ def dispatch_data_sources( "external", {} ).get(data_source.service.id, []) - new_results[ - data_source.id - ] = data_source.service.get_type().sanitize_result( - data_source.service.specific, - results[data_source.id], - allowed_field_names, + new_results[data_source.id] = ( + data_source.service.get_type().sanitize_result( + data_source.service.specific, + results[data_source.id], + allowed_field_names, + ) ) return new_results diff --git a/backend/src/baserow/contrib/builder/elements/collection_field_types.py b/backend/src/baserow/contrib/builder/elements/collection_field_types.py index 5e6601fda0..11bf6b1bd3 100644 --- a/backend/src/baserow/contrib/builder/elements/collection_field_types.py +++ b/backend/src/baserow/contrib/builder/elements/collection_field_types.py @@ -200,9 +200,9 @@ def formula_generator( query_parameter.get("value") ) if new_formula is not None: - collection_field.config["query_parameters"][index][ - "value" - ] = new_formula + collection_field.config["query_parameters"][index]["value"] = ( + new_formula + ) yield collection_field def deserialize_property( diff --git a/backend/src/baserow/contrib/builder/elements/element_types.py b/backend/src/baserow/contrib/builder/elements/element_types.py index 46a05485ce..1c386bfd22 100644 --- a/backend/src/baserow/contrib/builder/elements/element_types.py +++ b/backend/src/baserow/contrib/builder/elements/element_types.py @@ -2158,8 +2158,7 @@ class MultiPageContainerElementType( class SerializedDict( MultiPageElementTypeMixin.SerializedDict, ContainerElementTypeMixin.SerializedDict, - ): - ... + ): ... 
class HeaderElementType(MultiPageContainerElementType): diff --git a/backend/src/baserow/contrib/builder/elements/handler.py b/backend/src/baserow/contrib/builder/elements/handler.py index 8b5c9232cf..ea43d94f2e 100644 --- a/backend/src/baserow/contrib/builder/elements/handler.py +++ b/backend/src/baserow/contrib/builder/elements/handler.py @@ -280,8 +280,8 @@ def get_elements( # both cache keys in case the specific argument has changed. if base_queryset is not None: use_cache = False - setattr(page, "_page_elements", None) - setattr(page, "_page_elements_specific", None) + page._page_elements = None + page._page_elements_specific = None elements_cache = getattr(page, cache_key, None) if use_cache and elements_cache is not None: @@ -637,9 +637,9 @@ def _duplicate_element_recursive( elements_and_workflow_actions_duplicated["elements"] += children_duplicated[ "elements" ] - elements_and_workflow_actions_duplicated[ - "workflow_actions" - ] += children_duplicated["workflow_actions"] + elements_and_workflow_actions_duplicated["workflow_actions"] += ( + children_duplicated["workflow_actions"] + ) return elements_and_workflow_actions_duplicated @@ -768,8 +768,8 @@ def import_element( **kwargs, ) - id_mapping["builder_page_elements"][ - serialized_element["id"] - ] = created_instance.id + id_mapping["builder_page_elements"][serialized_element["id"]] = ( + created_instance.id + ) return created_instance diff --git a/backend/src/baserow/contrib/builder/elements/registries.py b/backend/src/baserow/contrib/builder/elements/registries.py index eff2677124..9affde4b55 100644 --- a/backend/src/baserow/contrib/builder/elements/registries.py +++ b/backend/src/baserow/contrib/builder/elements/registries.py @@ -257,9 +257,9 @@ def import_serialized( [m.save() for m in updated_models] # Add created instance to an element cache - cache.setdefault("imported_element_map", {})[ - created_instance.id - ] = created_instance + cache.setdefault("imported_element_map", {})[created_instance.id] 
= ( + created_instance + ) return created_instance @@ -541,9 +541,9 @@ def import_serialized( # Map the old uid to the new uid. This ensures that any workflow # actions with an `event` pointing to the old uid will have the # pointer to the new uid. - id_mapping["builder_element_event_uids"][ - serialized_values["uid"] - ] = deserialized_uid + id_mapping["builder_element_event_uids"][serialized_values["uid"]] = ( + deserialized_uid + ) deserialized_values = { "uid": deserialized_uid, diff --git a/backend/src/baserow/contrib/builder/theme/theme_config_block_types.py b/backend/src/baserow/contrib/builder/theme/theme_config_block_types.py index 9b349bd67e..4a5563d25b 100644 --- a/backend/src/baserow/contrib/builder/theme/theme_config_block_types.py +++ b/backend/src/baserow/contrib/builder/theme/theme_config_block_types.py @@ -79,10 +79,10 @@ def import_serialized( ): # Translate from old color property names to new names for compat with templates for level in range(3): - if f"heading_{level+1}_color" in serialized_values: - serialized_values[ - f"heading_{level+1}_text_color" - ] = serialized_values.pop(f"heading_{level+1}_color") + if f"heading_{level + 1}_color" in serialized_values: + serialized_values[f"heading_{level + 1}_text_color"] = ( + serialized_values.pop(f"heading_{level + 1}_color") + ) return super().import_serialized( parent, serialized_values, id_mapping, files_zip, storage, cache diff --git a/backend/src/baserow/contrib/builder/workflow_actions/models.py b/backend/src/baserow/contrib/builder/workflow_actions/models.py index 617db002db..5fb66a6ae9 100644 --- a/backend/src/baserow/contrib/builder/workflow_actions/models.py +++ b/backend/src/baserow/contrib/builder/workflow_actions/models.py @@ -103,29 +103,22 @@ class Meta: abstract = True -class LocalBaserowCreateRowWorkflowAction(BuilderWorkflowServiceAction): - ... +class LocalBaserowCreateRowWorkflowAction(BuilderWorkflowServiceAction): ... 
-class LocalBaserowUpdateRowWorkflowAction(BuilderWorkflowServiceAction): - ... +class LocalBaserowUpdateRowWorkflowAction(BuilderWorkflowServiceAction): ... -class LocalBaserowDeleteRowWorkflowAction(BuilderWorkflowServiceAction): - ... +class LocalBaserowDeleteRowWorkflowAction(BuilderWorkflowServiceAction): ... -class CoreHTTPRequestWorkflowAction(BuilderWorkflowServiceAction): - ... +class CoreHTTPRequestWorkflowAction(BuilderWorkflowServiceAction): ... -class CoreSMTPEmailWorkflowAction(BuilderWorkflowServiceAction): - ... +class CoreSMTPEmailWorkflowAction(BuilderWorkflowServiceAction): ... -class AIAgentWorkflowAction(BuilderWorkflowServiceAction): - ... +class AIAgentWorkflowAction(BuilderWorkflowServiceAction): ... -class SlackWriteMessageWorkflowAction(BuilderWorkflowServiceAction): - ... +class SlackWriteMessageWorkflowAction(BuilderWorkflowServiceAction): ... diff --git a/backend/src/baserow/contrib/builder/workflow_actions/workflow_action_types.py b/backend/src/baserow/contrib/builder/workflow_actions/workflow_action_types.py index 4d2427a534..6320f77dd5 100644 --- a/backend/src/baserow/contrib/builder/workflow_actions/workflow_action_types.py +++ b/backend/src/baserow/contrib/builder/workflow_actions/workflow_action_types.py @@ -122,8 +122,7 @@ def serializer_field_overrides(self): class SerializedDict( BuilderWorkflowActionDict, NavigationElementManager.SerializedDict, - ): - ... + ): ... def get_pytest_params(self, pytest_data_fixture): return NavigationElementManager().get_pytest_params(pytest_data_fixture) @@ -183,8 +182,7 @@ class LogoutWorkflowActionType(BuilderWorkflowActionType): type = "logout" model_class = LogoutWorkflowAction - class SerializedDict(BuilderWorkflowActionDict): - ... + class SerializedDict(BuilderWorkflowActionDict): ... 
def get_pytest_params(self, pytest_data_fixture) -> Dict[str, Any]: return {} @@ -417,11 +415,10 @@ def enhance_queryset(self, queryset): queryset=specific_queryset( Service.objects.all(), per_content_type_queryset_hook=( - lambda service, queryset: service_type_registry.get_by_model( + lambda service, + queryset: service_type_registry.get_by_model( service - ).enhance_queryset( - queryset - ) + ).enhance_queryset(queryset) ), ), ) diff --git a/backend/src/baserow/contrib/dashboard/api/data_sources/views.py b/backend/src/baserow/contrib/dashboard/api/data_sources/views.py index 82058efa34..12fd1aa762 100644 --- a/backend/src/baserow/contrib/dashboard/api/data_sources/views.py +++ b/backend/src/baserow/contrib/dashboard/api/data_sources/views.py @@ -213,8 +213,7 @@ class DispatchDashboardDataSourceView(APIView): tags=["Dashboard data sources"], operation_id="dispatch_dashboard_data_source", description=( - "Dispatches the service of the related data source and returns " - "the result." + "Dispatches the service of the related data source and returns the result." ), request=None, responses={ diff --git a/backend/src/baserow/contrib/dashboard/data_sources/dispatch_context.py b/backend/src/baserow/contrib/dashboard/data_sources/dispatch_context.py index 5947f76f95..df6e64aa2a 100644 --- a/backend/src/baserow/contrib/dashboard/data_sources/dispatch_context.py +++ b/backend/src/baserow/contrib/dashboard/data_sources/dispatch_context.py @@ -51,5 +51,4 @@ def sortings(self): def public_allowed_properties(self): return None - def validate_filter_search_sort_fields(self): - ... + def validate_filter_search_sort_fields(self): ... 
diff --git a/backend/src/baserow/contrib/dashboard/data_sources/handler.py b/backend/src/baserow/contrib/dashboard/data_sources/handler.py index c4de9f88f0..264d7311b6 100644 --- a/backend/src/baserow/contrib/dashboard/data_sources/handler.py +++ b/backend/src/baserow/contrib/dashboard/data_sources/handler.py @@ -394,8 +394,8 @@ def import_data_source( name=serialized_data_source["name"], ) - id_mapping["dashboard_data_sources"][ - serialized_data_source["id"] - ] = data_source.id + id_mapping["dashboard_data_sources"][serialized_data_source["id"]] = ( + data_source.id + ) return data_source diff --git a/backend/src/baserow/contrib/database/airtable/airtable_column_types.py b/backend/src/baserow/contrib/database/airtable/airtable_column_types.py index b8175af27f..3a8b354e0c 100644 --- a/backend/src/baserow/contrib/database/airtable/airtable_column_types.py +++ b/backend/src/baserow/contrib/database/airtable/airtable_column_types.py @@ -92,7 +92,7 @@ def to_baserow_export_serialized_value( raw_airtable_table, raw_airtable_row ) import_report.add_failed( - f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"", + f'Row: "{row_name}", field: "{raw_airtable_column["name"]}"', SCOPE_CELL, raw_airtable_table["name"], ERROR_TYPE_DATA_TYPE_MISMATCH, @@ -270,7 +270,7 @@ def to_baserow_export_serialized_value( # 999999999 * 24 * 60 * 60 = 86399999913600. if abs(value) > AIRTABLE_MAX_DURATION_VALUE: import_report.add_failed( - f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"", + f'Row: "{row_name}", field: "{raw_airtable_column["name"]}"', SCOPE_CELL, raw_airtable_table["name"], ERROR_TYPE_DATA_TYPE_MISMATCH, @@ -289,7 +289,7 @@ def to_baserow_export_serialized_value( # If the value can't be parsed as decimal, then it might be corrupt, so we # need to inform the user and skip the import. 
import_report.add_failed( - f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"", + f'Row: "{row_name}", field: "{raw_airtable_column["name"]}"', SCOPE_CELL, raw_airtable_table["name"], ERROR_TYPE_DATA_TYPE_MISMATCH, @@ -496,7 +496,7 @@ def to_baserow_export_serialized_value( raw_airtable_table, raw_airtable_row ) import_report.add_failed( - f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"", + f'Row: "{row_name}", field: "{raw_airtable_column["name"]}"', SCOPE_CELL, raw_airtable_table["name"], ERROR_TYPE_DATA_TYPE_MISMATCH, @@ -700,7 +700,7 @@ def to_baserow_export_serialized_value( raw_airtable_table, raw_airtable_row ) import_report.add_failed( - f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"", + f'Row: "{row_name}", field: "{raw_airtable_column["name"]}"', SCOPE_CELL, raw_airtable_table["name"], ERROR_TYPE_DATA_TYPE_MISMATCH, @@ -904,7 +904,7 @@ def to_baserow_export_serialized_value( raw_airtable_table, raw_airtable_row ) import_report.add_failed( - f"Row: \"{row_name}\", field: \"{raw_airtable_column['name']}\"", + f'Row: "{row_name}", field: "{raw_airtable_column["name"]}"', SCOPE_CELL, raw_airtable_table["name"], ERROR_TYPE_DATA_TYPE_MISMATCH, diff --git a/backend/src/baserow/contrib/database/airtable/handler.py b/backend/src/baserow/contrib/database/airtable/handler.py index 78c4cfc3da..e2e03f024a 100644 --- a/backend/src/baserow/contrib/database/airtable/handler.py +++ b/backend/src/baserow/contrib/database/airtable/handler.py @@ -109,7 +109,7 @@ def download_airtable_file( """ if download_file.type == AIRTABLE_DOWNLOAD_FILE_TYPE_FETCH: - response = requests.get(download_file.url, headers=headers) # nosec B113 + response = requests.get(download_file.url, headers=headers) # noqa: S113 elif download_file.type == AIRTABLE_DOWNLOAD_FILE_TYPE_ATTACHMENT_ENDPOINT: response = AirtableHandler.fetch_attachment( row_id=download_file.row_id, @@ -197,12 +197,12 @@ def fetch_publicly_shared_base( """ url = 
f"{AIRTABLE_BASE_URL}/{share_id}" - response = requests.get( + response = requests.get( # noqa: S113 url, headers=BASE_HEADERS, cookies=config.get_session_cookies(), allow_redirects=False, - ) # nosec B113 + ) if response.status_code == 302 and response.headers.get( "Location", "" @@ -720,7 +720,7 @@ def _parse_table_fields( SCOPE_FIELD, table["name"], ERROR_TYPE_UNSUPPORTED_FEATURE, - f"""Field "{column['name']}" with field type {column["type"]} was not imported because it is not supported.""", + f"""Field "{column["name"]}" with field type {column["type"]} was not imported because it is not supported.""", ) continue @@ -896,7 +896,7 @@ def _parse_rows_and_views( SCOPE_VIEW, table["name"], ERROR_TYPE_UNSUPPORTED_FEATURE, - f"View \"{view['name']}\" was not imported because " + f'View "{view["name"]}" was not imported because ' f"{view['type']} is not supported.", ) continue diff --git a/backend/src/baserow/contrib/database/airtable/registry.py b/backend/src/baserow/contrib/database/airtable/registry.py index ad5f0126ef..2c9248d22f 100644 --- a/backend/src/baserow/contrib/database/airtable/registry.py +++ b/backend/src/baserow/contrib/database/airtable/registry.py @@ -232,7 +232,7 @@ def get_sorts( raw_airtable_table["name"], ERROR_TYPE_UNSUPPORTED_FEATURE, f'The sort on field "{column_name}" was ignored in view' - f' {raw_airtable_view["name"]} because the field is not imported.', + f" {raw_airtable_view['name']} because the field is not imported.", ) continue @@ -250,7 +250,7 @@ def get_sorts( raw_airtable_table["name"], ERROR_TYPE_UNSUPPORTED_FEATURE, f'The sort on field "{baserow_field.name}" was ignored in view' - f' {raw_airtable_view["name"]} because it\'s not possible to ' + f" {raw_airtable_view['name']} because it's not possible to " f"order by that field type.", ) continue @@ -295,7 +295,7 @@ def get_group_bys( raw_airtable_table["name"], ERROR_TYPE_UNSUPPORTED_FEATURE, f'The group by on field "{column_name}" was ignored in view' - f' 
{raw_airtable_view["name"]} because the field was not imported.', + f" {raw_airtable_view['name']} because the field was not imported.", ) continue @@ -366,7 +366,7 @@ def get_filter( ERROR_TYPE_UNSUPPORTED_FEATURE, f'The "{filter_object["operator"]}" filter with value ' f'"{filter_value}" on field "{column_name}" was ignored ' - f'in view {raw_airtable_view["name"]} because the field was not ' + f"in view {raw_airtable_view['name']} because the field was not " f"imported.", ) return None @@ -386,7 +386,7 @@ def get_filter( ERROR_TYPE_UNSUPPORTED_FEATURE, f'The "{filter_object["operator"]}" filter with value ' f'"{filter_value}" on field "{baserow_field.name}" was ' - f'ignored in view {raw_airtable_view["name"]} because it\'s not ' + f"ignored in view {raw_airtable_view['name']} because it's not " f"possible to filter by that field type.", ) return None @@ -422,7 +422,7 @@ def get_filter( ERROR_TYPE_UNSUPPORTED_FEATURE, f'The "{filter_object["operator"]}" filter with value ' f'"{filter_value}" on field "{baserow_field.name}" was ' - f'ignored in view {raw_airtable_view["name"]} because not no ' + f"ignored in view {raw_airtable_view['name']} because not no " f"compatible filter exists.", ) return None @@ -528,7 +528,7 @@ def get_select_column_decoration( SCOPE_VIEW_COLOR, raw_airtable_table["name"], ERROR_TYPE_DATA_TYPE_MISMATCH, - f'The select field coloring was ignored in {raw_airtable_view["name"]} ' + f"The select field coloring was ignored in {raw_airtable_view['name']} " f"because {column_name} does not exist.", ) return None diff --git a/backend/src/baserow/contrib/database/api/constants.py b/backend/src/baserow/contrib/database/api/constants.py index 9d008d7f31..b20f0781fe 100644 --- a/backend/src/baserow/contrib/database/api/constants.py +++ b/backend/src/baserow/contrib/database/api/constants.py @@ -80,7 +80,7 @@ def get_filters_object_description(combine_filters=True, view_is_aggregating=Fal "field to filter on, or the name of the field if " 
"`user_field_names` is true.\n\n" f"The following filters are available: " - f'{", ".join(view_filter_type_registry.get_types())}.' + f"{', '.join(view_filter_type_registry.get_types())}." "\n\n**Please note that if this parameter is provided, all other " "`filter__{field}__{filter}` will be ignored, " "as well as the `filter_type` parameter.**" @@ -149,7 +149,7 @@ def make_adhoc_filter_api_params(combine_filters=True, view_is_aggregating=False f"`filter__field_1__equal=test` then only rows where the value of " f"field_1 is equal to test are going to be returned.\n\n" f"The following filters are available: " - f'{", ".join(view_filter_type_registry.get_types())}.' + f"{', '.join(view_filter_type_registry.get_types())}." "\n\n**Please note that if the `filters` parameter is provided, " "this parameter will be ignored.** \n\n" + ( @@ -167,19 +167,17 @@ def make_adhoc_filter_api_params(combine_filters=True, view_is_aggregating=False location=OpenApiParameter.QUERY, type=OpenApiTypes.STR, description=( - ( - "`AND`: Indicates that the aggregated rows must match all the " - "provided filters.\n\n" - "`OR`: Indicates that the aggregated rows only have to match one " - "of the filters.\n\n" - if view_is_aggregating - else "`AND`: Indicates that the rows must match all the provided " - "filters.\n\n" - "`OR`: Indicates that the rows only have to match one of the " - "filters.\n\nThis works only if two or more filters are provided." - "\n\n**Please note that if the `filters` parameter is provided, " - "this parameter will be ignored.**" - ) + "`AND`: Indicates that the aggregated rows must match all the " + "provided filters.\n\n" + "`OR`: Indicates that the aggregated rows only have to match one " + "of the filters.\n\n" + if view_is_aggregating + else "`AND`: Indicates that the rows must match all the provided " + "filters.\n\n" + "`OR`: Indicates that the rows only have to match one of the " + "filters.\n\nThis works only if two or more filters are provided." 
+ "\n\n**Please note that if the `filters` parameter is provided, " + "this parameter will be ignored.**" ), ), ) diff --git a/backend/src/baserow/contrib/database/api/fields/serializers.py b/backend/src/baserow/contrib/database/api/fields/serializers.py index 358c6df68c..41d0c8aa28 100644 --- a/backend/src/baserow/contrib/database/api/fields/serializers.py +++ b/backend/src/baserow/contrib/database/api/fields/serializers.py @@ -471,11 +471,7 @@ def to_representation(self, instance): workspace = instance.table.database.workspace if not hasattr(workspace, "available_collaborators"): - setattr( - workspace, - "available_collaborators", - workspace.users.order_by("first_name"), - ) + workspace.available_collaborators = workspace.users.order_by("first_name") return [ CollaboratorSerializer(user).data diff --git a/backend/src/baserow/contrib/database/api/tokens/serializers.py b/backend/src/baserow/contrib/database/api/tokens/serializers.py index 2c2233a1ed..1a926bafd5 100644 --- a/backend/src/baserow/contrib/database/api/tokens/serializers.py +++ b/backend/src/baserow/contrib/database/api/tokens/serializers.py @@ -171,7 +171,7 @@ def to_representation(self, value): class TokenPermissionsFieldFix(OpenApiSerializerFieldExtension): target_class = ( - "baserow.contrib.database.api.tokens.serializers." "TokenPermissionsField" + "baserow.contrib.database.api.tokens.serializers.TokenPermissionsField" ) def map_serializer_field(self, auto_schema, direction): diff --git a/backend/src/baserow/contrib/database/api/views/form/serializers.py b/backend/src/baserow/contrib/database/api/views/form/serializers.py index 6ed2d43ad9..f775fbe5b8 100644 --- a/backend/src/baserow/contrib/database/api/views/form/serializers.py +++ b/backend/src/baserow/contrib/database/api/views/form/serializers.py @@ -133,9 +133,9 @@ def get_field(self, instance): # original select options are not exposed publicly to visitors of the form that # don't have full access to the Baserow table. 
if not instance.include_all_select_options: - instance.field._prefetched_objects_cache[ - "select_options" - ] = instance._prefetched_objects_cache["allowed_select_options"] + instance.field._prefetched_objects_cache["select_options"] = ( + instance._prefetched_objects_cache["allowed_select_options"] + ) data = field_type_registry.get_serializer( instance.field, PublicFormViewFieldSerializer ).data diff --git a/backend/src/baserow/contrib/database/api/views/serializers.py b/backend/src/baserow/contrib/database/api/views/serializers.py index 1bcd997c97..6849362835 100644 --- a/backend/src/baserow/contrib/database/api/views/serializers.py +++ b/backend/src/baserow/contrib/database/api/views/serializers.py @@ -809,7 +809,7 @@ def validate_api_grouped_filters( def serialize_group_by_metadata( - group_by_metadata: Dict[Field, List[Dict[str, Any]]] + group_by_metadata: Dict[Field, List[Dict[str, Any]]], ) -> Dict[str, List[Dict[str, Any]]]: """ Serializes the structure generated by the `get_group_by_metadata_in_rows` diff --git a/backend/src/baserow/contrib/database/api/views/views.py b/backend/src/baserow/contrib/database/api/views/views.py index 7a7deaacde..9bca76a6cb 100644 --- a/backend/src/baserow/contrib/database/api/views/views.py +++ b/backend/src/baserow/contrib/database/api/views/views.py @@ -1178,8 +1178,7 @@ class ViewDecorationsView(APIView): location=OpenApiParameter.PATH, type=OpenApiTypes.INT, description=( - "Returns only decoration of the view given to the provided " - "value." + "Returns only decoration of the view given to the provided value." 
), ) ], @@ -2085,14 +2084,13 @@ class PublicViewInfoView(APIView): location=OpenApiParameter.PATH, type=OpenApiTypes.STR, required=True, - description="The slug of the view to get public information " "about.", + description="The slug of the view to get public information about.", ) ], tags=["Database table views"], operation_id="get_public_view_info", description=( - "Returns the required public information to display a single " - "shared view." + "Returns the required public information to display a single shared view." ), request=None, responses={ diff --git a/backend/src/baserow/contrib/database/application_types.py b/backend/src/baserow/contrib/database/application_types.py index f59f730ed9..e641eb34fc 100755 --- a/backend/src/baserow/contrib/database/application_types.py +++ b/backend/src/baserow/contrib/database/application_types.py @@ -164,10 +164,10 @@ def export_tables_serialized( for field_object in model._field_objects.values(): field_name = field_object["name"] field_type = field_object["type"] - serialized_row[ - field_name - ] = field_type.get_export_serialized_value( - row, field_name, table_cache, files_zip, storage + serialized_row[field_name] = ( + field_type.get_export_serialized_value( + row, field_name, table_cache, files_zip, storage + ) ) serialized_rows.append(serialized_row) row_progress.increment( @@ -279,13 +279,17 @@ def _ops_count_for_import_tables_serialized( + sum( [ # Inserting every field - len(table["fields"]) + + len(table["fields"]) + + # Inserting every field - len(table["views"]) + + len(table["views"]) + + # Converting every row - len(table["rows"]) + + len(table["rows"]) + + # Inserting every row - len(table["rows"]) + + len(table["rows"]) + + # After each field len(table["fields"]) for table in serialized_tables @@ -773,7 +777,7 @@ def _import_serialized_fields_values_to_row( field_type = field_type_registry.get(serialized_field["type"]) new_field_id = id_mapping["database_fields"][serialized_field["id"]] new_field_name = 
f"field_{new_field_id}" - field_name = f'field_{serialized_field["id"]}' + field_name = f"field_{serialized_field['id']}" if ( field_name in serialized_row diff --git a/backend/src/baserow/contrib/database/apps.py b/backend/src/baserow/contrib/database/apps.py index 57f26d1b64..c4a28c5dae 100755 --- a/backend/src/baserow/contrib/database/apps.py +++ b/backend/src/baserow/contrib/database/apps.py @@ -396,7 +396,7 @@ def ready(self): view_filter_type_registry.register(EqualViewFilterType()) view_filter_type_registry.register(NotEqualViewFilterType()) view_filter_type_registry.register(FilenameContainsViewFilterType()) - view_filter_type_registry.register(FilesLowerThanViewFilterType()), + (view_filter_type_registry.register(FilesLowerThanViewFilterType()),) view_filter_type_registry.register(HasFileTypeViewFilterType()) view_filter_type_registry.register(ContainsViewFilterType()) view_filter_type_registry.register(ContainsNotViewFilterType()) diff --git a/backend/src/baserow/contrib/database/data_sync/handler.py b/backend/src/baserow/contrib/database/data_sync/handler.py index d67ed810bb..5a4d1ec900 100644 --- a/backend/src/baserow/contrib/database/data_sync/handler.py +++ b/backend/src/baserow/contrib/database/data_sync/handler.py @@ -396,8 +396,7 @@ def _do_sync_table(self, user, data_sync, progress_builder): existing_rows_queryset = model.objects.all().values( # There is no need to fetch the rows cell values from the row because we # don't need them. 
- *["id"] - + list(key_to_field_id.values()) + *["id"] + list(key_to_field_id.values()) ) progress.increment(by=6) # makes the total `9` @@ -643,9 +642,9 @@ def set_data_sync_synced_properties( data_sync_property.unique_primary or not data_sync.two_way_sync ) field_kwargs["immutable_type"] = True - field_kwargs[ - "immutable_properties" - ] = data_sync_property.immutable_properties + field_kwargs["immutable_properties"] = ( + data_sync_property.immutable_properties + ) if data_sync_property.unique_primary and not has_primary: has_primary = True field_kwargs["primary"] = True @@ -682,9 +681,9 @@ def set_data_sync_synced_properties( data_sync_property.unique_primary or not data_sync.two_way_sync ) field_kwargs["immutable_type"] = True - field_kwargs[ - "immutable_properties" - ] = data_sync_property.immutable_properties + field_kwargs["immutable_properties"] = ( + data_sync_property.immutable_properties + ) enabled_property.field = handler.update_field( user=user, field=enabled_property.field.specific, diff --git a/backend/src/baserow/contrib/database/fields/dependencies/circular_reference_checker.py b/backend/src/baserow/contrib/database/fields/dependencies/circular_reference_checker.py index f173efe3ce..f6597b1963 100644 --- a/backend/src/baserow/contrib/database/fields/dependencies/circular_reference_checker.py +++ b/backend/src/baserow/contrib/database/fields/dependencies/circular_reference_checker.py @@ -104,7 +104,7 @@ def get_all_field_dependencies(field: "Field") -> set[int]: GROUP BY id, is_circular ) sub ORDER BY max_depth DESC, id ASC; - """ # nosec b608 + """ # noqa: S608 ) # fmt: on diff --git a/backend/src/baserow/contrib/database/fields/dependencies/handler.py b/backend/src/baserow/contrib/database/fields/dependencies/handler.py index 438a215b43..256fb185df 100644 --- a/backend/src/baserow/contrib/database/fields/dependencies/handler.py +++ b/backend/src/baserow/contrib/database/fields/dependencies/handler.py @@ -150,7 +150,7 @@ def 
_get_all_dependent_fields( {relationship_table}.dependency_id, {relationship_table}.via_id FROM {relationship_table} {database_prefilter_query} - """ # nosec b608 + """ # noqa: S608 # Raw query that traverses through the dependencies, and will find the # dependants of the provided fields ids recursively. @@ -220,7 +220,7 @@ def _get_all_dependent_fields( WHERE depth <= %(max_depth)s GROUP BY traverse.id, traverse.via_ids, field.content_type_id, field.name, field.table_id ORDER BY MAX(depth) ASC, id ASC - """ # nosec b608 + """ # noqa: S608 queryset = FieldDependency.objects.raw(raw_query, query_parameters) link_row_field_content_type = ContentType.objects.get_for_model(LinkRowField) @@ -776,9 +776,7 @@ def raise_if_user_doesnt_have_operation_on_dependencies_in_other_tables( for ( check, result, - ) in ( - CoreHandler().check_multiple_permissions(perm_checks, workspace).items() - ): + ) in CoreHandler().check_multiple_permissions(perm_checks, workspace).items(): if not result: raise changed_field_type.get_permission_error_when_user_changes_field_to_depend_on_forbidden_field( check.actor, field, check.context diff --git a/backend/src/baserow/contrib/database/fields/field_types.py b/backend/src/baserow/contrib/database/fields/field_types.py index feb1b5bc61..6083b1bf79 100755 --- a/backend/src/baserow/contrib/database/fields/field_types.py +++ b/backend/src/baserow/contrib/database/fields/field_types.py @@ -203,14 +203,15 @@ ) from .field_helpers import prepare_files_for_export from .field_sortings import OptionallyAnnotatedOrderBy -from .fields import BaserowExpressionField, BaserowLastModifiedField -from .fields import DurationField as DurationModelField from .fields import ( + BaserowExpressionField, + BaserowLastModifiedField, IntegerFieldWithSequence, MultipleSelectManyToManyField, SingleSelectForeignKey, SyncedUserForeignKeyField, ) +from .fields import DurationField as DurationModelField from .handler import FieldHandler from .models import ( 
AbstractSelectOption, @@ -1808,7 +1809,7 @@ def get_alter_column_prepare_new_value(self, connection, from_field, to_field): sql = ( f""" p_in = NULL; - """ # nosec b608 + """ # noqa: S608 ) # fmt: on return sql, {} @@ -1827,7 +1828,7 @@ def get_alter_column_prepare_old_value(self, connection, from_field, to_field): sql = ( f""" p_in = NULL; - """ # nosec b608 + """ # noqa: S608 ) # fmt: on return sql, {} @@ -2034,7 +2035,7 @@ def get_alter_column_prepare_new_value(self, connection, from_field, to_field): sql = ( f""" p_in = NULL; - """ # nosec b608 + """ # noqa: S608 ) # fmt: on return sql, {} @@ -2053,7 +2054,7 @@ def get_alter_column_prepare_old_value(self, connection, from_field, to_field): sql = ( f""" p_in = NULL; - """ # nosec b608 + """ # noqa: S608 ) # fmt: on return sql, {} @@ -3404,13 +3405,13 @@ def export_serialized(self, field): serialized = super().export_serialized(field, False) serialized["link_row_table_id"] = field.link_row_table_id serialized["link_row_related_field_id"] = field.link_row_related_field_id - serialized[ - "link_row_limit_selection_view_id" - ] = field.link_row_limit_selection_view_id + serialized["link_row_limit_selection_view_id"] = ( + field.link_row_limit_selection_view_id + ) serialized["has_related_field"] = field.link_row_table_has_related_field - serialized[ - "link_row_multiple_relationships" - ] = field.link_row_multiple_relationships + serialized["link_row_multiple_relationships"] = ( + field.link_row_multiple_relationships + ) return serialized def import_serialized( @@ -3893,7 +3894,7 @@ def get_export_value(self, value, field_object, rich_value=False): return [{"visible_name": f["visible_name"], "url": f["url"]} for f in files] else: return list_to_comma_separated_string( - [f'{file["visible_name"]} ({file["url"]})' for file in files] + [f"{file['visible_name']} ({file['url']})" for file in files] ) def get_human_readable_value(self, value, field_object): @@ -4190,11 +4191,13 @@ def enhance_queryset_in_bulk(self, 
queryset, field_objects, **kwargs): break if not select_model_prefetch: - select_model_prefetch = CombinedForeignKeyAndManyToManyMultipleFieldPrefetch( - SelectOption, - # Must skip because the multiple_select works with dynamically - # generated models. - skip_target_check=True, + select_model_prefetch = ( + CombinedForeignKeyAndManyToManyMultipleFieldPrefetch( + SelectOption, + # Must skip because the multiple_select works with dynamically + # generated models. + skip_target_check=True, + ) ) queryset = queryset.multi_field_prefetch(select_model_prefetch) @@ -4236,7 +4239,7 @@ def get_select_options_help_text(self, instance): except ValueError: # Happens when the instance does not yet have a primary key. return self.get_serializer_help_text(instance) - return f"(in format option_id=option_value): " f"{select_option_pair}" + return f"(in format option_id=option_value): {select_option_pair}" class SingleSelectFieldType(CollationSortMixin, SelectOptionBaseFieldType): @@ -4553,7 +4556,7 @@ def get_alter_column_prepare_old_value(self, connection, from_field, to_field): VALUES {','.join(values_mapping)} ) AS values (key, value) WHERE key = p_in); - """ # nosec b608 + """ # noqa: S608 ) # fmt: on return sql, variables @@ -4588,11 +4591,11 @@ def get_alter_column_prepare_new_value(self, connection, from_field, to_field): return ( f"""p_in = ( SELECT value FROM ( - VALUES {','.join(values_mapping)} + VALUES {",".join(values_mapping)} ) AS values (key, value) WHERE key = lower(p_in) ); - """, # nosec + """, # noqa: S608 variables, ) @@ -4953,10 +4956,10 @@ def prepare_value_for_db_in_bulk( # Replace values by error for failing rows for invalid_name in invalid_ids: for row_index in id_map[invalid_name]: - values_by_row[ - row_index - ] = AllProvidedMultipleSelectValuesMustBeSelectOption( - invalid_name + values_by_row[row_index] = ( + AllProvidedMultipleSelectValuesMustBeSelectOption( + invalid_name + ) ) else: # or fail fast @@ -4976,10 +4979,10 @@ def 
prepare_value_for_db_in_bulk( # Replace values by error for failing rows for invalid_name in invalid_names: for row_index in name_map[invalid_name]: - values_by_row[ - row_index - ] = AllProvidedMultipleSelectValuesMustBeSelectOption( - invalid_name + values_by_row[row_index] = ( + AllProvidedMultipleSelectValuesMustBeSelectOption( + invalid_name + ) ) else: @@ -6841,9 +6844,9 @@ def prepare_value_for_db_in_bulk( if continue_on_error: for invalid_id in invalid_ids: for row_index in rows_by_value[invalid_id]: - values_by_row[ - row_index - ] = AllProvidedCollaboratorIdsMustBeValidUsers(invalid_id) + values_by_row[row_index] = ( + AllProvidedCollaboratorIdsMustBeValidUsers(invalid_id) + ) else: # or fail fast raise AllProvidedCollaboratorIdsMustBeValidUsers(invalid_ids) @@ -7383,7 +7386,7 @@ def update_rows_with_field_sequence( SET {field.db_column} = ordered.row_nr FROM ordered WHERE t.id = ordered.id; - """, # nosec B608 + """, # noqa: S608 params, ) @@ -7417,7 +7420,7 @@ def create_field_sequence( f""" WITH count AS (SELECT COUNT(*) FROM {db_table}) SELECT setval('{db_column}_seq', count) FROM count WHERE count > 0; - """ # nosec B608 + """ # noqa: S608 ) def drop_field_sequence( diff --git a/backend/src/baserow/contrib/database/fields/filter_support/base.py b/backend/src/baserow/contrib/database/fields/filter_support/base.py index 4c583bc1a2..ea8e826ad4 100644 --- a/backend/src/baserow/contrib/database/fields/filter_support/base.py +++ b/backend/src/baserow/contrib/database/fields/filter_support/base.py @@ -4,9 +4,8 @@ from typing import TYPE_CHECKING, Any, Dict, List, Type from django.contrib.postgres.fields import JSONField -from django.db.models import BooleanField, F +from django.db.models import BooleanField, F, Q, Value from django.db.models import Field as DjangoField -from django.db.models import Q, Value from django.db.models.expressions import RawSQL from loguru import logger @@ -278,7 +277,7 @@ def get_jsonb_contains_filter_expr( FROM 
jsonb_path_query("{field_name}", %s) elem WHERE UPPER(elem::text) LIKE UPPER(%s) ) - """ # nosec B608 {field_name} + """ # noqa: S608 expr = RawSQL(raw_sql, (query_path, f"%{value}%")) # nosec B611 annotation_name = f"{field_name}_contains_{hash(value)}" return AnnotatedQ( @@ -317,7 +316,7 @@ def get_jsonb_contains_word_filter_expr( FROM jsonb_path_query("{field_name}", %s) elem WHERE UPPER(elem::text) ~ %s ) - """ # nosec B608 {field_name} + """ # noqa: S608 expr = RawSQL(raw_sql, (query_path, rf"\m{re_value}\M")) # nosec B611 annotation_name = f"{field_name}_contains_word_{hash(value)}" @@ -370,7 +369,7 @@ def get_jsonb_has_any_in_value_filter_expr( FROM jsonb_path_exists("{field_name}", %s) elem WHERE elem = true ) - """ # nosec B608 {field_name} + """ # noqa: S608 expr = RawSQL(raw_sql, (f"{query_path} ? ({sql_ids})",)) # nosec B611 annotation_name = f"{field_name}_has_any_of_{hash(sql_ids)}" @@ -406,7 +405,7 @@ def get_jsonb_has_exact_value_filter_expr( FROM jsonb_array_elements(top_obj->'value') inner_el ) = %s::int[] ) - """ # nosec B608 {field_name} + """ # noqa: S608 expr = RawSQL(raw_sql, (sql_ids,)) # nosec B611 annotation_name = f"{field_name}_has_any_of_{hash(tuple(sql_ids))}" @@ -463,9 +462,9 @@ def get_jsonb_has_date_value_filter_expr( EXISTS( SELECT 1 FROM jsonb_array_elements("{field_name}") elem - WHERE ({' AND '.join(where_clauses)}) + WHERE ({" AND ".join(where_clauses)}) ) - """ # nosec B608 {field_name} + """ # noqa: S608 expr = RawSQL(raw_sql, where_params) # nosec B611 annotation_name = f"{field_name}_has_date_gte_{hash(gte_of)}_lt_{hash(lt_of)}" diff --git a/backend/src/baserow/contrib/database/fields/models.py b/backend/src/baserow/contrib/database/fields/models.py index 052d4eddd7..a5b36981e2 100644 --- a/backend/src/baserow/contrib/database/fields/models.py +++ b/backend/src/baserow/contrib/database/fields/models.py @@ -749,8 +749,8 @@ def recalculate_internal_fields(self, raise_if_invalid=False, field_cache=None): ) expression_type = 
expression.expression_type # Update the cached properties - setattr(self, "cached_typed_internal_expression", expression) - setattr(self, "cached_formula_type", expression_type) + self.cached_typed_internal_expression = expression + self.cached_formula_type = expression_type if raise_if_invalid: expression_type.raise_if_invalid() @@ -767,7 +767,7 @@ def mark_as_invalid_and_save(self, error: str): invalid_type = BaserowFormulaInvalidType(error) invalid_type.persist_onto_formula_field(self) - setattr(self, "cached_formula_type", invalid_type) + self.cached_formula_type = invalid_type self.save(recalculate=False, raise_if_invalid=False) def save(self, *args, **kwargs): diff --git a/backend/src/baserow/contrib/database/fields/registries.py b/backend/src/baserow/contrib/database/fields/registries.py index 1985b74ec3..614a62ded5 100644 --- a/backend/src/baserow/contrib/database/fields/registries.py +++ b/backend/src/baserow/contrib/database/fields/registries.py @@ -27,9 +27,6 @@ Expression, ExpressionWrapper, F, -) -from django.db.models import Field as DjangoField -from django.db.models import ( IntegerField, JSONField, Model, @@ -39,6 +36,7 @@ Subquery, Value, ) +from django.db.models import Field as DjangoField from django.db.models.fields.related import ForeignKey, ManyToManyField from django.db.models.functions import Cast, Coalesce @@ -2249,7 +2247,7 @@ def is_applicable(self, from_model, from_field, to_field): """ raise NotImplementedError( - "Each field converter must have an is_applicable " "method." + "Each field converter must have an is_applicable method." ) def alter_field( @@ -2288,7 +2286,7 @@ def alter_field( """ raise NotImplementedError( - "Each field converter must have an alter_field " "method." + "Each field converter must have an alter_field method." 
) diff --git a/backend/src/baserow/contrib/database/fields/utils/duration.py b/backend/src/baserow/contrib/database/fields/utils/duration.py index 1cdf4c1de3..88ea6ce511 100644 --- a/backend/src/baserow/contrib/database/fields/utils/duration.py +++ b/backend/src/baserow/contrib/database/fields/utils/duration.py @@ -708,7 +708,7 @@ def text_value_sql_to_duration(field: "DurationField") -> str: if is_psycopg3: - from psycopg.types.datetime import IntervalLoader # noqa: BPG001 + from psycopg.types.datetime import IntervalLoader from baserow.core.psycopg import psycopg diff --git a/backend/src/baserow/contrib/database/formula/ast/function_defs.py b/backend/src/baserow/contrib/database/formula/ast/function_defs.py index 6a8f24c3b9..4921345295 100644 --- a/backend/src/baserow/contrib/database/formula/ast/function_defs.py +++ b/backend/src/baserow/contrib/database/formula/ast/function_defs.py @@ -1263,7 +1263,7 @@ def type_function( ) -> BaserowExpression[BaserowFormulaType]: arg1_type = arg1.expression_type arg2_type = arg2.expression_type - if not (type(arg1_type) is type(arg2_type)): + if type(arg1_type) is not type(arg2_type): # If trying to compare two types which can be compared, but are of different # types, then first cast them to text and then compare. # We to ourselves via the __class__ property here so subtypes of this type @@ -1305,7 +1305,7 @@ def type_function( ) -> BaserowExpression[BaserowFormulaType]: arg2_type = arg2.expression_type arg3_type = arg3.expression_type - if not (type(arg2_type) is type(arg3_type)): + if type(arg2_type) is not type(arg3_type): # Replace the current if func_call with one which casts both args to text # if they are of different types as PostgreSQL requires all cases of a case # statement to be of the same type. 
diff --git a/backend/src/baserow/contrib/database/formula/ast/tree.py b/backend/src/baserow/contrib/database/formula/ast/tree.py index 522f711f72..3e916c8420 100644 --- a/backend/src/baserow/contrib/database/formula/ast/tree.py +++ b/backend/src/baserow/contrib/database/formula/ast/tree.py @@ -624,7 +624,7 @@ def check_arg_type_valid( ) return typed_arg.with_invalid_type( - f"argument number {arg_index+1} given to {self} was of type " + f"argument number {arg_index + 1} given to {self} was of type " f"{expression_type_name} but {postfix}" ) diff --git a/backend/src/baserow/contrib/database/formula/expression_generator/django_expressions.py b/backend/src/baserow/contrib/database/formula/expression_generator/django_expressions.py index be06b09917..5f8cd3e8ca 100644 --- a/backend/src/baserow/contrib/database/formula/expression_generator/django_expressions.py +++ b/backend/src/baserow/contrib/database/formula/expression_generator/django_expressions.py @@ -187,7 +187,7 @@ class FileNameContainsExpr(BaserowFilterExpression): FROM JSONB_ARRAY_ELEMENTS(%(field_name)s) as attached_files WHERE UPPER(attached_files ->> 'visible_name') LIKE UPPER(%(value)s) ) - """ # nosec B608 + """ # noqa: S608 ) # fmt: on @@ -201,7 +201,7 @@ class JSONArrayContainsValueLengthLowerThanExpr(BaserowFilterExpression): FROM JSONB_ARRAY_ELEMENTS(%(field_name)s) as filtered_field WHERE LENGTH(filtered_field ->> 'value') < %(value)s ) - """ # nosec B608 %(value)s + """ # noqa: S608 ) # fmt: on @@ -214,7 +214,7 @@ class JSONArrayAllAreExpr(BaserowFilterExpression): SELECT upper(filtered_field ->> 'value') FROM JSONB_ARRAY_ELEMENTS(%(field_name)s) as filtered_field ) AND JSONB_ARRAY_LENGTH(%(field_name)s) > 0 - """ # nosec B608 %(value)s + """ # noqa: S608 ) # fmt: on @@ -261,7 +261,7 @@ def __init__( FROM JSONB_ARRAY_ELEMENTS(%(field_name)s) as filtered_field WHERE (filtered_field ->> 'value')::numeric %(comparison_op)s %(value)s::numeric ) - """ # nosec B608 %(value)s %(comparison_op)s + """ # 
noqa: S608 ) # fmt: on @@ -300,7 +300,7 @@ def __init__( FROM JSONB_ARRAY_ELEMENTS(%(field_name)s) as filtered_field WHERE (filtered_field ->> 'value')::interval %(comparison_op)s make_interval(secs=>%(value)s) ) - """ # nosec B608 %(value)s %(comparison_op)s + """ # noqa: S608 ) # fmt: on diff --git a/backend/src/baserow/contrib/database/formula/types/formula_types.py b/backend/src/baserow/contrib/database/formula/types/formula_types.py index d8237290c1..2be688299e 100644 --- a/backend/src/baserow/contrib/database/formula/types/formula_types.py +++ b/backend/src/baserow/contrib/database/formula/types/formula_types.py @@ -6,9 +6,8 @@ from django.contrib.postgres.fields import ArrayField, JSONField from django.db import models -from django.db.models import Expression, F +from django.db.models import Expression, F, Func, Q, QuerySet, TextField, Value from django.db.models import Field as DjangoField -from django.db.models import Func, Q, QuerySet, TextField, Value from django.db.models.functions import Cast, Concat from dateutil import parser @@ -1106,7 +1105,7 @@ def get_export_value(self, value, field_object, rich_value=False) -> Any: "url": export_file["url"], } else: - return f'{export_file["visible_name"]}({export_file["url"]})' + return f"{export_file['visible_name']}({export_file['url']})" def contains_query(self, field_name, value, model_field, field): value = value.strip() @@ -2027,7 +2026,7 @@ def _lookup_formula_type_from_string(formula_type_string): def literal( - arg: Union[str, int, bool, Decimal] + arg: Union[str, int, bool, Decimal], ) -> BaserowExpression[BaserowFormulaValidType]: """ A helper function for building BaserowExpressions with literals @@ -2060,7 +2059,7 @@ class JSONBSingleKeyArrayExpression(Expression): FROM jsonb_to_recordset({field_name}) as items( {key_name} {data_type}) ) - """ # nosec B608 + """ # noqa: S608 # fmt: on def __init__(self, field_name: str, key_name: str, data_type: str, **kwargs): @@ -2087,7 +2086,7 @@ class 
JSONBSingleInnerKeyArrayExpression(Expression): FROM jsonb_to_recordset({field_name}) as items( {key_name} {data_type}) ) - """ # nosec B608 + """ # noqa: S608 # fmt: on def __init__( diff --git a/backend/src/baserow/contrib/database/management/commands/copy_tables.py b/backend/src/baserow/contrib/database/management/commands/copy_tables.py index 42950ab2de..2aecf59af3 100644 --- a/backend/src/baserow/contrib/database/management/commands/copy_tables.py +++ b/backend/src/baserow/contrib/database/management/commands/copy_tables.py @@ -11,7 +11,7 @@ def run(command, env): # Ignoring as this is a CLI admin tool calling Popen, we don't need to worry about # shell injection as to call this tool you must already have shell access... - proc = Popen(command, shell=True, env=env) # nosec + proc = Popen(command, shell=True, env=env) # noqa: S602 proc.wait() @@ -76,8 +76,7 @@ def copy_tables( ) else: logger( - f"REAL RUN, ABOUT TO COPY TABLES FROM {source_db_name} to " - f"{target_db_name}" + f"REAL RUN, ABOUT TO COPY TABLES FROM {source_db_name} to {target_db_name}" ) if ssl: logger("Running with sslmode=require") @@ -97,7 +96,7 @@ def copy_tables( if num_to_copy > 0: count += num_to_copy - logger(f"Importing {num_to_copy} tables in " f"one go") + logger(f"Importing {num_to_copy} tables in one go") command = ( f"pg_dump {source_connection_params}{table_str} | " f"psql {target_connection_params}" diff --git a/backend/src/baserow/contrib/database/management/commands/fill_row_history.py b/backend/src/baserow/contrib/database/management/commands/fill_row_history.py index 4165f326e6..2e4d5a7969 100644 --- a/backend/src/baserow/contrib/database/management/commands/fill_row_history.py +++ b/backend/src/baserow/contrib/database/management/commands/fill_row_history.py @@ -135,9 +135,9 @@ def record_row_history(table, model, row, user, use_cache=False, skip_action=Fal serialized_random_value = field_object["type"].random_to_input_value( field_object["field"], random_value ) - 
row_random_values[ - f"field_{field_object['field'].id}" - ] = serialized_random_value + row_random_values[f"field_{field_object['field'].id}"] = ( + serialized_random_value + ) rows_values = [row_random_values] diff --git a/backend/src/baserow/contrib/database/management/commands/fill_workspace.py b/backend/src/baserow/contrib/database/management/commands/fill_workspace.py index 8fb414d220..0bdcd33206 100755 --- a/backend/src/baserow/contrib/database/management/commands/fill_workspace.py +++ b/backend/src/baserow/contrib/database/management/commands/fill_workspace.py @@ -184,7 +184,7 @@ def fill_workspace_with_data( with tqdm( range(database_count * table_count + token_count), - desc=f"Worker" f" {process_id}", + desc=f"Worker {process_id}", ) as pbar: created_databases_and_tables = {} for _ in range(database_count): diff --git a/backend/src/baserow/contrib/database/management/commands/install_airtable_templates.py b/backend/src/baserow/contrib/database/management/commands/install_airtable_templates.py index fb677e6fc8..105c57369c 100644 --- a/backend/src/baserow/contrib/database/management/commands/install_airtable_templates.py +++ b/backend/src/baserow/contrib/database/management/commands/install_airtable_templates.py @@ -60,7 +60,7 @@ def handle(self, *args, **options): sys.exit(1) html_url = f"{AIRTABLE_BASE_URL}/templates" - html_response = requests.get(html_url, headers=BASE_HEADERS) # nosec + html_response = requests.get(html_url, headers=BASE_HEADERS) # noqa: S113 if not html_response.ok: raise Exception("test") diff --git a/backend/src/baserow/contrib/database/migrations/0028_fix_negative_date.py b/backend/src/baserow/contrib/database/migrations/0028_fix_negative_date.py index 31ef5fe0dd..cc2c6d2b99 100644 --- a/backend/src/baserow/contrib/database/migrations/0028_fix_negative_date.py +++ b/backend/src/baserow/contrib/database/migrations/0028_fix_negative_date.py @@ -14,7 +14,7 @@ def forward(apps, schema_editor): f""" UPDATE {table_name} SET {field_name} 
= '0001-01-01'::date WHERE {field_name} < '0001-01-01'::date - """ # nosec B608 + """ # noqa: S608 ) diff --git a/backend/src/baserow/contrib/database/rows/handler.py b/backend/src/baserow/contrib/database/rows/handler.py index cb318283ae..15362742dd 100644 --- a/backend/src/baserow/contrib/database/rows/handler.py +++ b/backend/src/baserow/contrib/database/rows/handler.py @@ -325,10 +325,10 @@ def prepare_rows_in_bulk( field_obj = field_objects[field_ids[field_name]] field_type = field_obj["type"] field = field_obj["field"] - prepared_values_by_field[ - field_name - ] = field_type.prepare_value_for_db_in_bulk( - field, batch_values, continue_on_error=generate_error_report + prepared_values_by_field[field_name] = ( + field_type.prepare_value_for_db_in_bulk( + field, batch_values, continue_on_error=generate_error_report + ) ) # replace original values to keep ordering @@ -596,8 +596,8 @@ def get_adjacent_row_in_queryset(self, queryset, row_id, previous=False): WITH ordered AS ({sql}) SELECT id FROM ordered WHERE row_nr = (SELECT row_nr FROM ordered WHERE id = %s) - {'-' if previous else '+'} 1 - """ # nosec B608 + {"-" if previous else "+"} 1 + """ # noqa: S608 return table_model.objects.filter( id=RawSQL(adjacent_id_subquery, (*params, row.id)) # nosec B611 @@ -2351,9 +2351,12 @@ def force_update_rows( to_add = new_set_of_values - original_set_of_values to_delete = original_set_of_values - new_set_of_values - m2m_to_add, ( - row_column_name, - value_column, + ( + m2m_to_add, + ( + row_column_name, + value_column, + ), ) = self._prepare_m2m_field_related_objects( row, field_name, list(filter(lambda v: v in to_add, value_ids)) ) @@ -3298,9 +3301,7 @@ def insert_table_values(self): OVER (PARTITION BY upsert_value ORDER BY id, upsert_value ) AS group_index FROM subq ORDER BY id """ - ).format( - columns, sql.Identifier(self.table_name) - ) # nosec B608 + ).format(columns, sql.Identifier(self.table_name)) # noqa: S608 self.execute(query) @@ -3326,9 +3327,7 @@ def 
insert_imported_values(self): rows_placeholder = sql.SQL(",\n").join(rows_query) script_template = sql.SQL( "INSERT INTO table_import (id, upsert_value) VALUES {};" - ).format( - rows_placeholder - ) # nosec B608 + ).format(rows_placeholder) # noqa: S608 self.execute(script_template, query_params) def calculate_map(self) -> list[tuple[int, int]]: diff --git a/backend/src/baserow/contrib/database/rows/helpers.py b/backend/src/baserow/contrib/database/rows/helpers.py index 1f19900329..be0ba7cebc 100644 --- a/backend/src/baserow/contrib/database/rows/helpers.py +++ b/backend/src/baserow/contrib/database/rows/helpers.py @@ -150,9 +150,9 @@ def _update_linked_row_diff( ] if not linked_diff: linked_diff = _init_linked_row_diff(linked_field_id) - related_rows_diff[linked_table_id][linked_row_id][ - linked_field_name - ] = linked_diff + related_rows_diff[linked_table_id][linked_row_id][linked_field_name] = ( + linked_diff + ) linked_diff[key].append(row_id) linked_diff["metadata"]["linked_rows"][row_id] = { "value": field_metadata["primary_value"] diff --git a/backend/src/baserow/contrib/database/rows/history_providers.py b/backend/src/baserow/contrib/database/rows/history_providers.py index 79462d4337..618d41eabf 100644 --- a/backend/src/baserow/contrib/database/rows/history_providers.py +++ b/backend/src/baserow/contrib/database/rows/history_providers.py @@ -226,7 +226,7 @@ def get_row_history_entries( table_id=table_id, row_id=row_id, changed_field_names=field_names, - before_values={k: None for k in changed.keys()}, + before_values=dict.fromkeys(changed.keys()), after_values=changed, ) @@ -350,7 +350,7 @@ def get_changed_rows( after = params.row_values metadata = params.fields_metadata - before = {"id": params.row_id, **{k: None for k in metadata.keys()}} + before = {"id": params.row_id, **dict.fromkeys(metadata.keys())} if command_type == ActionCommandType.UNDO: before, after = after, before @@ -374,7 +374,7 @@ def get_changed_rows( for idx, row_id in 
enumerate(row_ids): after = after_values[idx] metadata = params.fields_metadata[str(row_id)] - before = {"id": row_id, **{k: None for k in metadata.keys()}} + before = {"id": row_id, **dict.fromkeys(metadata.keys())} yield RowChangeData( row_id=row_id, before=before if not swap_before_after else after, @@ -456,7 +456,7 @@ def get_changed_rows( row_id = params.row_id metadata = {k: v for k, v in params.fields_metadata.items() if k != "id"} before = params.values - after = {"id": row_id, **{k: None for k in metadata.keys()}} + after = {"id": row_id, **dict.fromkeys(metadata.keys())} swap_before_after = command_type == ActionCommandType.UNDO yield RowChangeData( @@ -479,7 +479,7 @@ def get_changed_rows( for ridx, row_id in enumerate(params.row_ids): before = params.rows_values[ridx] metadata = params.fields_metadata[str(row_id)] - after = {"id": row_id, **{k: None for k in metadata.keys()}} + after = {"id": row_id, **dict.fromkeys(metadata.keys())} yield RowChangeData( row_id=row_id, before=before if not swap_before_after else after, diff --git a/backend/src/baserow/contrib/database/search/handler.py b/backend/src/baserow/contrib/database/search/handler.py index de7ad05086..26abb403c0 100644 --- a/backend/src/baserow/contrib/database/search/handler.py +++ b/backend/src/baserow/contrib/database/search/handler.py @@ -79,7 +79,7 @@ class SearchMode(str, Enum): FT_WITH_COUNT = "full-text-with-count" -ALL_SEARCH_MODES = [getattr(mode, "value") for mode in SearchMode] +ALL_SEARCH_MODES = [mode.value for mode in SearchMode] @lru_cache(maxsize=1024) @@ -101,7 +101,7 @@ def _workspace_search_table_exists(workspace_id: int) -> bool: WHERE table_schema = current_schema() AND table_name = %s ) - """ # nosec B608 + """ # noqa: S608 cursor.execute(raw_sql, [search_table_name]) return cursor.fetchone()[0] @@ -700,7 +700,7 @@ def _delete_workspace_data_marked_for_deletion(cls, workspace_id: int): WHERE d.field_id = p.field_id AND {row_check} AND p.deletion_workspace_id = %s; - """ # 
nosec B608 + """ # noqa: S608 cursor.execute(raw_sql, (workspace_id,)) except ProgrammingError as e: # It could be that the workspace search table has already been deleted, @@ -833,7 +833,7 @@ def update_search_data( DO UPDATE SET value = EXCLUDED.value, updated_on = EXCLUDED.updated_on; - """ # nosec B608 + """ # noqa: S608 cursor.execute(raw_sql, params) @classmethod diff --git a/backend/src/baserow/contrib/database/search_types.py b/backend/src/baserow/contrib/database/search_types.py index a936fdf8ae..327dd21ad3 100644 --- a/backend/src/baserow/contrib/database/search_types.py +++ b/backend/src/baserow/contrib/database/search_types.py @@ -519,9 +519,9 @@ def postprocess(self, rows: Iterable[Dict]) -> List[SearchResult]: table_id_to_name[f.table_id] = f.table.name table_id_to_database_id[f.table_id] = f.table.database_id database_id_to_name[f.table.database_id] = f.table.database.name - database_id_to_workspace_id[ - f.table.database_id - ] = f.table.database.workspace_id + database_id_to_workspace_id[f.table.database_id] = ( + f.table.database.workspace_id + ) table_id_to_primary_field[f.table_id] = ( f.table, primary_fields[f.primary_field_id], diff --git a/backend/src/baserow/contrib/database/table/cache.py b/backend/src/baserow/contrib/database/table/cache.py index d6d5077519..64d9053092 100644 --- a/backend/src/baserow/contrib/database/table/cache.py +++ b/backend/src/baserow/contrib/database/table/cache.py @@ -13,6 +13,7 @@ 4. If they differ, re-query for all the fields and save them in the cache. 5. If they are the same use the cached field attrs. """ + import typing import uuid from typing import Any, Dict, Optional @@ -62,7 +63,7 @@ def clear_generated_model_cache(): generated_models_cache.clear() else: raise ImproperlyConfigured( - "Baserow must be run with a redis cache outside of " "tests." + "Baserow must be run with a redis cache outside of tests." 
) print("Done clearing cache.") diff --git a/backend/src/baserow/contrib/database/table/models.py b/backend/src/baserow/contrib/database/table/models.py index 6ccd82c0ad..b8950f7abb 100644 --- a/backend/src/baserow/contrib/database/table/models.py +++ b/backend/src/baserow/contrib/database/table/models.py @@ -10,9 +10,8 @@ from django.contrib.auth import get_user_model from django.core.exceptions import FieldDoesNotExist as DjangoFieldDoesNotExist from django.db import models -from django.db.models import BooleanField +from django.db.models import BooleanField, JSONField, Q, QuerySet from django.db.models import Field as DjangoModelFieldClass -from django.db.models import JSONField, Q, QuerySet from django_cte.cte import CTEManager, CTEQuerySet from opentelemetry import trace @@ -531,7 +530,8 @@ def fields_requiring_refresh_after_insert(cls): # from_db_value function applied after performing and INSERT .. RETURNING # Instead for now we force a refresh to ensure these fields are converted # from their db representations correctly. - or isinstance(f, JSONField) and f.db_returning + or isinstance(f, JSONField) + and f.db_returning ] @classmethod diff --git a/backend/src/baserow/contrib/database/tokens/handler.py b/backend/src/baserow/contrib/database/tokens/handler.py index 62fb46b14c..5faa32aa0c 100644 --- a/backend/src/baserow/contrib/database/tokens/handler.py +++ b/backend/src/baserow/contrib/database/tokens/handler.py @@ -158,7 +158,7 @@ def rotate_token_key(self, user, token): if not user.id == token.user_id: raise TokenDoesNotBelongToUser( - "The user is not authorized to rotate the " "key." + "The user is not authorized to rotate the key." ) token.key = self.generate_unique_key() @@ -184,7 +184,7 @@ def update_token(self, user, token, name): if not user.id == token.user_id: raise TokenDoesNotBelongToUser( - "The user is not authorized to rotate the " "key." + "The user is not authorized to rotate the key." 
) token.name = name @@ -480,7 +480,7 @@ def delete_token(self, user, token): if not user.id == token.user_id: raise TokenDoesNotBelongToUser( - "The user is not authorized to delete the " "token." + "The user is not authorized to delete the token." ) token.delete() diff --git a/backend/src/baserow/contrib/database/views/actions.py b/backend/src/baserow/contrib/database/views/actions.py index b663730b91..235a571e8d 100755 --- a/backend/src/baserow/contrib/database/views/actions.py +++ b/backend/src/baserow/contrib/database/views/actions.py @@ -1746,7 +1746,7 @@ class UpdateDecorationActionType(UndoableActionType): "original_value_provider_type_name", "original_value_provider_conf", "original_order", - "new_decorator_type_name" "new_value_provider_type_name", + "new_decorator_type_namenew_value_provider_type_name", "new_value_provider_conf", "new_order", ] @@ -2444,7 +2444,7 @@ def are_equal_on_create(field_identifier, before_value, after_value) -> bool: else: before = { **before, - **{field_name: "" for field_name in fields_metadata.keys()}, + **dict.fromkeys(fields_metadata.keys(), ""), } row_diff = extract_row_diff( diff --git a/backend/src/baserow/contrib/database/views/exceptions.py b/backend/src/baserow/contrib/database/views/exceptions.py index c3b3e7feff..40d0e07f55 100644 --- a/backend/src/baserow/contrib/database/views/exceptions.py +++ b/backend/src/baserow/contrib/database/views/exceptions.py @@ -198,8 +198,7 @@ class FormViewReadOnlyFieldIsNotSupported(Exception): def __init__(self, field_name, *args, **kwargs): self.field_name = field_name super().__init__( - f"The field {field_name} is read only and compatible with the form" - f"view.", + f"The field {field_name} is read only and compatible with the formview.", *args, **kwargs, ) diff --git a/backend/src/baserow/contrib/database/views/registries.py b/backend/src/baserow/contrib/database/views/registries.py index f179e62c71..60602a3af6 100644 --- 
a/backend/src/baserow/contrib/database/views/registries.py +++ b/backend/src/baserow/contrib/database/views/registries.py @@ -386,9 +386,9 @@ def import_serialized( ).select_related("user") for workspaceuser in workspaceusers_from_workspace: - id_mapping["owned_by"][ - workspaceuser.user.email - ] = workspaceuser.user + id_mapping["owned_by"][workspaceuser.user.email] = ( + workspaceuser.user + ) try: ownership_type = view_ownership_type_registry.get( @@ -444,9 +444,9 @@ def import_serialized( filter_group_object = ViewFilterGroup.objects.create( view=view, **filter_group_copy ) - id_mapping["database_view_filter_groups"][ - filter_group_id - ] = filter_group_object.id + id_mapping["database_view_filter_groups"][filter_group_id] = ( + filter_group_object.id + ) for view_filter in filters: view_filter_type = view_filter_type_registry.get(view_filter["type"]) @@ -455,10 +455,10 @@ def import_serialized( view_filter_copy["field_id"] = id_mapping["database_fields"][ view_filter_copy["field_id"] ] - view_filter_copy[ - "value" - ] = view_filter_type.set_import_serialized_value( - view_filter_copy["value"], id_mapping + view_filter_copy["value"] = ( + view_filter_type.set_import_serialized_value( + view_filter_copy["value"], id_mapping + ) ) if view_filter.get("group", None): view_filter_copy["group_id"] = id_mapping[ @@ -467,9 +467,9 @@ def import_serialized( view_filter_object = ViewFilter.objects.create( view=view, **view_filter_copy ) - id_mapping["database_view_filters"][ - view_filter_id - ] = view_filter_object.id + id_mapping["database_view_filters"][view_filter_id] = ( + view_filter_object.id + ) if self.can_sort: for view_sorting in sortings: @@ -491,9 +491,9 @@ def import_serialized( view_group_by_object = ViewGroupBy.objects.create( view=view, **view_group_by_copy ) - id_mapping["database_view_group_bys"][ - view_group_by_id - ] = view_group_by_object.id + id_mapping["database_view_group_bys"][view_group_by_id] = ( + view_group_by_object.id + ) if 
self.can_decorate: for view_decoration in decorations: @@ -519,9 +519,9 @@ def import_serialized( view_decoration_object = ViewDecoration.objects.create( view=view, **view_decoration_copy ) - id_mapping["database_view_decorations"][ - view_decoration_id - ] = view_decoration_object.id + id_mapping["database_view_decorations"][view_decoration_id] = ( + view_decoration_object.id + ) for ( serialized_structure_processor @@ -830,8 +830,7 @@ def get_hidden_fields( """ raise NotImplementedError( - "An exportable or publicly sharable view must implement " - "`get_hidden_fields`" + "An exportable or publicly sharable view must implement `get_hidden_fields`" ) def after_field_moved_between_tables(self, field: "Field", original_table_id: int): diff --git a/backend/src/baserow/contrib/database/views/view_filters.py b/backend/src/baserow/contrib/database/views/view_filters.py index 429d0682fa..e958e3b651 100644 --- a/backend/src/baserow/contrib/database/views/view_filters.py +++ b/backend/src/baserow/contrib/database/views/view_filters.py @@ -1474,9 +1474,9 @@ def get_export_serialized_value(self, value, id_mapping): ).select_related("user") for workspaceuser in workspaceusers_from_workspace: - id_mapping[self.COLLABORATORS_KEY][ - str(workspaceuser.user.id) - ] = workspaceuser.user.email + id_mapping[self.COLLABORATORS_KEY][str(workspaceuser.user.id)] = ( + workspaceuser.user.email + ) return id_mapping[self.COLLABORATORS_KEY].get(value, "") @@ -1543,9 +1543,9 @@ def get_export_serialized_value(self, value, id_mapping): ).select_related("user") for workspaceuser in workspaceusers_from_workspace: - id_mapping[self.USER_KEY][ - str(workspaceuser.user.id) - ] = workspaceuser.user.email + id_mapping[self.USER_KEY][str(workspaceuser.user.id)] = ( + workspaceuser.user.email + ) return id_mapping[self.USER_KEY].get(value, "") diff --git a/backend/src/baserow/contrib/database/views/view_types.py b/backend/src/baserow/contrib/database/views/view_types.py index 8e5952940f..a0502b94f8 
100644 --- a/backend/src/baserow/contrib/database/views/view_types.py +++ b/backend/src/baserow/contrib/database/views/view_types.py @@ -172,9 +172,9 @@ def import_serialized( field_option_object = GridViewFieldOptions.objects.create( grid_view=grid_view, **field_option_copy ) - id_mapping["database_grid_view_field_options"][ - field_option_id - ] = field_option_object.id + id_mapping["database_grid_view_field_options"][field_option_id] = ( + field_option_object.id + ) return grid_view @@ -490,9 +490,9 @@ def import_serialized( field_option_object = GalleryViewFieldOptions.objects.create( gallery_view=gallery_view, **field_option_copy ) - id_mapping["database_gallery_view_field_options"][ - field_option_id - ] = field_option_object.id + id_mapping["database_gallery_view_field_options"][field_option_id] = ( + field_option_object.id + ) return gallery_view @@ -1018,9 +1018,7 @@ def after_field_options_update( field_option__in=updated_field_options ).annotate( count=Count("conditions") + Count("formviewfieldoptionsconditiongroup") - ).filter( - count=0 - ).delete() + ).filter(count=0).delete() self._update_field_options_allowed_select_options( view, field_options, updated_field_options_by_field_id @@ -1304,9 +1302,9 @@ def get_file(file): ) field_option_object.id - id_mapping["database_form_view_field_options"][ - field_option_id - ] = field_option_object.id + id_mapping["database_form_view_field_options"][field_option_id] = ( + field_option_object.id + ) # Create the objects in bulk to improve performance. 
FormViewFieldOptionsCondition.objects.bulk_create(condition_objects) diff --git a/backend/src/baserow/contrib/database/ws/views/rows/signals.py b/backend/src/baserow/contrib/database/ws/views/rows/signals.py index 40d7f35925..f98c27417a 100644 --- a/backend/src/baserow/contrib/database/ws/views/rows/signals.py +++ b/backend/src/baserow/contrib/database/ws/views/rows/signals.py @@ -167,9 +167,9 @@ def views_rows_updated( old_row_views: List[FilteredViewRows] = before_return_dict["old_rows_views"] existing_checker = before_return_dict["caching_row_checker"] - view_rows: List[ - FilteredViewRows - ] = existing_checker.get_filtered_views_where_rows_are_visible(rows) + view_rows: List[FilteredViewRows] = ( + existing_checker.get_filtered_views_where_rows_are_visible(rows) + ) view_slug_to_updated_view_rows = {view.view.slug: view for view in view_rows} diff --git a/backend/src/baserow/contrib/integrations/ai/integration_types.py b/backend/src/baserow/contrib/integrations/ai/integration_types.py index 6ebb676423..a124c6d1cf 100644 --- a/backend/src/baserow/contrib/integrations/ai/integration_types.py +++ b/backend/src/baserow/contrib/integrations/ai/integration_types.py @@ -132,9 +132,9 @@ def export_serialized( workspace_provider_settings, ) in workspace.generative_ai_models_settings.items(): if provider_type not in materialized_settings: - materialized_settings[ - provider_type - ] = workspace_provider_settings + materialized_settings[provider_type] = ( + workspace_provider_settings + ) serialized["ai_settings"] = materialized_settings diff --git a/backend/src/baserow/contrib/integrations/core/service_types.py b/backend/src/baserow/contrib/integrations/core/service_types.py index 837f271ace..c8d917c25d 100644 --- a/backend/src/baserow/contrib/integrations/core/service_types.py +++ b/backend/src/baserow/contrib/integrations/core/service_types.py @@ -583,7 +583,7 @@ def dispatch_data( except (UnacceptableAddressException, ConnectionError) as e: raise 
UnexpectedDispatchException( - f'Invalid URL: {resolved_values["url"]}' + f"Invalid URL: {resolved_values['url']}" ) from e except request_exceptions.RequestException as e: raise UnexpectedDispatchException(str(e)) from e diff --git a/backend/src/baserow/contrib/integrations/local_baserow/utils.py b/backend/src/baserow/contrib/integrations/local_baserow/utils.py index aec02c0488..346a63830b 100644 --- a/backend/src/baserow/contrib/integrations/local_baserow/utils.py +++ b/backend/src/baserow/contrib/integrations/local_baserow/utils.py @@ -43,7 +43,7 @@ def guess_json_type_from_response_serializer_field( - serializer_field: Union[Field, Serializer] + serializer_field: Union[Field, Serializer], ) -> Dict[str, Any]: """ Responsible for taking a serializer field, and guessing what its JSON diff --git a/backend/src/baserow/core/action/handler.py b/backend/src/baserow/core/action/handler.py index 7ed67298e5..83ce9a56d7 100755 --- a/backend/src/baserow/core/action/handler.py +++ b/backend/src/baserow/core/action/handler.py @@ -43,7 +43,6 @@ class OneActionHasErrorAndCannotBeRedone(Exception): class ActionHandler(metaclass=baserow_trace_methods(tracer)): - """ Contains methods to do high level operations on ActionType's like undoing or redoing them. 
diff --git a/backend/src/baserow/core/app_auth_providers/handler.py b/backend/src/baserow/core/app_auth_providers/handler.py index c3b1aa1c4c..5426ae1c83 100644 --- a/backend/src/baserow/core/app_auth_providers/handler.py +++ b/backend/src/baserow/core/app_auth_providers/handler.py @@ -102,8 +102,8 @@ def import_app_auth_provider( cache=cache, ) - id_mapping["app_auth_providers"][ - serialized_app_auth_provider["id"] - ] = app_auth_provider.id + id_mapping["app_auth_providers"][serialized_app_auth_provider["id"]] = ( + app_auth_provider.id + ) return app_auth_provider diff --git a/backend/src/baserow/core/app_auth_providers/types.py b/backend/src/baserow/core/app_auth_providers/types.py index 1499214a8f..75be2ecc67 100644 --- a/backend/src/baserow/core/app_auth_providers/types.py +++ b/backend/src/baserow/core/app_auth_providers/types.py @@ -1,5 +1,4 @@ from baserow.core.auth_provider.types import AuthProviderTypeDict -class AppAuthProviderTypeDict(AuthProviderTypeDict): - ... +class AppAuthProviderTypeDict(AuthProviderTypeDict): ... diff --git a/backend/src/baserow/core/auth_provider/models.py b/backend/src/baserow/core/auth_provider/models.py index c5b8df4a73..3d3dc365ea 100644 --- a/backend/src/baserow/core/auth_provider/models.py +++ b/backend/src/baserow/core/auth_provider/models.py @@ -79,5 +79,4 @@ class Meta: ordering = ["domain"] -class PasswordAuthProviderModel(AuthProviderModel): - ... +class PasswordAuthProviderModel(AuthProviderModel): ... 
diff --git a/backend/src/baserow/core/constants.py b/backend/src/baserow/core/constants.py index 7d023229f4..059101353c 100644 --- a/backend/src/baserow/core/constants.py +++ b/backend/src/baserow/core/constants.py @@ -1,4 +1,5 @@ """Miscellaneous constant values used across the codebase.""" + from django.db import models # Date formats supported in Baserow diff --git a/backend/src/baserow/core/handler.py b/backend/src/baserow/core/handler.py index b38a98cff5..f528a4093b 100755 --- a/backend/src/baserow/core/handler.py +++ b/backend/src/baserow/core/handler.py @@ -835,7 +835,7 @@ def get_workspace_user(self, workspace_user_id, base_queryset=None): ) except WorkspaceUser.DoesNotExist: raise WorkspaceUserDoesNotExist( - f"The workspace user with id {workspace_user_id} does " f"not exist." + f"The workspace user with id {workspace_user_id} does not exist." ) return workspace_user @@ -1834,7 +1834,7 @@ def get_template(self, template_id, base_queryset=None): template = base_queryset.get(id=template_id) except Template.DoesNotExist: raise TemplateDoesNotExist( - f"The template with id {template_id} does not " f"exist." + f"The template with id {template_id} does not exist." ) return template diff --git a/backend/src/baserow/core/jobs/mixins.py b/backend/src/baserow/core/jobs/mixins.py index 04eea3799e..53e56365de 100644 --- a/backend/src/baserow/core/jobs/mixins.py +++ b/backend/src/baserow/core/jobs/mixins.py @@ -76,7 +76,7 @@ def _save_user_data_if_not_present(self, user: AbstractUser) -> None: :param user: The user to save the data for. """ - if getattr(self, "user_ip_address") is None: + if self.user_ip_address is None: self.user_ip_address = get_user_remote_addr_ip(user) def _restore_user_data_if_present(self, user: AbstractUser) -> None: @@ -86,7 +86,7 @@ def _restore_user_data_if_present(self, user: AbstractUser) -> None: :param user: The user to restore the data for. 
""" - if getattr(self, "user_ip_address") is not None: + if self.user_ip_address is not None: set_user_remote_addr_ip(user, self.user_ip_address) class Meta: @@ -113,7 +113,7 @@ def _save_user_data_if_not_present(self, user: AbstractUser) -> None: :param user: The user to save the data for. """ - if getattr(self, "user_websocket_id") is None: + if self.user_websocket_id is None: self.user_websocket_id = getattr(user, "web_socket_id", None) def _restore_user_data_if_present(self, user: AbstractUser) -> None: @@ -123,7 +123,7 @@ def _restore_user_data_if_present(self, user: AbstractUser) -> None: :param user: The user to restore the data for. """ - if getattr(self, "user_websocket_id") is not None: + if self.user_websocket_id is not None: user.web_socket_id = self.user_websocket_id class Meta: @@ -155,10 +155,10 @@ def _save_user_data_if_not_present(self, user: AbstractUser) -> None: :param user: The user to save the data for. """ - if getattr(self, "user_session_id") is None: + if self.user_session_id is None: self.user_session_id = get_untrusted_client_session_id(user) - if getattr(self, "user_action_group_id") is None: + if self.user_action_group_id is None: self.user_action_group_id = get_client_undo_redo_action_group_id(user) def _restore_user_data_if_present(self, user: AbstractUser) -> None: @@ -168,10 +168,10 @@ def _restore_user_data_if_present(self, user: AbstractUser) -> None: :param user: The user to restore the data for. 
""" - if getattr(self, "user_session_id") is not None: + if self.user_session_id is not None: set_untrusted_client_session_id(user, self.user_session_id) - if getattr(self, "user_action_group_id") is not None: + if self.user_action_group_id is not None: set_client_undo_redo_action_group_id(user, self.user_action_group_id) class Meta: diff --git a/backend/src/baserow/core/jobs/registries.py b/backend/src/baserow/core/jobs/registries.py index 63dbb67188..d19f3dc644 100644 --- a/backend/src/baserow/core/jobs/registries.py +++ b/backend/src/baserow/core/jobs/registries.py @@ -36,7 +36,6 @@ class JobType( Instance, metaclass=baserow_trace_methods(tracer, only="do"), ): - """ This abstract class represents a custom job type that can be added to the job type registry. It must be extended so customization can be done. Each job diff --git a/backend/src/baserow/core/management/backup/backup_runner.py b/backend/src/baserow/core/management/backup/backup_runner.py index c6eceb5178..81b47c9fe4 100644 --- a/backend/src/baserow/core/management/backup/backup_runner.py +++ b/backend/src/baserow/core/management/backup/backup_runner.py @@ -111,7 +111,7 @@ def restore_baserow( with tempfile.TemporaryDirectory() as temporary_directory_name: with tarfile.open(backup_file_name, "r:gz") as backup_input_tar: - backup_input_tar.extractall(temporary_directory_name) # nosec B202 + backup_input_tar.extractall(temporary_directory_name) # noqa: S202 backup_internal_folder_name = Path(backup_file_name).name backup_sub_folder = Path( @@ -139,7 +139,7 @@ def _get_postgres_tool_args(self) -> List[str]: params = [ "--host=" + self.host, "--dbname=" + self.database, - "--port=" + self.port, + "--port=" + str(self.port), "--username=" + self.username, # Run in directory mode so we can do parallel dumps using the jobs flag. 
"-Fd", @@ -266,7 +266,7 @@ def _run_command_in_sub_process(self, command): print(" ".join(command)) # Adding nosec ignore as this is only used by admin only command line tools # where it it is completely reasonable to use subprocess and not insecure. - subprocess.check_output(command) # nosec + subprocess.check_output(command) # noqa: S603 def _get_sorted_user_tables_names(conn) -> List[str]: diff --git a/backend/src/baserow/core/management/commands/export_workspace_applications.py b/backend/src/baserow/core/management/commands/export_workspace_applications.py index 40d8e75239..bd70398e69 100644 --- a/backend/src/baserow/core/management/commands/export_workspace_applications.py +++ b/backend/src/baserow/core/management/commands/export_workspace_applications.py @@ -27,7 +27,7 @@ def add_arguments(self, parser): parser.add_argument( "workspace_id", type=int, - help="The id of the workspace that " "must be exported.", + help="The id of the workspace that must be exported.", ) parser.add_argument( "--indent", diff --git a/backend/src/baserow/core/management/utils.py b/backend/src/baserow/core/management/utils.py index 2957077f12..3ebb11efea 100644 --- a/backend/src/baserow/core/management/utils.py +++ b/backend/src/baserow/core/management/utils.py @@ -31,7 +31,7 @@ def run_command_concurrently( for i in range(concurrency): # Only used by management commands so safe to Popen - p = subprocess.Popen( # nosec + p = subprocess.Popen( # noqa: S603 command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, @@ -60,10 +60,7 @@ def run_command_concurrently( curses.endwin() if error_processes: - print( - f"Errors from subprocesses were (they will be mixed due to " - f"concurrency):" - ) + print(f"Errors from subprocesses were (they will be mixed due to concurrency):") for error_process in error_processes: print(error_process.stderr.read(), file=sys.stderr) raise CommandError( diff --git a/backend/src/baserow/core/output_parsers.py b/backend/src/baserow/core/output_parsers.py index 
720e791bbf..8862dee0fc 100644 --- a/backend/src/baserow/core/output_parsers.py +++ b/backend/src/baserow/core/output_parsers.py @@ -15,10 +15,9 @@ def get_format_instructions(self) -> str: - Don't use quotes or commas or partial values, just the option name. - Choose the option that most closely matches the row values. - ```json - {json_array} - ```""" # nosec this falsely marks as hardcoded sql expression, but it's not related - # to SQL at all. +```json +{json_array} +```""" # noqa: S608 - not SQL, just a JSON template def parse(self, response: str) -> Any: response = response.strip() diff --git a/backend/src/baserow/core/populate.py b/backend/src/baserow/core/populate.py index c26eb2f339..a8c800b6f6 100644 --- a/backend/src/baserow/core/populate.py +++ b/backend/src/baserow/core/populate.py @@ -38,13 +38,13 @@ def load_test_data(): try: workspace = Workspace.objects.get( - name=f"Acme Corp ({i +1})" if i > 0 else "Acme Corp" + name=f"Acme Corp ({i + 1})" if i > 0 else "Acme Corp" ) except Workspace.DoesNotExist: workspace = ( CoreHandler() .create_workspace( - admin, name=f"Acme Corp ({i +1})" if i > 0 else "Acme Corp" + admin, name=f"Acme Corp ({i + 1})" if i > 0 else "Acme Corp" ) .workspace ) @@ -52,7 +52,9 @@ def load_test_data(): # Create a second admin for the workspace email = f"admin{i + 1}_bis@baserow.io" if i > 0 else "admin_bis@baserow.io" try: - admin_bis = user_handler.create_user(f"Admin {i+1} bis", email, "password") + admin_bis = user_handler.create_user( + f"Admin {i + 1} bis", email, "password" + ) except UserAlreadyExist: admin_bis = User.objects.get(email=email) WorkspaceUser.objects.update_or_create( @@ -62,14 +64,14 @@ def load_test_data(): ) for j in range(3): - member_email_prefix = f"member{i+1}" if j > 0 else "member" + member_email_prefix = f"member{i + 1}" if j > 0 else "member" member_email_prefix = ( - f"{member_email_prefix}_{j+1}" if i > 0 else member_email_prefix + f"{member_email_prefix}_{j + 1}" if i > 0 else 
member_email_prefix ) member_email = f"{member_email_prefix}@baserow.io" try: member = user_handler.create_user( - f"Member {i+1} {j+1}", member_email, "password" + f"Member {i + 1} {j + 1}", member_email, "password" ) except UserAlreadyExist: member = User.objects.get(email=member_email) diff --git a/backend/src/baserow/core/psycopg.py b/backend/src/baserow/core/psycopg.py index 09f598ac9e..4bfedc1cf3 100644 --- a/backend/src/baserow/core/psycopg.py +++ b/backend/src/baserow/core/psycopg.py @@ -18,8 +18,11 @@ else: import psycopg2 as psycopg # noqa: F401 - from psycopg2 import DataError # noqa: F401 - from psycopg2 import errors, sql # noqa: F401 + from psycopg2 import ( # noqa: F401 + DataError, # noqa: F401 + errors, + sql, + ) def is_deadlock_error(exc: OperationalError) -> bool: diff --git a/backend/src/baserow/core/registry.py b/backend/src/baserow/core/registry.py index 8468de6738..cf43673a96 100644 --- a/backend/src/baserow/core/registry.py +++ b/backend/src/baserow/core/registry.py @@ -166,7 +166,7 @@ def __init__(self): :raises ValueError: If the object does not have a `model_class` attribute. """ - model_class = getattr(self, "model_class") + model_class = self.model_class if not model_class: raise ValueError( "Attribute model_class must be set, maybe you forgot to " @@ -987,7 +987,7 @@ def get_serializer( :rtype: ModelSerializer """ - get_by_model = getattr(self, "get_by_model") + get_by_model = self.get_by_model if not get_by_model: raise ValueError( "The method get_by_model must exist on the registry in " diff --git a/backend/src/baserow/core/services/formula_importer.py b/backend/src/baserow/core/services/formula_importer.py index 5613cfcfde..cf55b43210 100644 --- a/backend/src/baserow/core/services/formula_importer.py +++ b/backend/src/baserow/core/services/formula_importer.py @@ -12,8 +12,7 @@ class BaserowFormulaImporter(BaserowFormulaVisitor, ABC): """ @abstractmethod - def get_data_provider_type_registry(self): - ... 
+ def get_data_provider_type_registry(self): ... def __init__(self, id_mapping, **kwargs): self.id_mapping = id_mapping diff --git a/backend/src/baserow/core/telemetry/env_overrides_parser.py b/backend/src/baserow/core/telemetry/env_overrides_parser.py index da4856e54e..3fe1e575b2 100644 --- a/backend/src/baserow/core/telemetry/env_overrides_parser.py +++ b/backend/src/baserow/core/telemetry/env_overrides_parser.py @@ -47,9 +47,9 @@ def get_sampler_overrides_from_str(overrides: str) -> Dict[str, Sampler]: for override in overrides: module_and_sampler = _try_get_sampler_and_module_from_str(override) if module_and_sampler is not None: - per_module_sampler_overrides[ - module_and_sampler.module - ] = module_and_sampler.sampler + per_module_sampler_overrides[module_and_sampler.module] = ( + module_and_sampler.sampler + ) return per_module_sampler_overrides @@ -115,8 +115,7 @@ def _generate_sampler_from_string_args( return None logger.info( - "Instrumentation from module: {} overriden to use sampler {} with " - "param {}", + "Instrumentation from module: {} overriden to use sampler {} with param {}", module, trace_sampler, rate, diff --git a/backend/src/baserow/core/telemetry/telemetry.py b/backend/src/baserow/core/telemetry/telemetry.py index 9e67e6b9d0..e12cc381d6 100644 --- a/backend/src/baserow/core/telemetry/telemetry.py +++ b/backend/src/baserow/core/telemetry/telemetry.py @@ -22,7 +22,7 @@ def emit(self, record: logging.LogRecord) -> None: del record.extra # by default otel doesn't send funcName, rename it so it does. 
- setattr(record, "python_function", record.funcName) + record.python_function = record.funcName super().emit(record) diff --git a/backend/src/baserow/core/trash/handler.py b/backend/src/baserow/core/trash/handler.py index 1107c6c456..08abd5c473 100755 --- a/backend/src/baserow/core/trash/handler.py +++ b/backend/src/baserow/core/trash/handler.py @@ -279,11 +279,14 @@ def try_perm_delete_trash_entry( trash_entry: TrashEntry, trash_item_lookup_cache: Optional[Dict[str, Any]] = None, ): - """ + ( + """ Responsible for finding the trash item type for this `TrashEntry`, then finding the model to destroy and passing it into `_permanently_delete_and_signal` for it to be permanently deleted. - """ "" + """ + "" + ) trash_item_type = trash_item_type_registry.get(trash_entry.trash_item_type) diff --git a/backend/src/baserow/core/two_factor_auth/exceptions.py b/backend/src/baserow/core/two_factor_auth/exceptions.py index 976df929a3..1011c14330 100644 --- a/backend/src/baserow/core/two_factor_auth/exceptions.py +++ b/backend/src/baserow/core/two_factor_auth/exceptions.py @@ -1,22 +1,16 @@ -class TwoFactorAuthTypeDoesNotExist(Exception): - ... +class TwoFactorAuthTypeDoesNotExist(Exception): ... -class VerificationFailed(Exception): - ... +class VerificationFailed(Exception): ... -class WrongPassword(Exception): - ... +class WrongPassword(Exception): ... -class TwoFactorAuthAlreadyConfigured(Exception): - ... +class TwoFactorAuthAlreadyConfigured(Exception): ... -class TwoFactorAuthNotConfigured(Exception): - ... +class TwoFactorAuthNotConfigured(Exception): ... -class TwoFactorAuthCannotBeConfigured(Exception): - ... +class TwoFactorAuthCannotBeConfigured(Exception): ... 
diff --git a/backend/src/baserow/core/types.py b/backend/src/baserow/core/types.py index 8438f53d35..c60e42a26f 100644 --- a/backend/src/baserow/core/types.py +++ b/backend/src/baserow/core/types.py @@ -1,8 +1,10 @@ from typing import TYPE_CHECKING, Any, NamedTuple, Optional, TypedDict, Union if TYPE_CHECKING: - from django.contrib.auth.models import AbstractUser # noqa: F401 - from django.contrib.auth.models import AnonymousUser # noqa: F401 + from django.contrib.auth.models import ( + AbstractUser, # noqa: F401 + AnonymousUser, # noqa: F401 + ) from baserow.contrib.automation.models import Automation # noqa: F401 from baserow.contrib.builder.models import Builder # noqa: F401 diff --git a/backend/src/baserow/core/user_files/handler.py b/backend/src/baserow/core/user_files/handler.py index 4ac2ec2680..dc727d5806 100644 --- a/backend/src/baserow/core/user_files/handler.py +++ b/backend/src/baserow/core/user_files/handler.py @@ -58,7 +58,7 @@ def get_user_file_url(self, user_file): """Returns user file url""" storage = get_default_storage() - name = getattr(user_file, "name") + name = user_file.name path = UserFileHandler().user_file_path(name) url = storage.url(path) return url diff --git a/backend/src/baserow/core/utils.py b/backend/src/baserow/core/utils.py index c0f03a9d0f..3c3e5e79be 100644 --- a/backend/src/baserow/core/utils.py +++ b/backend/src/baserow/core/utils.py @@ -455,7 +455,7 @@ def truncate_middle(content, max_length, middle="..."): if max_length <= len(middle): raise ValueError( - "The max_length cannot be lower than the length if the " "middle string." + "The max_length cannot be lower than the length if the middle string." ) total = max_length - len(middle) @@ -545,7 +545,7 @@ def remove_invalid_surrogate_characters( ) -> str: """ Removes illegal unicode characters from the provided content. 
If you for example - run something like `b"\uD83D".encode("utf-8")`, it will result in a + run something like `b"\ud83d".encode("utf-8")`, it will result in a UnicodeEncodeError. This function removed the illegal characters, it keeps the valid emoji's. diff --git a/backend/src/baserow/manage.py b/backend/src/baserow/manage.py index cfe0e0e51c..7728dcb5db 100755 --- a/backend/src/baserow/manage.py +++ b/backend/src/baserow/manage.py @@ -11,7 +11,7 @@ def enable_debugger(): import debugpy debugger_port = int(os.environ.get("BASEROW_BACKEND_DEBUGGER_PORT", 5678)) - debugpy.listen(("0.0.0.0", debugger_port)) # nosec + debugpy.listen(("0.0.0.0", debugger_port)) # noqa: S104 print(f"Debugger attached! Listening on 0.0.0.0:{debugger_port}") diff --git a/backend/src/baserow/test_utils/fixtures/element.py b/backend/src/baserow/test_utils/fixtures/element.py index 275d6318f3..a82b391b65 100644 --- a/backend/src/baserow/test_utils/fixtures/element.py +++ b/backend/src/baserow/test_utils/fixtures/element.py @@ -70,9 +70,9 @@ def create_builder_table_element(self, user=None, page=None, **kwargs): ) if "data_source" not in kwargs: - kwargs[ - "data_source" - ] = self.create_builder_local_baserow_list_rows_data_source(page=page) + kwargs["data_source"] = ( + self.create_builder_local_baserow_list_rows_data_source(page=page) + ) element = self.create_builder_element(TableElement, user, page, **kwargs) @@ -103,17 +103,17 @@ def create_builder_choice_element(self, user=None, page=None, **kwargs): def create_builder_repeat_element(self, user=None, page=None, **kwargs): if "data_source" not in kwargs: - kwargs[ - "data_source" - ] = self.create_builder_local_baserow_list_rows_data_source(page=page) + kwargs["data_source"] = ( + self.create_builder_local_baserow_list_rows_data_source(page=page) + ) element = self.create_builder_element(RepeatElement, user, page, **kwargs) return element def create_builder_record_selector_element(self, user=None, page=None, **kwargs): if "data_source" 
not in kwargs: - kwargs[ - "data_source" - ] = self.create_builder_local_baserow_list_rows_data_source(page=page) + kwargs["data_source"] = ( + self.create_builder_local_baserow_list_rows_data_source(page=page) + ) element = self.create_builder_element( RecordSelectorElement, user, page, **kwargs ) diff --git a/backend/src/baserow/test_utils/fixtures/job.py b/backend/src/baserow/test_utils/fixtures/job.py index fb7fa9811e..5e8a16c2ea 100644 --- a/backend/src/baserow/test_utils/fixtures/job.py +++ b/backend/src/baserow/test_utils/fixtures/job.py @@ -14,8 +14,7 @@ ) -class TestException(Exception): - ... +class TestException(Exception): ... class TmpJobType1FiltersSerializer(serializers.Serializer): diff --git a/backend/src/baserow/test_utils/helpers.py b/backend/src/baserow/test_utils/helpers.py index e80ad1d112..eff0606710 100644 --- a/backend/src/baserow/test_utils/helpers.py +++ b/backend/src/baserow/test_utils/helpers.py @@ -129,19 +129,19 @@ def setup_interesting_test_table( link_table_duration_field = data_fixture.create_duration_field( table=link_table, name="duration_field" ) - linked_tables[ - "decimal_link_table" - ] = decimal_link_table = data_fixture.create_database_table( - database=database, user=user, name="decimal_link_table" + linked_tables["decimal_link_table"] = decimal_link_table = ( + data_fixture.create_database_table( + database=database, user=user, name="decimal_link_table" + ) ) - linked_tables[ - "file_link_table" - ] = file_link_table = data_fixture.create_database_table( - database=database, user=user, name="file_link_table" + linked_tables["file_link_table"] = file_link_table = ( + data_fixture.create_database_table( + database=database, user=user, name="file_link_table" + ) ) - linked_tables[ - "multiple_collaborators_link_table" - ] = multiple_collaborators_link_table = data_fixture.create_database_table( + linked_tables["multiple_collaborators_link_table"] = ( + multiple_collaborators_link_table + ) = 
data_fixture.create_database_table( database=database, user=user, name="multiple_collaborators_link_table" ) all_possible_kwargs_per_type = construct_all_possible_field_kwargs( @@ -524,17 +524,17 @@ def register_instance_temporarily(registry, instance): def assert_undo_redo_actions_are_valid( actions: List[Action], expected_action_types: List[Type[ActionType]] ): - assert len(actions) == len( - expected_action_types - ), f"Expected {len(actions)} actions but got {len(expected_action_types)} action_types" + assert len(actions) == len(expected_action_types), ( + f"Expected {len(actions)} actions but got {len(expected_action_types)} action_types" + ) for action, expected_action_type in zip(actions, expected_action_types): - assert ( - action.type == expected_action_type.type - ), f"Action expected of type {expected_action_type} but got {action}" - assert ( - action is not None - ), f"Action is None, but should be of type {expected_action_type}" + assert action.type == expected_action_type.type, ( + f"Action expected of type {expected_action_type} but got {action}" + ) + assert action is not None, ( + f"Action is None, but should be of type {expected_action_type}" + ) assert action.error is None, f"Action has error: {action.error}" diff --git a/backend/src/baserow/test_utils/pytest_conftest.py b/backend/src/baserow/test_utils/pytest_conftest.py index f07dfdadc0..01fabb1bad 100755 --- a/backend/src/baserow/test_utils/pytest_conftest.py +++ b/backend/src/baserow/test_utils/pytest_conftest.py @@ -465,9 +465,9 @@ def authenticate(self, user_source, **kwargs): def get_roles(self): return [] - mutable_user_source_registry.registry[ - user_source_type.type - ] = StubbedUserSourceType() + mutable_user_source_registry.registry[user_source_type.type] = ( + StubbedUserSourceType() + ) user_source_type_registry.get_for_class.cache_clear() yield user_source_type_registry @@ -505,9 +505,9 @@ def patch(new_storage=None): for model in apps.get_models(): filefields = (f for f in 
model._meta.fields if isinstance(f, FileField)) for filefield in filefields: - previous_storages[ - f"{model._meta.label}_{filefield.name}" - ] = filefield.storage + previous_storages[f"{model._meta.label}_{filefield.name}"] = ( + filefield.storage + ) filefield.storage = new_storage yield new_storage @@ -665,9 +665,9 @@ def _perform_stub(raise_transaction_exception: bool = True): stub_application_type = MaxLocksPerTransactionExceededApplicationType() stub_application_type.raise_transaction_exception = raise_transaction_exception mutable_application_registry.get_for_class.cache_clear() - mutable_application_registry.registry[ - DatabaseApplicationType.type - ] = stub_application_type + mutable_application_registry.registry[DatabaseApplicationType.type] = ( + stub_application_type + ) yield stub_application_type @@ -690,9 +690,9 @@ def _perform_stub(raise_transaction_exception: bool = True): stub_trash_item_type = MaxLocksPerTransactionExceededGroupTrashableItemType() stub_trash_item_type.raise_transaction_exception = raise_transaction_exception mutable_trash_item_type_registry.get_for_class.cache_clear() - mutable_trash_item_type_registry.registry[ - WorkspaceTrashableItemType.type - ] = stub_trash_item_type + mutable_trash_item_type_registry.registry[WorkspaceTrashableItemType.type] = ( + stub_trash_item_type + ) yield stub_trash_item_type @@ -730,9 +730,9 @@ def bypass_check_permissions( stub_core_permission_manager = StubbedCorePermissionManagerType() for perm_manager in django_settings.PERMISSION_MANAGERS: - mutable_permission_manager_registry.registry[ - perm_manager - ] = stub_core_permission_manager + mutable_permission_manager_registry.registry[perm_manager] = ( + stub_core_permission_manager + ) yield stub_core_permission_manager @@ -756,9 +756,9 @@ def _perform_stub( raise_permission_denied ) first_manager = django_settings.PERMISSION_MANAGERS[0] - permission_manager_type_registry.registry[ - first_manager - ] = stub_core_permission_manager + 
permission_manager_type_registry.registry[first_manager] = ( + stub_core_permission_manager + ) yield stub_core_permission_manager permission_manager_type_registry.registry = before @@ -1088,17 +1088,17 @@ def create_postgresql_test_table(): create_table_sql = f""" CREATE TABLE {table_name} ( id SERIAL PRIMARY KEY, - {', '.join([f"{col_name} {col_type}" for col_name, col_type in column_definitions.items()])} + {", ".join([f"{col_name} {col_type}" for col_name, col_type in column_definitions.items()])} ) """ # Inserts a couple of random rows for testing purposes. insert_sql = f""" - INSERT INTO {table_name} ({', '.join(column_definitions.keys())}) + INSERT INTO {table_name} ({", ".join(column_definitions.keys())}) VALUES ( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s ) - """ + """ # noqa: S608 try: with connection.cursor() as cursor: diff --git a/backend/src/baserow/throttling.py b/backend/src/baserow/throttling.py index 521a4913bf..dfb3545d54 100644 --- a/backend/src/baserow/throttling.py +++ b/backend/src/baserow/throttling.py @@ -135,7 +135,7 @@ def allow_request(self, request, view): ) if allowed: - django_request = getattr(request, "_request") + django_request = request._request setattr(django_request, BASEROW_CONCURRENCY_THROTTLE_REQUEST_ID, request_id) log_msg = "ALLOWING: as count={count} < limit={limit}" else: diff --git a/backend/tests/baserow/api/admin/groups/test_workspaces_admin_views.py b/backend/tests/baserow/api/admin/groups/test_workspaces_admin_views.py index 078125ce0c..f077b400a0 100644 --- a/backend/tests/baserow/api/admin/groups/test_workspaces_admin_views.py +++ b/backend/tests/baserow/api/admin/groups/test_workspaces_admin_views.py @@ -104,7 +104,7 @@ class as the list users endpoint which already has extensive tests. 
We only need } response = api_client.get( - f'{reverse("api:admin:workspaces:list")}?search={workspace_1.name}', + f"{reverse('api:admin:workspaces:list')}?search={workspace_1.name}", format="json", HTTP_AUTHORIZATION=f"JWT {staff_token}", ) @@ -136,7 +136,7 @@ class as the list users endpoint which already has extensive tests. We only need } response = api_client.get( - f'{reverse("api:admin:workspaces:list")}?sorts=-name', + f"{reverse('api:admin:workspaces:list')}?sorts=-name", format="json", HTTP_AUTHORIZATION=f"JWT {staff_token}", ) diff --git a/backend/tests/baserow/api/applications/test_application_views.py b/backend/tests/baserow/api/applications/test_application_views.py index 7a7c88d7c9..1aadf79f89 100644 --- a/backend/tests/baserow/api/applications/test_application_views.py +++ b/backend/tests/baserow/api/applications/test_application_views.py @@ -116,9 +116,9 @@ def test_list_applications(api_client, data_fixture, django_assert_num_queries): reverse("api:applications:list"), **{"HTTP_AUTHORIZATION": f"JWT {token}"} ) - assert ( - len(mock_filter_queryset.mock_calls) <= 2 + 4 - ), "Should trigger max 1 call by workspace + 1 by applications" + assert len(mock_filter_queryset.mock_calls) <= 2 + 4, ( + "Should trigger max 1 call by workspace + 1 by applications" + ) assert response.status_code == HTTP_200_OK response_json = response.json() diff --git a/backend/tests/baserow/api/test_api_utils.py b/backend/tests/baserow/api/test_api_utils.py index 27517738f9..8a98139a67 100644 --- a/backend/tests/baserow/api/test_api_utils.py +++ b/backend/tests/baserow/api/test_api_utils.py @@ -186,8 +186,7 @@ def test_map_exceptions_context_manager(): def test_map_exceptions_from_registry(): - class TestException(Exception): - ... + class TestException(Exception): ... 
test_error = ( "TEST_ERROR", diff --git a/backend/tests/baserow/api/user_sources/test_user_source_auth_views.py b/backend/tests/baserow/api/user_sources/test_user_source_auth_views.py index e3e3f7dc07..2daa7e45ee 100644 --- a/backend/tests/baserow/api/user_sources/test_user_source_auth_views.py +++ b/backend/tests/baserow/api/user_sources/test_user_source_auth_views.py @@ -533,9 +533,10 @@ def test_refresh_json_web_token(api_client, data_fixture, stub_user_source_regis assert "access_token" in response_json assert "refresh_token" not in response_json - with patch( - "baserow.api.user_sources.serializers.jwt_settings" - ) as mocked_settings, stub_user_source_registry(get_user_return=us_user): + with ( + patch("baserow.api.user_sources.serializers.jwt_settings") as mocked_settings, + stub_user_source_registry(get_user_return=us_user), + ): mocked_settings.ROTATE_REFRESH_TOKENS = True mocked_settings.BLACKLIST_AFTER_ROTATION = False mocked_settings.USER_ID_CLAIM = "user_id" @@ -554,9 +555,10 @@ def test_refresh_json_web_token(api_client, data_fixture, stub_user_source_regis assert BlacklistedToken.objects.count() == 0 token = us_user.get_refresh_token() - with patch( - "baserow.api.user_sources.serializers.jwt_settings" - ) as mocked_settings, stub_user_source_registry(get_user_return=us_user): + with ( + patch("baserow.api.user_sources.serializers.jwt_settings") as mocked_settings, + stub_user_source_registry(get_user_return=us_user), + ): mocked_settings.ROTATE_REFRESH_TOKENS = True mocked_settings.BLACKLIST_AFTER_ROTATION = True mocked_settings.USER_ID_CLAIM = "user_id" diff --git a/backend/tests/baserow/api/user_sources/test_user_source_authentication_backend.py b/backend/tests/baserow/api/user_sources/test_user_source_authentication_backend.py index 1d5f42468f..8f858f3d89 100644 --- a/backend/tests/baserow/api/user_sources/test_user_source_authentication_backend.py +++ b/backend/tests/baserow/api/user_sources/test_user_source_authentication_backend.py @@ -460,7 
+460,8 @@ def test_user_source_authenticate_missing_user( def get_user_raise_user_not_found(*args, **kwargs): raise UserNotFound() - with pytest.raises(AuthenticationFailed), stub_user_source_registry( - get_user_return=get_user_raise_user_not_found + with ( + pytest.raises(AuthenticationFailed), + stub_user_source_registry(get_user_return=get_user_raise_user_not_found), ): auth.authenticate(fake_request) diff --git a/backend/tests/baserow/api/users/test_user_views.py b/backend/tests/baserow/api/users/test_user_views.py index a707256980..9690983107 100755 --- a/backend/tests/baserow/api/users/test_user_views.py +++ b/backend/tests/baserow/api/users/test_user_views.py @@ -177,7 +177,7 @@ def test_create_user(client, data_fixture): assert response_json["detail"]["language"][0]["code"] == "invalid_language" assert response_json["detail"]["language"][0]["error"] == ( "Only the following language keys are " - f"valid: {','.join([l[0] for l in settings.LANGUAGES])}" + f"valid: {','.join([lang[0] for lang in settings.LANGUAGES])}" ) # Test username with maximum length @@ -267,7 +267,7 @@ def test_user_account(data_fixture, api_client): assert response_json["detail"]["language"][0]["code"] == "invalid_language" assert response_json["detail"]["language"][0]["error"] == ( "Only the following language keys are " - f"valid: {','.join([l[0] for l in settings.LANGUAGES])}" + f"valid: {','.join([lang[0] for lang in settings.LANGUAGES])}" ) response = api_client.patch( diff --git a/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py b/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py index 7695a99f9d..71c4b69977 100644 --- a/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py +++ b/backend/tests/baserow/contrib/builder/data_providers/test_data_provider_types.py @@ -14,16 +14,14 @@ DataSourceContextDataProviderType, DataSourceDataProviderType, DataSourceHandler, -) -from 
baserow.contrib.builder.data_providers.data_provider_types import ( - ElementHandler as ElementHandlerToMock, -) -from baserow.contrib.builder.data_providers.data_provider_types import ( FormDataProviderType, PageParameterDataProviderType, PreviousActionProviderType, UserDataProviderType, ) +from baserow.contrib.builder.data_providers.data_provider_types import ( + ElementHandler as ElementHandlerToMock, +) from baserow.contrib.builder.data_sources.builder_dispatch_context import ( BuilderDispatchContext, ) diff --git a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py index c17d4c2085..f898431ea8 100644 --- a/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py +++ b/backend/tests/baserow/contrib/builder/data_sources/test_data_source_service.py @@ -160,8 +160,9 @@ def test_create_data_source_permission_denied(data_fixture, stub_check_permissio service_type = service_type_registry.get("local_baserow_get_row") - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DataSourceService().create_data_source( user, @@ -193,8 +194,9 @@ def test_get_data_source_permission_denied(data_fixture, stub_check_permissions) user = data_fixture.create_user() data_source = data_fixture.create_builder_data_source(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DataSourceService().get_data_source(user, data_source.id) @@ -259,8 +261,9 @@ def test_delete_data_source_permission_denied(data_fixture, stub_check_permissio user = data_fixture.create_user() data_source = data_fixture.create_builder_data_source(user=user) - with 
stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DataSourceService().delete_data_source(user, data_source) @@ -298,8 +301,9 @@ def test_update_data_source_permission_denied(data_fixture, stub_check_permissio user = data_fixture.create_user() data_source = data_fixture.create_builder_data_source(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DataSourceService().update_data_source(user, data_source, value="newValue") @@ -366,8 +370,9 @@ def test_move_data_source_permission_denied(data_fixture, stub_check_permissions page=page ) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DataSourceService().move_data_source(user, data_source3, before=data_source2) @@ -544,8 +549,9 @@ def test_dispatch_data_source_permission_denied(data_fixture, stub_check_permiss HttpRequest(), page, only_expose_public_allowed_properties=False ) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DataSourceService().dispatch_data_source(user, data_source, dispatch_context) diff --git a/backend/tests/baserow/contrib/builder/domains/test_domain_service.py b/backend/tests/baserow/contrib/builder/domains/test_domain_service.py index b135244ba9..7802fe62a0 100644 --- a/backend/tests/baserow/contrib/builder/domains/test_domain_service.py +++ b/backend/tests/baserow/contrib/builder/domains/test_domain_service.py @@ -209,8 +209,9 @@ def test_get_published_builder_by_domain_name_unauthorized( 
builder=builder, published_to=builder_to ) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DomainService().get_public_builder_by_domain_name(user, domain1.domain_name) @@ -235,8 +236,9 @@ def test_async_publish_domain_no_permission(data_fixture, stub_check_permissions user = data_fixture.create_user() domain1 = data_fixture.create_builder_custom_domain() - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DomainService().async_publish(user, domain1) @@ -265,7 +267,8 @@ def test_publish_domain_unauthorized(data_fixture, stub_check_permissions): progress = Progress(100) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): DomainService().publish(user, domain1, progress) diff --git a/backend/tests/baserow/contrib/builder/elements/mixins/test_collection_element_type_mixin.py b/backend/tests/baserow/contrib/builder/elements/mixins/test_collection_element_type_mixin.py index 723b32d6dd..e025e344a1 100644 --- a/backend/tests/baserow/contrib/builder/elements/mixins/test_collection_element_type_mixin.py +++ b/backend/tests/baserow/contrib/builder/elements/mixins/test_collection_element_type_mixin.py @@ -1,6 +1,7 @@ """ Test the CollectionElementTypeMixin class. 
""" + import json from io import BytesIO from unittest.mock import MagicMock diff --git a/backend/tests/baserow/contrib/builder/elements/test_element_handler.py b/backend/tests/baserow/contrib/builder/elements/test_element_handler.py index 585a9be710..82a92eb385 100644 --- a/backend/tests/baserow/contrib/builder/elements/test_element_handler.py +++ b/backend/tests/baserow/contrib/builder/elements/test_element_handler.py @@ -127,7 +127,7 @@ def test_get_elements(data_fixture, django_assert_num_queries): elements = ElementHandler().get_elements(page) # Cache of specific elements is set. - assert getattr(page, "_page_elements_specific") == elements + assert page._page_elements_specific == elements assert [e.id for e in elements] == [ element1.id, @@ -142,24 +142,24 @@ def test_get_elements(data_fixture, django_assert_num_queries): # Cache of specific elements is re-used. with django_assert_num_queries(0): elements = ElementHandler().get_elements(page) - assert getattr(page, "_page_elements_specific") == elements + assert page._page_elements_specific == elements # We request non-specific records, the cache changes. with django_assert_num_queries(1): elements = list(ElementHandler().get_elements(page, specific=False)) - assert getattr(page, "_page_elements") == elements + assert page._page_elements == elements # We request non-specific records, the cache is reused. with django_assert_num_queries(0): elements = list(ElementHandler().get_elements(page, specific=False)) - assert getattr(page, "_page_elements") == elements + assert page._page_elements == elements # We pass in a base queryset, no caching strategy is available. 
base_queryset = Element.objects.filter(page=page, visibility="all") with django_assert_num_queries(3): ElementHandler().get_elements(page, base_queryset) - assert getattr(page, "_page_elements") is None - assert getattr(page, "_page_elements_specific") is None + assert page._page_elements is None + assert page._page_elements_specific is None @pytest.mark.django_db diff --git a/backend/tests/baserow/contrib/builder/elements/test_element_service.py b/backend/tests/baserow/contrib/builder/elements/test_element_service.py index f9f68e246c..ab72b563b0 100644 --- a/backend/tests/baserow/contrib/builder/elements/test_element_service.py +++ b/backend/tests/baserow/contrib/builder/elements/test_element_service.py @@ -149,8 +149,9 @@ def test_create_element_permission_denied(data_fixture, stub_check_permissions): element_type = element_type_registry.get("heading") - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): ElementService().create_element( user, @@ -181,8 +182,9 @@ def test_get_element_permission_denied(data_fixture, stub_check_permissions): user = data_fixture.create_user() element = data_fixture.create_builder_heading_element(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): ElementService().get_element(user, element.id) @@ -269,8 +271,9 @@ def test_delete_element_permission_denied(data_fixture, stub_check_permissions): user = data_fixture.create_user() element = data_fixture.create_builder_heading_element(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): ElementService().delete_element(user, 
element) @@ -294,8 +297,9 @@ def test_update_element_permission_denied(data_fixture, stub_check_permissions): user = data_fixture.create_user() element = data_fixture.create_builder_heading_element(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): ElementService().update_element(user, element, value="newValue") @@ -350,8 +354,9 @@ def test_move_element_permission_denied(data_fixture, stub_check_permissions): element2 = data_fixture.create_builder_heading_element(page=page) element3 = data_fixture.create_builder_text_element(page=page) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): ElementService().move_element( user, @@ -408,7 +413,8 @@ def test_duplicate_element_permission_denied(data_fixture, stub_check_permission user = data_fixture.create_user() element = data_fixture.create_builder_heading_element(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): ElementService().duplicate_element(user, element) diff --git a/backend/tests/baserow/contrib/builder/test_builder_application_type.py b/backend/tests/baserow/contrib/builder/test_builder_application_type.py index b8fa40fa4d..ad9bd4c585 100644 --- a/backend/tests/baserow/contrib/builder/test_builder_application_type.py +++ b/backend/tests/baserow/contrib/builder/test_builder_application_type.py @@ -1512,7 +1512,7 @@ def test_builder_application_imports_correct_default_roles(data_fixture): serialized_element = serialized_values["pages"][0]["elements"][0] serialized_element["role_type"] = "allow_all_except" serialized_element["roles"] = [ - 
f'__user_source_{serialized_user_source["id"]}', + f"__user_source_{serialized_user_source['id']}", ] # Save the single element back to the list. We only need one element diff --git a/backend/tests/baserow/contrib/builder/test_element_formula_mixin.py b/backend/tests/baserow/contrib/builder/test_element_formula_mixin.py index 75d17ba22e..5c63b8dd90 100644 --- a/backend/tests/baserow/contrib/builder/test_element_formula_mixin.py +++ b/backend/tests/baserow/contrib/builder/test_element_formula_mixin.py @@ -87,10 +87,9 @@ def test_element_formula_generator_mixin( in the test_link_element_formula_generator() test case. """ - simple_formula_fields = { - formula_field: formula_generator_fixture["formula_1"] - for formula_field in element_type.simple_formula_fields - } + simple_formula_fields = dict.fromkeys( + element_type.simple_formula_fields, formula_generator_fixture["formula_1"] + ) exported_element = data_fixture.create_builder_element( element_cls, **simple_formula_fields, diff --git a/backend/tests/baserow/contrib/builder/test_runtime_formula_results.py b/backend/tests/baserow/contrib/builder/test_runtime_formula_results.py index 81784aa016..870db3fa1e 100644 --- a/backend/tests/baserow/contrib/builder/test_runtime_formula_results.py +++ b/backend/tests/baserow/contrib/builder/test_runtime_formula_results.py @@ -335,7 +335,7 @@ def test_runtime_formula_if(data_fixture): formula, formula_runtime_function_registry, dispatch_context ) assert result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) @@ -362,7 +362,7 @@ def test_runtime_formula_get_property(data_fixture): ) expected = "Dark Red" assert result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) @@ -388,7 +388,7 @@ def test_runtime_formula_datetime_format(data_fixture): ) expected = "12-Nov-2025 21:22:23" assert 
result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) @@ -421,7 +421,7 @@ def test_runtime_formula_comparison_operator(data_fixture): formula, formula_runtime_function_registry, dispatch_context ) assert result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) @@ -454,7 +454,7 @@ def test_runtime_formula_comparison(data_fixture): formula, formula_runtime_function_registry, dispatch_context ) assert result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) @@ -484,7 +484,7 @@ def test_runtime_formula_boolean(data_fixture): formula, formula_runtime_function_registry, dispatch_context ) assert result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) @@ -512,7 +512,7 @@ def test_runtime_formula_date(data_fixture): formula, formula_runtime_function_registry, dispatch_context ) assert result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) @@ -541,7 +541,7 @@ def test_runtime_formula_arithmetic(data_fixture): formula, formula_runtime_function_registry, dispatch_context ) assert result == expected, ( - f"{value} expected to resolve to {expected} " f"but got {result}" + f"{value} expected to resolve to {expected} but got {result}" ) diff --git a/backend/tests/baserow/contrib/database/airtable/test_airtable_utils.py b/backend/tests/baserow/contrib/database/airtable/test_airtable_utils.py index 442f5f4527..994e90d969 100644 --- a/backend/tests/baserow/contrib/database/airtable/test_airtable_utils.py +++ b/backend/tests/baserow/contrib/database/airtable/test_airtable_utils.py @@ -28,8 
+28,7 @@ def test_extract_share_id_from_url(): ) long_share_id = ( - "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/" - "viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" + "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" ) assert ( extract_share_id_from_url(f"https://airtable.com/{long_share_id}") diff --git a/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py b/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py index 13c472a790..5aba93e4e1 100644 --- a/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py +++ b/backend/tests/baserow/contrib/database/api/airtable/test_airtable_views.py @@ -168,8 +168,7 @@ def test_create_airtable_import_job_long_share_id( user, token = data_fixture.create_user_and_token() workspace = data_fixture.create_workspace(user=user) long_share_id = ( - "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/" - "viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" + "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" ) with freeze_time("2025-01-01 12:00:00"): @@ -219,8 +218,7 @@ def test_create_airtable_import_job_skip_files( user, token = data_fixture.create_user_and_token() workspace = data_fixture.create_workspace(user=user) long_share_id = ( - "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/" - "viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" + "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" ) response = api_client.post( @@ -257,8 +255,7 @@ def test_create_airtable_import_job_with_session( user, token = data_fixture.create_user_and_token() workspace = data_fixture.create_workspace(user=user) long_share_id = ( - "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/" - "viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" + "shr22aXe5Hj32sPJB/tblU0bav59SSEyOkU/viwyUDJYyQPYuFj1F?blocks=bipEYER8Qq7fLoPbr" ) response = api_client.post( diff --git a/backend/tests/baserow/contrib/database/api/rows/test_batch_rows_views.py 
b/backend/tests/baserow/contrib/database/api/rows/test_batch_rows_views.py index 1616a0d39d..6153bb26f6 100644 --- a/backend/tests/baserow/contrib/database/api/rows/test_batch_rows_views.py +++ b/backend/tests/baserow/contrib/database/api/rows/test_batch_rows_views.py @@ -797,11 +797,11 @@ def test_batch_create_rows_dependent_fields(api_client, data_fixture): "items": [ { f"id": 1, - f"field_{formula_field.id}": f"{str(120*2)}", + f"field_{formula_field.id}": f"{str(120 * 2)}", }, { f"id": 2, - f"field_{formula_field.id}": f"{str(240*2)}", + f"field_{formula_field.id}": f"{str(240 * 2)}", }, ] } @@ -1978,11 +1978,11 @@ def test_batch_update_rows_dependent_fields(api_client, data_fixture): "items": [ { f"id": row_1.id, - f"field_{formula_field.id}": f"{str(120*2)}", + f"field_{formula_field.id}": f"{str(120 * 2)}", }, { f"id": row_2.id, - f"field_{formula_field.id}": f"{str(240*2)}", + f"field_{formula_field.id}": f"{str(240 * 2)}", }, ] } @@ -2420,9 +2420,9 @@ def test_batch_delete_rows_trash_them(api_client, data_fixture): row_1.refresh_from_db() row_2.refresh_from_db() row_3.refresh_from_db() - assert getattr(row_1, "trashed") is True - assert getattr(row_2, "trashed") is True - assert getattr(row_3, "trashed") is False + assert row_1.trashed is True + assert row_2.trashed is True + assert row_3.trashed is False @pytest.mark.django_db diff --git a/backend/tests/baserow/contrib/database/api/rows/test_row_views.py b/backend/tests/baserow/contrib/database/api/rows/test_row_views.py index b87c5f865d..8db5b551d8 100644 --- a/backend/tests/baserow/contrib/database/api/rows/test_row_views.py +++ b/backend/tests/baserow/contrib/database/api/rows/test_row_views.py @@ -289,7 +289,7 @@ def test_list_rows(api_client, data_fixture): url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) get_params = [f"filter__field_9999999__contains=last"] response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", 
HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -300,7 +300,7 @@ def test_list_rows(api_client, data_fixture): url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) get_params = [f"filter__field_{field_4.id}__contains=100"] response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -311,7 +311,7 @@ def test_list_rows(api_client, data_fixture): url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) get_params = [f"filter__field_{field_2.id}__INVALID=100"] response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -326,7 +326,7 @@ def test_list_rows(api_client, data_fixture): f"filter__field_{field_2.id}__equal=200", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -343,7 +343,7 @@ def test_list_rows(api_client, data_fixture): "filter_type=or", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -361,7 +361,7 @@ def test_list_rows(api_client, data_fixture): "filter_type=or", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -503,7 +503,7 @@ def test_list_rows_adhoc_filtering_query_param_null_character(api_client, data_f url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) get_params = [f"filter__field_{text_field.id}__contains={str_with_null_character}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -543,7 +543,7 @@ 
def test_list_rows_user_field_names(api_client, data_fixture): "filter_type=or", ] response = api_client.get( - f'{url}?user_field_names=true&{"&".join(get_params)}', + f"{url}?user_field_names=true&{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -562,7 +562,7 @@ def test_list_rows_user_field_names(api_client, data_fixture): "filter_type=or", ] response = api_client.get( - f'{url}?user_field_names=true&{"&".join(get_params)}', + f"{url}?user_field_names=true&{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -580,7 +580,7 @@ def test_list_rows_user_field_names(api_client, data_fixture): "filter_type=or", ] response = api_client.get( - f'{url}?user_field_names=true&{"&".join(get_params)}', + f"{url}?user_field_names=true&{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -597,7 +597,7 @@ def test_list_rows_user_field_names(api_client, data_fixture): "filter_type=or", ] response = api_client.get( - f'{url}?user_field_names=true&{"&".join(get_params)}', + f"{url}?user_field_names=true&{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -711,7 +711,7 @@ def test_list_rows_filter_filters_query_param(data_fixture, api_client): url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) @@ -764,7 +764,7 @@ def test_list_rows_filter_filters_query_param_with_user_field_names( url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", format="json", HTTP_AUTHORIZATION=f"JWT {jwt_token}", ) diff --git a/backend/tests/baserow/contrib/database/api/tables/test_table_views.py b/backend/tests/baserow/contrib/database/api/tables/test_table_views.py index 450f77046f..2df9313d56 
100644 --- a/backend/tests/baserow/contrib/database/api/tables/test_table_views.py +++ b/backend/tests/baserow/contrib/database/api/tables/test_table_views.py @@ -489,9 +489,10 @@ def test_create_table_with_data_sync(api_client, data_fixture, patch_filefield_s database = data_fixture.create_database_application(user=user) url = reverse("api:database:tables:list", kwargs={"database_id": database.id}) - with override_settings( - BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT=2 - ), patch_filefield_storage(): + with ( + override_settings(BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT=2), + patch_filefield_storage(), + ): response = api_client.post( url, { diff --git a/backend/tests/baserow/contrib/database/api/views/form/test_form_view_views.py b/backend/tests/baserow/contrib/database/api/views/form/test_form_view_views.py index c92f84795e..ed35fc4365 100644 --- a/backend/tests/baserow/contrib/database/api/views/form/test_form_view_views.py +++ b/backend/tests/baserow/contrib/database/api/views/form/test_form_view_views.py @@ -613,7 +613,7 @@ def test_submit_form_view_with_allowed_select_options_override_single_select( f"field_{single_select_field.id}": option_1.id, }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_200_OK @@ -624,7 +624,7 @@ def test_submit_form_view_with_allowed_select_options_override_single_select( f"field_{single_select_field.id}": option_2.id, }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) response_json = response.json() assert response_json == { @@ -683,7 +683,7 @@ def test_submit_form_view_with_allowed_select_options_override_multiple_select( f"field_{multiple_select_field.id}": [option_1.id], }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_200_OK @@ -694,7 +694,7 @@ def 
test_submit_form_view_with_allowed_select_options_override_multiple_select( f"field_{multiple_select_field.id}": [option_2.id], }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) response_json = response.json() assert response_json == { @@ -838,7 +838,7 @@ def test_submit_form_view(api_client, data_fixture): url = reverse("api:database:views:form:submit", kwargs={"slug": form.slug}) response = api_client.post( - url, {}, format="json", HTTP_AUTHORIZATION=f"JWT" f" {token_2}" + url, {}, format="json", HTTP_AUTHORIZATION=f"JWT {token_2}" ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FORM_DOES_NOT_EXIST" @@ -852,7 +852,7 @@ def test_submit_form_view(api_client, data_fixture): f"field_{number_field.id}": {}, }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_400_BAD_REQUEST response_json = response.json() @@ -869,7 +869,7 @@ def test_submit_form_view(api_client, data_fixture): f"field_{number_field.id}": 0, }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_200_OK response_json = response.json() @@ -996,7 +996,7 @@ def test_submit_form_view_skip_required_with_conditions(api_client, data_fixture f"field_{text_field.id}": "test", }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_400_BAD_REQUEST response_json = response.json() @@ -1012,7 +1012,7 @@ def test_submit_form_view_skip_required_with_conditions(api_client, data_fixture f"field_{text_field.id}": "test", }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_400_BAD_REQUEST response_json = response.json() @@ -1031,7 +1031,7 @@ def test_submit_form_view_skip_required_with_conditions(api_client, 
data_fixture f"field_{text_field.id}": "test", }, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_200_OK @@ -1104,7 +1104,7 @@ def test_form_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token_2}", + HTTP_AUTHORIZATION=f"JWT {token_2}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_VIEW_DOES_NOT_EXIST" @@ -1117,7 +1117,7 @@ def test_form_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -1130,7 +1130,7 @@ def test_form_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -1143,7 +1143,7 @@ def test_form_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -1156,7 +1156,7 @@ def test_form_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -2977,7 +2977,7 @@ def test_submit_form_view_for_required_number_field_with_0(api_client, data_fixt f"field_{number_field.id}": "0", }, format="json", - 
HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_200_OK, ( "Got an error response " diff --git a/backend/tests/baserow/contrib/database/api/views/gallery/test_gallery_view_views.py b/backend/tests/baserow/contrib/database/api/views/gallery/test_gallery_view_views.py index a44dd8cad1..66b54f0cbc 100644 --- a/backend/tests/baserow/contrib/database/api/views/gallery/test_gallery_view_views.py +++ b/backend/tests/baserow/contrib/database/api/views/gallery/test_gallery_view_views.py @@ -149,7 +149,7 @@ def test_list_gallery_rows_adhoc_filtering_query_param_filter(api_client, data_f ) get_params = [f"filter__field_{text_field.id}__contains=a"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -165,7 +165,7 @@ def test_list_gallery_rows_adhoc_filtering_query_param_filter(api_client, data_f f"filter_type=OR", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -176,7 +176,7 @@ def test_list_gallery_rows_adhoc_filtering_query_param_filter(api_client, data_f ) get_params = [f"filter__field_{text_field_hidden.id}__contains=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -187,7 +187,7 @@ def test_list_gallery_rows_adhoc_filtering_query_param_filter(api_client, data_f ) get_params = [f"filter__field_{text_field.id}__random=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + 
f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -198,7 +198,7 @@ def test_list_gallery_rows_adhoc_filtering_query_param_filter(api_client, data_f ) get_params = [f"filter__field_{text_field.id}__higher_than=1"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -272,7 +272,7 @@ def test_list_gallery_rows_adhoc_filtering_invalid_advanced_filters( for filters, error_detail in expected_errors: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -323,7 +323,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters_are_preferred_to_oth f"filter_type=AND", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -374,7 +374,7 @@ def test_list_gallery_rows_adhoc_filtering_overrides_existing_filters( "filters=" + json.dumps(advanced_filters), ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -420,7 +420,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters(api_client, data_fix } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + 
f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -449,7 +449,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters(api_client, data_fix } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -490,7 +490,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters(api_client, data_fix } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -508,7 +508,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters(api_client, data_fix } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -526,7 +526,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters(api_client, data_fix } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -544,7 +544,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters(api_client, data_fix } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', 
HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -557,7 +557,7 @@ def test_list_gallery_rows_adhoc_filtering_advanced_filters(api_client, data_fix ]: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -922,7 +922,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:gallery:public_rows", kwargs={"slug": gallery_view.slug} ) get_params = [f"filter__field_{public_field.id}__contains=a"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 1 @@ -936,7 +936,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): f"filter__field_{public_field.id}__contains=b", f"filter_type=OR", ] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -945,7 +945,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:gallery:public_rows", kwargs={"slug": gallery_view.slug} ) get_params = [f"filter__field_{hidden_field.id}__contains=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" @@ -954,7 +954,7 @@ def 
test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:gallery:public_rows", kwargs={"slug": gallery_view.slug} ) get_params = [f"filter__field_{public_field.id}__random=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" @@ -963,7 +963,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:gallery:public_rows", kwargs={"slug": gallery_view.slug} ) get_params = [f"filter__field_{public_field.id}__higher_than=1"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -1116,7 +1116,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 1 @@ -1143,7 +1143,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -1182,7 +1182,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = 
api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -1198,7 +1198,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" @@ -1214,7 +1214,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" @@ -1230,7 +1230,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -1241,7 +1241,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix json.dumps({"filter_type": "OR", "filters": "invalid"}), ]: get_params = [f"filters={filters}"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == 
"ERROR_FILTERS_PARAM_VALIDATION_ERROR" diff --git a/backend/tests/baserow/contrib/database/api/views/grid/test_grid_view_views.py b/backend/tests/baserow/contrib/database/api/views/grid/test_grid_view_views.py index 338f633ccb..84bfa4442b 100644 --- a/backend/tests/baserow/contrib/database/api/views/grid/test_grid_view_views.py +++ b/backend/tests/baserow/contrib/database/api/views/grid/test_grid_view_views.py @@ -1290,7 +1290,7 @@ def test_view_aggregations_adhoc_filtering_overrides_existing_filters( } get_params = [f"filters={json.dumps(advanced_filters)}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) assert response.status_code == HTTP_200_OK response_json = response.json() @@ -1350,7 +1350,7 @@ def test_view_aggregations_adhoc_filtering_advanced_filters_are_preferred_to_oth f"filter_type=AND", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) assert response.status_code == HTTP_200_OK response_json = response.json() @@ -1433,7 +1433,7 @@ def test_view_aggregations_adhoc_filtering_invalid_advanced_filters( for filters, error_detail in expected_errors: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -2649,7 +2649,7 @@ def test_public_view_aggregations_adhoc_filtering_combineswith_existing_filters( } get_params = [f"filters={json.dumps(advanced_filters)}"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") assert response.status_code == HTTP_200_OK, response.json() response_json = response.json() assert response_json == 
{text_field_for_view_filter.db_column: 1} @@ -2707,7 +2707,7 @@ def test_public_view_aggregations_adhoc_filtering_advanced_filters_are_preferred f"filter__field_{text_field.id}__equal=z", f"filter_type=AND", ] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") assert response.status_code == HTTP_200_OK response_json = response.json() assert response_json == {text_field.db_column: 2} @@ -2787,7 +2787,7 @@ def test_public_view_aggregations_adhoc_filtering_invalid_advanced_filters( for filters, error_detail in expected_errors: get_params = [f"filters={filters}"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTERS_PARAM_VALIDATION_ERROR" @@ -3396,7 +3396,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:grid:public_rows", kwargs={"slug": grid_view.slug} ) get_params = [f"filter__field_{public_field.id}__contains=a"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 1 @@ -3410,7 +3410,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): f"filter__field_{public_field.id}__contains=b", f"filter_type=OR", ] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -3419,7 +3419,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:grid:public_rows", kwargs={"slug": grid_view.slug} ) get_params = 
[f"filter__field_{hidden_field.id}__contains=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" @@ -3428,7 +3428,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:grid:public_rows", kwargs={"slug": grid_view.slug} ) get_params = [f"filter__field_{public_field.id}__random=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" @@ -3437,7 +3437,7 @@ def test_list_rows_public_with_query_param_filter(api_client, data_fixture): "api:database:views:grid:public_rows", kwargs={"slug": grid_view.slug} ) get_params = [f"filter__field_{public_field.id}__higher_than=1"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -3507,7 +3507,7 @@ def test_list_rows_public_with_invalid_advanced_filters(api_client, data_fixture for filters, error_detail in expected_errors: get_params = [f"filters={filters}"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTERS_PARAM_VALIDATION_ERROR" @@ -3547,7 +3547,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = 
api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 1 @@ -3574,7 +3574,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -3613,7 +3613,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -3629,7 +3629,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" @@ -3645,7 +3645,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" @@ -3661,7 +3661,7 @@ def 
test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -3672,7 +3672,7 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix json.dumps({"filter_type": "OR", "filters": "invalid"}), ]: get_params = [f"filters={filters}"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTERS_PARAM_VALIDATION_ERROR" @@ -4171,7 +4171,7 @@ def test_grid_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token_2}", + HTTP_AUTHORIZATION=f"JWT {token_2}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_VIEW_DOES_NOT_EXIST" @@ -4184,7 +4184,7 @@ def test_grid_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -4197,7 +4197,7 @@ def test_grid_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -4210,7 +4210,7 @@ def test_grid_view_link_row_lookup_view(api_client, data_fixture): response = 
api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -4223,7 +4223,7 @@ def test_grid_view_link_row_lookup_view(api_client, data_fixture): response = api_client.get( url, format="json", - HTTP_AUTHORIZATION=f"JWT" f" {token}", + HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_404_NOT_FOUND assert response.json()["error"] == "ERROR_FIELD_DOES_NOT_EXIST" @@ -4531,7 +4531,7 @@ def test_list_rows_public_advanced_filters_are_preferred_to_other_filter_query_p f"filter__field_{public_field.id}__equal=z", f"filter_type=AND", ] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -4562,7 +4562,7 @@ def test_list_grid_rows_adhoc_filtering_query_param_filter(api_client, data_fixt url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id}) get_params = [f"filter__field_{text_field.id}__contains=a"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4576,7 +4576,7 @@ def test_list_grid_rows_adhoc_filtering_query_param_filter(api_client, data_fixt f"filter_type=OR", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4585,7 +4585,7 @@ def test_list_grid_rows_adhoc_filtering_query_param_filter(api_client, data_fixt url = reverse("api:database:views:grid:list", kwargs={"view_id": 
grid_view.id}) get_params = [f"filter__field_{text_field_hidden.id}__contains=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4594,7 +4594,7 @@ def test_list_grid_rows_adhoc_filtering_query_param_filter(api_client, data_fixt url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id}) get_params = [f"filter__field_{text_field.id}__random=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -4603,7 +4603,7 @@ def test_list_grid_rows_adhoc_filtering_query_param_filter(api_client, data_fixt url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id}) get_params = [f"filter__field_{text_field.id}__higher_than=1"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -4629,7 +4629,7 @@ def test_list_grid_rows_adhoc_filtering_query_param_null_character( url = reverse("api:database:views:grid:list", kwargs={"view_id": grid_view.id}) get_params = [f"filter__field_{text_field.id}__contains={str_with_null_character}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4700,7 +4700,7 @@ def test_list_grid_rows_adhoc_filtering_invalid_advanced_filters( for filters, error_detail in expected_errors: get_params = [f"filters={filters}"] response = 
api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -4749,7 +4749,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters_are_preferred_to_other_ f"filter_type=AND", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4798,7 +4798,7 @@ def test_list_grid_rows_adhoc_filtering_overrides_existing_filters( "filters=" + json.dumps(advanced_filters), ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4838,7 +4838,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters(api_client, data_fixtur } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4867,7 +4867,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters(api_client, data_fixtur } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4908,7 +4908,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters(api_client, data_fixtur } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - 
f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4926,7 +4926,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters(api_client, data_fixtur } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -4944,7 +4944,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters(api_client, data_fixtur } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -4962,7 +4962,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters(api_client, data_fixtur } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -4975,7 +4975,7 @@ def test_list_grid_rows_adhoc_filtering_advanced_filters(api_client, data_fixtur ]: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST diff --git a/backend/tests/baserow/contrib/database/field/test_field_actions.py b/backend/tests/baserow/contrib/database/field/test_field_actions.py index 
345bc11138..c364979225 100644 --- a/backend/tests/baserow/contrib/database/field/test_field_actions.py +++ b/backend/tests/baserow/contrib/database/field/test_field_actions.py @@ -1286,9 +1286,10 @@ def test_can_undo_updating_field_every_type(data_fixture, django_assert_num_quer actions = ActionHandler.undo( user, [UpdateFieldActionType.scope(field.table_id)], session_id ) - assert_undo_redo_actions_are_valid( - actions, [UpdateFieldActionType] - ), f"Failed for {before_serialized}" + ( + assert_undo_redo_actions_are_valid(actions, [UpdateFieldActionType]), + f"Failed for {before_serialized}", + ) new_field = FieldHandler().get_specific_field_for_update(field.id) after_serialized = field_type.export_serialized(new_field) assert before_serialized == after_serialized diff --git a/backend/tests/baserow/contrib/database/field/test_field_handler.py b/backend/tests/baserow/contrib/database/field/test_field_handler.py index 4995cc5d44..86f39481b0 100644 --- a/backend/tests/baserow/contrib/database/field/test_field_handler.py +++ b/backend/tests/baserow/contrib/database/field/test_field_handler.py @@ -9,7 +9,6 @@ from django.test.utils import CaptureQueriesContext import pytest -from baserow_premium.fields.field_types import AIFieldType from faker import Faker from baserow.contrib.database.fields.constants import ( @@ -91,6 +90,7 @@ from baserow.core.registries import ImportExportConfig from baserow.core.trash.handler import TrashHandler from baserow.test_utils.helpers import setup_interesting_test_table +from baserow_premium.fields.field_types import AIFieldType @pytest.fixture(autouse=True) @@ -142,8 +142,7 @@ def test_field_conversion( nonlocal i print( - f"Converting num {i} from {from_field_type_name} to" - f" {to_field_type_name}", + f"Converting num {i} from {from_field_type_name} to {to_field_type_name}", flush=True, ) @@ -572,7 +571,7 @@ def test_create_field(send_mock, data_fixture): name=field_name_with_ok_length, text_default="Some default", ) - assert 
getattr(field_with_max_length_name, "name") == field_name_with_ok_length + assert field_with_max_length_name.name == field_name_with_ok_length @pytest.mark.django_db @@ -738,7 +737,7 @@ def test_update_field(send_mock, data_fixture): field=field_3, name=field_name_with_ok_length, ) - assert getattr(field_with_max_length_name, "name") == field_name_with_ok_length + assert field_with_max_length_name.name == field_name_with_ok_length @pytest.mark.django_db @@ -2022,9 +2021,9 @@ def test_field_constraints_unique_with_empty(data_fixture): fields_to_test.extend(constraint.get_compatible_field_types()) missing_fields = set(fields_to_test) - set(fields.keys()) - assert set(fields_to_test) == set( - fields.keys() - ), f"Fields that should be tested are missing: {missing_fields}" + assert set(fields_to_test) == set(fields.keys()), ( + f"Fields that should be tested are missing: {missing_fields}" + ) for field_type, field_data in fields.items(): with pytest.raises(Exception) as exc_info: diff --git a/backend/tests/baserow/contrib/database/field/test_field_tasks.py b/backend/tests/baserow/contrib/database/field/test_field_tasks.py index e2b07d4eac..6a368394e5 100644 --- a/backend/tests/baserow/contrib/database/field/test_field_tasks.py +++ b/backend/tests/baserow/contrib/database/field/test_field_tasks.py @@ -73,9 +73,12 @@ def test_run_periodic_fields_updates_if_necessary(data_fixture, settings): workspace_3.save() # less than 5 minutes after - with patch( - "baserow.contrib.database.fields.tasks._run_periodic_field_type_update_per_workspace" - ) as run_field_type_update, freeze_time("2020-01-01 00:04"): + with ( + patch( + "baserow.contrib.database.fields.tasks._run_periodic_field_type_update_per_workspace" + ) as run_field_type_update, + freeze_time("2020-01-01 00:04"), + ): run_periodic_fields_updates(workspace_id=workspace.id) run_field_type_update.assert_called_once_with( field_type_instance, workspace, True diff --git 
a/backend/tests/baserow/contrib/database/field/test_field_types.py b/backend/tests/baserow/contrib/database/field/test_field_types.py index bf6c128aba..35b72f0753 100644 --- a/backend/tests/baserow/contrib/database/field/test_field_types.py +++ b/backend/tests/baserow/contrib/database/field/test_field_types.py @@ -1316,6 +1316,6 @@ def test_all_fields_with_db_index_have_index(data_fixture): if field_type.can_have_db_index(field): model_field = model._meta.get_field(field_object["name"]) index_name = f"database_table_{table.id}_{model_field.db_column}_" - assert any( - indexdef[0].startswith(index_name) for indexdef in indexes - ), f"{index_name} not found in indexes" + assert any(indexdef[0].startswith(index_name) for indexdef in indexes), ( + f"{index_name} not found in indexes" + ) diff --git a/backend/tests/baserow/contrib/database/field/test_file_field_type.py b/backend/tests/baserow/contrib/database/field/test_file_field_type.py index e8872a91a5..3dd79bd51e 100644 --- a/backend/tests/baserow/contrib/database/field/test_file_field_type.py +++ b/backend/tests/baserow/contrib/database/field/test_file_field_type.py @@ -549,9 +549,9 @@ def test_file_field_type_in_formulas(data_fixture, api_client): response = api_client.get(url, **{"HTTP_AUTHORIZATION": f"JWT {token}"}) response_json = response.json() assert response.status_code == HTTP_200_OK, response_json - assert ( - response_json["results"][0][formula_field.db_column] == expected - ), f"Failed for {formula} but was " + str(response_json["results"][0]) + assert response_json["results"][0][formula_field.db_column] == expected, ( + f"Failed for {formula} but was " + str(response_json["results"][0]) + ) formula_field3 = FieldHandler().create_field( user, diff --git a/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py b/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py index 521a0a6b09..66e5b4a2bf 100644 --- 
a/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py +++ b/backend/tests/baserow/contrib/database/field/test_lookup_field_type.py @@ -702,10 +702,10 @@ def test_moving_a_looked_up_row_updates_the_order( table2_model = table2.get_model(attribute_names=True) a = table2_model.objects.create( - lookupfield=f"2021-02-01", primaryfield="primary " "a", order=0 + lookupfield=f"2021-02-01", primaryfield="primary a", order=0 ) b = table2_model.objects.create( - lookupfield=f"2022-02-03", primaryfield="primary " "b", order=1 + lookupfield=f"2022-02-03", primaryfield="primary b", order=1 ) table_model = table.get_model(attribute_names=True) @@ -973,10 +973,10 @@ def test_deleting_restoring_lookup_target_works( table2_model = table2.get_model(attribute_names=True) a = table2_model.objects.create( - lookupfield=f"2021-02-01", primaryfield="primary " "a", order=0 + lookupfield=f"2021-02-01", primaryfield="primary a", order=0 ) b = table2_model.objects.create( - lookupfield=f"2022-02-03", primaryfield="primary " "b", order=1 + lookupfield=f"2022-02-03", primaryfield="primary b", order=1 ) table_model = table.get_model(attribute_names=True) @@ -1166,10 +1166,10 @@ def test_deleting_related_link_row_field_dep_breaks_deps( table2_model = table2.get_model(attribute_names=True) a = table2_model.objects.create( - lookupfield=f"2021-02-01", primaryfield="primary " "a", order=0 + lookupfield=f"2021-02-01", primaryfield="primary a", order=0 ) b = table2_model.objects.create( - lookupfield=f"2022-02-03", primaryfield="primary " "b", order=1 + lookupfield=f"2022-02-03", primaryfield="primary b", order=1 ) table_model = table.get_model(attribute_names=True) @@ -1577,10 +1577,10 @@ def test_deleting_related_link_row_field_still_lets_you_create_edit_rows( table2_model = table2.get_model(attribute_names=True) a = table2_model.objects.create( - lookupfield=f"2021-02-01", primaryfield="primary " "a", order=0 + lookupfield=f"2021-02-01", primaryfield="primary a", order=0 ) b = 
table2_model.objects.create( - lookupfield=f"2022-02-03", primaryfield="primary " "b", order=1 + lookupfield=f"2022-02-03", primaryfield="primary b", order=1 ) table_model = table.get_model(attribute_names=True) @@ -1661,10 +1661,10 @@ def test_deleting_related_table_still_lets_you_create_edit_rows( table2_model = table2.get_model(attribute_names=True) a = table2_model.objects.create( - lookupfield=f"2021-02-01", primaryfield="primary " "a", order=0 + lookupfield=f"2021-02-01", primaryfield="primary a", order=0 ) b = table2_model.objects.create( - lookupfield=f"2022-02-03", primaryfield="primary " "b", order=1 + lookupfield=f"2022-02-03", primaryfield="primary b", order=1 ) table_model = table.get_model(attribute_names=True) @@ -1734,10 +1734,10 @@ def test_converting_away_from_lookup_field_deletes_parent_formula_field( ) table2_model = table2.get_model(attribute_names=True) a = table2_model.objects.create( - lookupfield=f"2021-02-01", primaryfield="primary " "a", order=0 + lookupfield=f"2021-02-01", primaryfield="primary a", order=0 ) b = table2_model.objects.create( - lookupfield=f"2022-02-03", primaryfield="primary " "b", order=1 + lookupfield=f"2022-02-03", primaryfield="primary b", order=1 ) table_model = table.get_model(attribute_names=True) diff --git a/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py b/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py index bc7ec7a75e..8499103458 100644 --- a/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py +++ b/backend/tests/baserow/contrib/database/field/test_multiple_collaborators_field_type.py @@ -150,7 +150,7 @@ def test_get_set_export_serialized_value_multiple_collaborators_field(data_fixtu assert len(all) == 3 imported_row_1 = all[0] imported_row_1_field = ( - getattr(imported_row_1, f"field_" f"{imported_field.id}").order_by("id").all() + getattr(imported_row_1, 
f"field_{imported_field.id}").order_by("id").all() ) imported_row_2 = all[1] imported_row_2_field = ( diff --git a/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py b/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py index 39718dab21..5d2f84da9b 100644 --- a/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py +++ b/backend/tests/baserow/contrib/database/field/test_multiple_select_field_type.py @@ -285,25 +285,25 @@ def test_multiple_select_field_type_rows(data_fixture, django_assert_num_queries ) row_2_field = getattr(row_2, f"field_{field.id}").all() assert len(row_2_field) == 1 - assert getattr(row_2_field[0], "id") == select_options[0].id - assert getattr(row_2_field[0], "value") == select_options[0].value - assert getattr(row_2_field[0], "color") == select_options[0].color + assert row_2_field[0].id == select_options[0].id + assert row_2_field[0].value == select_options[0].value + assert row_2_field[0].color == select_options[0].color row_3 = row_handler.create_row( user=user, table=table, values={f"field_{field.id}": [select_options[1].id]} ) row_3_field = getattr(row_3, f"field_{field.id}").all() assert len(row_3_field) == 1 - assert getattr(row_3_field[0], "id") == select_options[1].id - assert getattr(row_3_field[0], "value") == select_options[1].value + assert row_3_field[0].id == select_options[1].id + assert row_3_field[0].value == select_options[1].value row_4 = row_handler.create_row( user=user, table=table, values={f"field_{field.id}": [select_options[0].id]} ) row_4_field = getattr(row_4, f"field_{field.id}").all() assert len(row_4_field) == 1 - assert getattr(row_4_field[0], "id") == select_options[0].id - assert getattr(row_4_field[0], "value") == select_options[0].value + assert row_4_field[0].id == select_options[0].id + assert row_4_field[0].value == select_options[0].value row_5 = row_handler.create_row(user=user, table=table) row_5_field = getattr(row_5, 
f"field_{field.id}").all() @@ -353,7 +353,7 @@ def test_multiple_select_field_type_rows(data_fixture, django_assert_num_queries ) row_5_field = getattr(row_5, f"field_{field.id}").all() assert len(row_5_field) == 2 - assert set([getattr(row_5_field[0], "id"), getattr(row_5_field[1], "id")]) == set( + assert set([row_5_field[0].id, row_5_field[1].id]) == set( [select_options[1].id, select_options[0].id] ) @@ -424,31 +424,31 @@ def test_multiple_select_field_type_multiple_rows(data_fixture): row_0_field = getattr(row_0, f"field_{field.id}").all() assert len(row_0_field) == 2 - assert getattr(row_0_field[0], "id") == select_options[0].id - assert getattr(row_0_field[1], "id") == select_options[1].id + assert row_0_field[0].id == select_options[0].id + assert row_0_field[1].id == select_options[1].id row_1_field = getattr(row_1, f"field_{field.id}").all() assert len(row_1_field) == 2 - assert getattr(row_1_field[0], "id") == select_options[2].id - assert getattr(row_1_field[1], "id") == select_options[0].id + assert row_1_field[0].id == select_options[2].id + assert row_1_field[1].id == select_options[0].id row_2_field = getattr(row_2, f"field_{field.id}").all() assert len(row_2_field) == 2 - assert getattr(row_2_field[0], "id") == select_options[2].id - assert getattr(row_2_field[1], "id") == select_options[0].id + assert row_2_field[0].id == select_options[2].id + assert row_2_field[1].id == select_options[0].id row_3_field = getattr(row_3, f"field_{field.id}").all() assert len(row_3_field) == 2 - assert getattr(row_3_field[0], "id") == select_options[3].id - assert getattr(row_3_field[1], "id") == select_options[0].id + assert row_3_field[0].id == select_options[3].id + assert row_3_field[1].id == select_options[0].id row_4_field = getattr(row_4, f"field_{field.id}").all() assert len(row_4_field) == 1 - assert getattr(row_4_field[0], "id") == select_options[0].id + assert row_4_field[0].id == select_options[0].id row_5_field = getattr(row_5, 
f"field_{field.id}").all() assert len(row_5_field) == 1 - assert getattr(row_5_field[0], "id") == select_options[0].id + assert row_5_field[0].id == select_options[0].id error_report = row_handler.create_rows( user, @@ -879,9 +879,7 @@ def test_get_set_export_serialized_value_multiple_select_field( assert len(all) == 3 imported_row_1 = all[0] imported_row_1_field = ( - getattr(imported_row_1, f"field_" f"{imported_field.id}") - .order_by("order") - .all() + getattr(imported_row_1, f"field_{imported_field.id}").order_by("order").all() ) imported_row_2 = all[1] imported_row_2_field = ( @@ -1631,7 +1629,7 @@ def test_conversion_date_to_multiple_select_field(data_fixture): table=table, row_id=row.id, values={ - f"field_{date_field_eu.id}": [getattr(x, "id") for x in select_options], + f"field_{date_field_eu.id}": [x.id for x in select_options], }, ) diff --git a/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py b/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py index 45c7589b91..7b4684634d 100644 --- a/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py +++ b/backend/tests/baserow/contrib/database/field/test_uuid_field_type.py @@ -376,7 +376,7 @@ def test_can_filter_on_uuid_field(api_client, data_fixture): get_params = [f"filter__field_{field.id}__equal={row_1.uuid}"] url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", {}, format="json", HTTP_AUTHORIZATION=f"JWT {token}", @@ -402,7 +402,7 @@ def test_can_sort_on_uuid_field(api_client, data_fixture): get_params = [f"sort=field_{field.id}1"] url = reverse("api:database:rows:list", kwargs={"table_id": table.id}) response = api_client.get( - f'{url}?{"&".join(get_params)}', + f"{url}?{'&'.join(get_params)}", {}, format="json", HTTP_AUTHORIZATION=f"JWT {token}", diff --git a/backend/tests/baserow/contrib/database/formula/test_rename_field_references.py 
b/backend/tests/baserow/contrib/database/formula/test_rename_field_references.py index f0dab3951b..9f0c6ecd4a 100644 --- a/backend/tests/baserow/contrib/database/formula/test_rename_field_references.py +++ b/backend/tests/baserow/contrib/database/formula/test_rename_field_references.py @@ -7,7 +7,7 @@ def test_replace_single_quoted_field_ref(): new_formula = FormulaHandler.rename_field_references_in_formula_string( "field('test')", - {"test": "new " "test"}, + {"test": "new test"}, ) assert new_formula == "field('new test')" @@ -32,7 +32,7 @@ def test_replace_field_reference_keeping_whitespace(): def test_replace_field_reference_keeping_whitespace_and_comments(): new_formula = FormulaHandler.rename_field_references_in_formula_string( "//my line comment \n\tfield('test') /*my block comment*/\n\t", - {"test": "new " "test"}, + {"test": "new test"}, ) assert ( @@ -44,7 +44,7 @@ def test_replace_field_reference_keeping_whitespace_and_comments(): def test_replace_field_reference_preserving_case(): new_formula = FormulaHandler.rename_field_references_in_formula_string( "//my line comment \n\tADD(fIeLd('test'),1) /*my block comment*/\n\t", - {"test": "new " "test"}, + {"test": "new test"}, ) assert ( @@ -56,10 +56,10 @@ def test_replace_field_reference_preserving_case(): def test_replace_binary_op_keeping_whitespace_and_comments(): new_formula = FormulaHandler.rename_field_references_in_formula_string( "//my line comment \n\t1+1 /*my block comment*/\n\t", - {"test": "new " "test"}, + {"test": "new test"}, ) - assert new_formula == "//my line comment \n\t1+1 /*my block " "comment*/\n\t" + assert new_formula == "//my line comment \n\t1+1 /*my block comment*/\n\t" def test_replace_function_call_keeping_whitespace_and_comments(): diff --git a/backend/tests/baserow/contrib/database/import_export/test_export_handler.py b/backend/tests/baserow/contrib/database/import_export/test_export_handler.py index a78f68d3bf..99501d93b3 100755 --- 
a/backend/tests/baserow/contrib/database/import_export/test_export_handler.py +++ b/backend/tests/baserow/contrib/database/import_export/test_export_handler.py @@ -76,7 +76,7 @@ def test_hidden_fields_are_excluded(get_storage_mock, data_fixture): ) _, contents = run_export_job_with_mock_storage(table, grid_view, storage_mock, user) bom = "\ufeff" - expected = bom + "id,text_field\r\n" f"1,Something\r\n" + expected = bom + f"id,text_field\r\n1,Something\r\n" assert contents == expected @@ -235,7 +235,7 @@ def test_exporting_public_view_without_user(get_storage_mock, data_fixture): ) _, contents = run_export_job_with_mock_storage(table, grid_view, storage_mock, None) bom = "\ufeff" - expected = bom + "id,text_field\r\n" "1,hello\r\n" + expected = bom + "id,text_field\r\n1,hello\r\n" assert contents == expected @@ -279,7 +279,7 @@ def test_columns_are_exported_by_order_then_field_id(get_storage_mock, data_fixt assert field_a.id < field_b.id _, contents = run_export_job_with_mock_storage(table, grid_view, storage_mock, user) bom = "\ufeff" - expected = bom + "id,field_c,field_a,field_b\r\n" "1,c,a,b\r\n" + expected = bom + "id,field_c,field_a,field_b\r\n1,c,a,b\r\n" assert contents == expected @@ -419,7 +419,7 @@ def show_diff(actual, expected): diff.append( ( actual_values[0][j], - f"Row {i+1}", + f"Row {i + 1}", actual_value, expected_value, ) @@ -1064,7 +1064,7 @@ def test_a_column_without_a_grid_view_option_has_an_option_made_and_is_exported( _, contents = run_export_job_with_mock_storage(table, grid_view, storage_mock, user) bom = "\ufeff" - expected = bom + "id,field_a,field_b\r\n" "1,a,b\r\n" + expected = bom + "id,field_a,field_b\r\n1,a,b\r\n" assert contents == expected assert GridViewFieldOptions.objects.count() == 2 diff --git a/backend/tests/baserow/contrib/database/management/test_fill_table.py b/backend/tests/baserow/contrib/database/management/test_fill_table.py index ae2bf25a82..7c56ed2938 100644 --- 
a/backend/tests/baserow/contrib/database/management/test_fill_table.py +++ b/backend/tests/baserow/contrib/database/management/test_fill_table.py @@ -17,7 +17,7 @@ def test_fill_table_rows_no_table(): with pytest.raises(SystemExit) as sys_exit: call_command("fill_table_rows", table_id_that_does_not_exist, 10, stdout=output) - assert sys_exit.type == SystemExit + assert sys_exit.type is SystemExit assert sys_exit.value.code == 1 assert ( diff --git a/backend/tests/baserow/contrib/database/rows/test_row_history.py b/backend/tests/baserow/contrib/database/rows/test_row_history.py index a0a8de6a25..64e43499ae 100644 --- a/backend/tests/baserow/contrib/database/rows/test_row_history.py +++ b/backend/tests/baserow/contrib/database/rows/test_row_history.py @@ -659,9 +659,12 @@ def test_update_rows_insert_entries_in_linked_rows_history_in_multiple_tables( user, table_a, [{primary_a.db_column: "a1"}, {primary_a.db_column: "a2"}] ).created_rows - with freeze_time("2021-01-01 12:00"), patch( - "baserow.contrib.database.rows.signals.rows_history_updated.send" - ) as mock_signal: + with ( + freeze_time("2021-01-01 12:00"), + patch( + "baserow.contrib.database.rows.signals.rows_history_updated.send" + ) as mock_signal, + ): action_type_registry.get_by_type(UpdateRowsActionType).do( user, table_a, @@ -1028,7 +1031,7 @@ def test_create_rows_action_row_history_with_undo_redo( }, ] - freezed_timestamp_undo = datetime(2021, 1, 1, 12, 0, tzinfo=timezone.utc) + freezed_timestamp_undo = datetime(2021, 1, 1, 12, 1, tzinfo=timezone.utc) with freeze_time(freezed_timestamp_undo): undone = ActionHandler.undo( user, @@ -1082,7 +1085,8 @@ def test_create_rows_action_row_history_with_undo_redo( }, ] - with freeze_time(freezed_timestamp_do): + freezed_timestamp_redo = datetime(2021, 1, 1, 12, 2, tzinfo=timezone.utc) + with freeze_time(freezed_timestamp_redo): redone = ActionHandler.redo( user, [TableActionScopeType.value(table_id=table.id)], @@ -1126,7 +1130,7 @@ def 
test_create_rows_action_row_history_with_undo_redo( "user_name": user.first_name, "table_id": table.id, "row_id": row.id, - "action_timestamp": freezed_timestamp_do, + "action_timestamp": freezed_timestamp_undo, "action_type": action_type_name, "action_command_type": "UNDO", "after_values": {}, @@ -1138,7 +1142,7 @@ def test_create_rows_action_row_history_with_undo_redo( "user_name": user.first_name, "table_id": table.id, "row_id": row.id, - "action_timestamp": freezed_timestamp_do, + "action_timestamp": freezed_timestamp_redo, "action_type": action_type_name, "action_command_type": "REDO", "before_values": {}, diff --git a/backend/tests/baserow/contrib/database/rows/test_rows_handler.py b/backend/tests/baserow/contrib/database/rows/test_rows_handler.py index c8d171110f..75d22a3034 100644 --- a/backend/tests/baserow/contrib/database/rows/test_rows_handler.py +++ b/backend/tests/baserow/contrib/database/rows/test_rows_handler.py @@ -881,7 +881,7 @@ def test_update_rows_created_on_and_last_modified(data_fixture): result = handler.update_rows( user, table, - [{"id": row.id, f"field_" f"{field.id}": "Test"}], + [{"id": row.id, f"field_{field.id}": "Test"}], ) row = result.updated_rows[0] assert row.created_on == datetime(2020, 1, 1, 12, 0, tzinfo=timezone.utc) diff --git a/backend/tests/baserow/contrib/database/search/test_search_compatibility.py b/backend/tests/baserow/contrib/database/search/test_search_compatibility.py index 6ebaf7355f..a2e542e9d5 100644 --- a/backend/tests/baserow/contrib/database/search/test_search_compatibility.py +++ b/backend/tests/baserow/contrib/database/search/test_search_compatibility.py @@ -23,7 +23,7 @@ def test_search_compatibility_between_current_and_postgres(data_fixture, tmpdir) "text": [ ["Peter Evans", "Peter Evans"], # full-text, compat exact ["Peter Ev", "Peter Ev"], # full-text, compat partial - ["peTeR EV", "peTeR EV"] # full-text, compat mixed case. + ["peTeR EV", "peTeR EV"], # full-text, compat mixed case. 
# Compat can't handle multiple spaces. ], "long_text": [ @@ -204,9 +204,9 @@ def test_search_compatibility_between_current_and_postgres(data_fixture, tmpdir) model = table.get_model() for field_type, queries in query_searches.items(): for pg_query, compat_query in queries: - assert ( - model.objects.filter(pk=row.pk).pg_search(pg_query).exists() - ), f"Unable to match Postgres query '{pg_query}'." + assert model.objects.filter(pk=row.pk).pg_search(pg_query).exists(), ( + f"Unable to match Postgres query '{pg_query}'." + ) if compat_query is not None: assert ( model.objects.filter(pk=row.pk).compat_search(compat_query).exists() diff --git a/backend/tests/baserow/contrib/database/table/test_table_handler.py b/backend/tests/baserow/contrib/database/table/test_table_handler.py index 8e7dc364d6..4805e309dc 100644 --- a/backend/tests/baserow/contrib/database/table/test_table_handler.py +++ b/backend/tests/baserow/contrib/database/table/test_table_handler.py @@ -160,8 +160,9 @@ def test_fill_table_with_initial_data(data_fixture): with pytest.raises(InvalidInitialTableData): table_handler.create_table(user, database, name="Table 1", data=[[]]) - with override_settings(INITIAL_TABLE_DATA_LIMIT=2), pytest.raises( - InitialTableDataLimitExceeded + with ( + override_settings(INITIAL_TABLE_DATA_LIMIT=2), + pytest.raises(InitialTableDataLimitExceeded), ): table_handler.create_table(user, database, name="Table 1", data=[[], [], []]) with override_settings(MAX_FIELD_LIMIT=2), pytest.raises(MaxFieldLimitExceeded): diff --git a/backend/tests/baserow/contrib/database/table/test_table_models.py b/backend/tests/baserow/contrib/database/table/test_table_models.py index 75ae920a40..3658037950 100644 --- a/backend/tests/baserow/contrib/database/table/test_table_models.py +++ b/backend/tests/baserow/contrib/database/table/test_table_models.py @@ -677,7 +677,7 @@ def test_order_by_fields_string_queryset_with_user_field_names(data_fixture): **{ f"field_{fields[0].id}": "Audi", 
f"field_{fields[1].id}": 2000, - f"field_" f"{fields[3].id}": "Old times", + f"field_{fields[3].id}": "Old times", "order": Decimal("0.1"), } ) @@ -1026,11 +1026,11 @@ def test_order_by_field_string_with_multiple_field_types_requiring_aggregations( model = table.get_model(attribute_names=True) row_1 = model.objects.create() - getattr(row_1, "multi_a").set([option_a.id]) - getattr(row_1, "multi_b").set([option_c.id]) + row_1.multi_a.set([option_a.id]) + row_1.multi_b.set([option_c.id]) row_2 = model.objects.create() - getattr(row_2, "multi_a").set([option_b.id]) - getattr(row_2, "multi_b").set([option_d.id]) + row_2.multi_a.set([option_b.id]) + row_2.multi_b.set([option_d.id]) results = model.objects.all().order_by_fields_string( f"field_{multiple_select_field_a.id},-field_{multiple_select_field_b.id}" diff --git a/backend/tests/baserow/contrib/database/test_database_application_type.py b/backend/tests/baserow/contrib/database/test_database_application_type.py index fc03976f34..d313a75086 100644 --- a/backend/tests/baserow/contrib/database/test_database_application_type.py +++ b/backend/tests/baserow/contrib/database/test_database_application_type.py @@ -124,7 +124,7 @@ def test_import_export_database(data_fixture): assert imported_row.updated_on == datetime(2021, 1, 2, 13, 30, tzinfo=timezone.utc) assert imported_row.last_modified_by == row.last_modified_by assert getattr( - imported_row, f'field_{id_mapping["database_fields"][text_field.id]}' + imported_row, f"field_{id_mapping['database_fields'][text_field.id]}" ) == (getattr(row, f"field_{text_field.id}")) assert ( imported_formula_field.internal_formula == f"error_to_null(field('" @@ -132,7 +132,7 @@ def test_import_export_database(data_fixture): ) assert imported_formula_field.formula_type == "text" assert getattr( - imported_row, f'field_{id_mapping["database_fields"][formula_field.id]}' + imported_row, f"field_{id_mapping['database_fields'][formula_field.id]}" ) == (getattr(row, f"field_{formula_field.id}")) 
# Because the created on and updated on were not provided, we expect these values diff --git a/backend/tests/baserow/contrib/database/view/test_view_aggregations.py b/backend/tests/baserow/contrib/database/view/test_view_aggregations.py index 190d047c0f..b428c68193 100644 --- a/backend/tests/baserow/contrib/database/view/test_view_aggregations.py +++ b/backend/tests/baserow/contrib/database/view/test_view_aggregations.py @@ -182,9 +182,9 @@ def test_view_unique_count_aggregation_for_interesting_table(data_fixture): user, grid_view, aggregation_query, model=model, with_total=True ) - assert ( - len(result.keys()) == len(aggregation_query) + 1 - ), f"{result} vs {aggregation_query}" + assert len(result.keys()) == len(aggregation_query) + 1, ( + f"{result} vs {aggregation_query}" + ) for field_obj in model._field_objects.values(): field = field_obj["field"] diff --git a/backend/tests/baserow/contrib/database/view/test_view_array_filters.py b/backend/tests/baserow/contrib/database/view/test_view_array_filters.py index 32c3687efb..fa969e59bd 100644 --- a/backend/tests/baserow/contrib/database/view/test_view_array_filters.py +++ b/backend/tests/baserow/contrib/database/view/test_view_array_filters.py @@ -3552,9 +3552,9 @@ def apply_filters_and_assert(expected): mnem_keys = list(MNEMONIC_VALUES.keys()) mnem_res_pos = [mnem_keys[v] for v in res_pos] mnem_exp_res = [mnem_keys[v] for v in expected] - assert res_pos == unordered( - expected - ), f"{filter_type} - {operator}: {mnem_res_pos} != {mnem_exp_res}" + assert res_pos == unordered(expected), ( + f"{filter_type} - {operator}: {mnem_res_pos} != {mnem_exp_res}" + ) # with date array_filter_type_has = SINGLE_TO_ARRAY_FILTER_TYPE_MAP[filter_type]["has"] diff --git a/backend/tests/baserow/contrib/database/view/test_view_filters.py b/backend/tests/baserow/contrib/database/view/test_view_filters.py index fdb15fdd07..dee4670fda 100644 --- a/backend/tests/baserow/contrib/database/view/test_view_filters.py +++ 
b/backend/tests/baserow/contrib/database/view/test_view_filters.py @@ -6850,9 +6850,9 @@ def apply_filters_and_assert(): mnem_keys = list(MNEMONIC_VALUES.keys()) mnem_res_pos = [mnem_keys[v] for v in res_pos] mnem_exp_res = [mnem_keys[v] for v in expected_results] - assert res_pos == unordered( - expected_results - ), f"{filter_type} - {operator}: {mnem_res_pos} != {mnem_exp_res}" + assert res_pos == unordered(expected_results), ( + f"{filter_type} - {operator}: {mnem_res_pos} != {mnem_exp_res}" + ) # with date view_filter = data_fixture.create_view_filter( diff --git a/backend/tests/baserow/contrib/database/view/test_view_signals.py b/backend/tests/baserow/contrib/database/view/test_view_signals.py index fa68d59c6a..0876cf0d51 100644 --- a/backend/tests/baserow/contrib/database/view/test_view_signals.py +++ b/backend/tests/baserow/contrib/database/view/test_view_signals.py @@ -256,11 +256,12 @@ def test_rows_enter_and_exit_view_are_called_when_view_filters_change( assert p.call_args[1]["view"].id == view_a.id assert p.call_args[1]["row_ids"] == [row_1.id] - with patch( - "baserow.contrib.database.views.signals.rows_entered_view.send" - ) as entered, patch( - "baserow.contrib.database.views.signals.rows_exited_view.send" - ) as exited: + with ( + patch( + "baserow.contrib.database.views.signals.rows_entered_view.send" + ) as entered, + patch("baserow.contrib.database.views.signals.rows_exited_view.send") as exited, + ): view_filter_2 = ViewHandler().update_filter( user, view_filter_2, type_name="empty" ) @@ -418,18 +419,22 @@ def test_rows_enter_and_exit_view_with_periodic_fields_updates(data_fixture): ViewSubscriptionHandler.subscribe_to_views(user, [view]) - with patch( - "baserow.contrib.database.views.signals.rows_entered_view.send" - ) as p, freeze_time("2022-01-01"), local_cache.context(): + with ( + patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p, + freeze_time("2022-01-01"), + local_cache.context(), + ): 
run_periodic_fields_updates(table.database.workspace_id) p.assert_called_once() assert p.call_args[1]["view"].id == view.id assert p.call_args[1]["row_ids"] == [row_1.id] - with patch( - "baserow.contrib.database.views.signals.rows_exited_view.send" - ) as p, freeze_time("2023-01-01"), local_cache.context(): + with ( + patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p, + freeze_time("2023-01-01"), + local_cache.context(), + ): run_periodic_fields_updates(table.database.workspace_id) p.assert_called_once() @@ -456,9 +461,10 @@ def test_rows_enter_and_exit_view_when_time_sensitive_filters_are_used( ) ViewSubscriptionHandler.subscribe_to_views(user, [view]) - with patch( - "baserow.contrib.database.views.signals.rows_entered_view.send" - ) as p, freeze_time("2022-01-01"): + with ( + patch("baserow.contrib.database.views.signals.rows_entered_view.send") as p, + freeze_time("2022-01-01"), + ): with transaction.atomic(): ViewSubscriptionHandler.check_views_with_time_sensitive_filters() @@ -466,9 +472,10 @@ def test_rows_enter_and_exit_view_when_time_sensitive_filters_are_used( assert p.call_args[1]["view"].id == view.id assert p.call_args[1]["row_ids"] == [row_1.id] - with patch( - "baserow.contrib.database.views.signals.rows_exited_view.send" - ) as p, freeze_time("2022-01-02"): + with ( + patch("baserow.contrib.database.views.signals.rows_exited_view.send") as p, + freeze_time("2022-01-02"), + ): with transaction.atomic(): ViewSubscriptionHandler.check_views_with_time_sensitive_filters() diff --git a/backend/tests/baserow/contrib/database/webhooks/test_webhook_tasks.py b/backend/tests/baserow/contrib/database/webhooks/test_webhook_tasks.py index 7bdcfe1449..e8dc9b81b9 100644 --- a/backend/tests/baserow/contrib/database/webhooks/test_webhook_tasks.py +++ b/backend/tests/baserow/contrib/database/webhooks/test_webhook_tasks.py @@ -459,11 +459,14 @@ def test_webhook_with_paginated_payload( ) # The first page of the payload is sent and contains the 
batch_id 1. - with patch( - "baserow.contrib.database.webhooks.tasks.enqueue_webhook_task" - ) as mock_enqueue, patch( - "baserow.contrib.database.webhooks.tasks.schedule_next_task_in_queue" - ) as mock_schedule: + with ( + patch( + "baserow.contrib.database.webhooks.tasks.enqueue_webhook_task" + ) as mock_enqueue, + patch( + "baserow.contrib.database.webhooks.tasks.schedule_next_task_in_queue" + ) as mock_schedule, + ): call_webhook( webhook_id=webhook.id, event_id=event_id, @@ -548,11 +551,14 @@ def test_call_webhook_payload_too_large_send_notification( ) # The first page of the payload is sent and contains the batch_id 1. - with patch( - "baserow.contrib.database.webhooks.tasks.enqueue_webhook_task" - ) as mock_enqueue, patch( - "baserow.contrib.database.webhooks.tasks.schedule_next_task_in_queue" - ) as mock_schedule: + with ( + patch( + "baserow.contrib.database.webhooks.tasks.enqueue_webhook_task" + ) as mock_enqueue, + patch( + "baserow.contrib.database.webhooks.tasks.schedule_next_task_in_queue" + ) as mock_schedule, + ): call_webhook( webhook_id=webhook.id, event_id=event_id, diff --git a/backend/tests/baserow/contrib/integrations/core/test_smtp_email_service_type.py b/backend/tests/baserow/contrib/integrations/core/test_smtp_email_service_type.py index 92e55c36cb..ceb7658372 100644 --- a/backend/tests/baserow/contrib/integrations/core/test_smtp_email_service_type.py +++ b/backend/tests/baserow/contrib/integrations/core/test_smtp_email_service_type.py @@ -42,12 +42,15 @@ def mock_django_email( else: server_mock.send.side_effect = exception_class("Generic error") - with patch( - "baserow.contrib.integrations.core.service_types.EmailMultiAlternatives", - return_value=server_mock, - ) as mock_email, patch( - "baserow.contrib.integrations.core.service_types.get_connection", - ) as mock_connection: + with ( + patch( + "baserow.contrib.integrations.core.service_types.EmailMultiAlternatives", + return_value=server_mock, + ) as mock_email, + patch( + 
"baserow.contrib.integrations.core.service_types.get_connection", + ) as mock_connection, + ): yield (mock_email, mock_connection) diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py index 151fd10404..81bcb1b107 100644 --- a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py +++ b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_get_row_service_type.py @@ -363,8 +363,9 @@ def test_local_baserow_get_row_service_dispatch_data_permission_denied( ) dispatch_context = FakeDispatchContext() - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): LocalBaserowGetRowUserServiceType().dispatch_data( service, {"table": table}, dispatch_context diff --git a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py index f449cdedc5..f0d22afcfe 100644 --- a/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py +++ b/backend/tests/baserow/contrib/integrations/local_baserow/service_types/test_list_rows_service_type.py @@ -329,8 +329,9 @@ def test_local_baserow_list_rows_service_dispatch_data_permission_denied( ) dispatch_context = FakeDispatchContext() - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): LocalBaserowListRowsUserServiceType().dispatch_data( service, {"table": table}, dispatch_context diff --git 
a/backend/tests/baserow/contrib/integrations/local_baserow/test_migrations.py b/backend/tests/baserow/contrib/integrations/local_baserow/test_migrations.py index 2449b09272..2a7cf16a64 100644 --- a/backend/tests/baserow/contrib/integrations/local_baserow/test_migrations.py +++ b/backend/tests/baserow/contrib/integrations/local_baserow/test_migrations.py @@ -125,9 +125,9 @@ def test_0003_migrate_local_baserow_table_service_filter_values_to_formulas_forw filters_which_migrated = reduce_to_filter_types_to_migrate(all_filters) for filter_which_migrated in filters_which_migrated: - assert ( - filter_which_migrated.value == "'dogs'" - ), f"Filter type={filter_which_migrated.type} field={filter_which_migrated.field} did not migrate correctly." # noqa: E501 + assert filter_which_migrated.value == "'dogs'", ( + f"Filter type={filter_which_migrated.type} field={filter_which_migrated.field} did not migrate correctly." + ) # noqa: E501 filters_which_did_not_migrate = [ service_filter @@ -135,9 +135,9 @@ def test_0003_migrate_local_baserow_table_service_filter_values_to_formulas_forw if service_filter not in filters_which_migrated ] for filter_which_did_not_migrate in filters_which_did_not_migrate: - assert ( - filter_which_did_not_migrate.value != "'dogs'" - ), f"Filter type={filter_which_did_not_migrate.type} field={filter_which_did_not_migrate.field} migrated when it should not have." # noqa: E501 + assert filter_which_did_not_migrate.value != "'dogs'", ( + f"Filter type={filter_which_did_not_migrate.type} field={filter_which_did_not_migrate.field} migrated when it should not have." 
+ ) # noqa: E501 @pytest.mark.once_per_day_in_ci @@ -295,10 +295,10 @@ def test_0006_migrate_local_baserow_table_service_filter_formulas_to_value_is_fo for service_filter in LocalBaserowTableServiceFilter.objects.all(): if value_parses_as_formula(service_filter.value): - assert ( - service_filter.value_is_formula is True - ), "A valid formula was detected, but value_is_formula was not set to True." + assert service_filter.value_is_formula is True, ( + "A valid formula was detected, but value_is_formula was not set to True." + ) else: - assert ( - service_filter.value_is_formula is False - ), "A invalid formula was detected, but value_is_formula was not set to False." # noqa: E501 + assert service_filter.value_is_formula is False, ( + "A invalid formula was detected, but value_is_formula was not set to False." + ) # noqa: E501 diff --git a/backend/tests/baserow/core/generative_ai/test_generative_ai_model_types.py b/backend/tests/baserow/core/generative_ai/test_generative_ai_model_types.py index d0530889e2..31c6662d7a 100644 --- a/backend/tests/baserow/core/generative_ai/test_generative_ai_model_types.py +++ b/backend/tests/baserow/core/generative_ai/test_generative_ai_model_types.py @@ -203,9 +203,9 @@ def get_client_stub(workspace=None): response = ai_model_type.prompt_with_files("gpt-3.5", "test prompt", file_ids=[]) assert response == "test response" # reference was removed from the ouput - assert ( - len(openai_client.beta.assistants._assistants) == 0 - ), "Assistant has been deleted" + assert len(openai_client.beta.assistants._assistants) == 0, ( + "Assistant has been deleted" + ) openai_client.beta.threads.delete.assert_called_once() assert len(openai_client.beta.assistants._assistants) == 0 diff --git a/backend/tests/baserow/core/integrations/test_integration_service.py b/backend/tests/baserow/core/integrations/test_integration_service.py index 47c582c04c..6e90bcca35 100644 --- a/backend/tests/baserow/core/integrations/test_integration_service.py +++ 
b/backend/tests/baserow/core/integrations/test_integration_service.py @@ -141,8 +141,9 @@ def test_create_integration_permission_denied(data_fixture, stub_check_permissio integration_type = integration_type_registry.get("local_baserow") - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): IntegrationService().create_integration( user, integration_type, application=application @@ -172,8 +173,9 @@ def test_get_integration_permission_denied(data_fixture, stub_check_permissions) user = data_fixture.create_user() integration = data_fixture.create_local_baserow_integration(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): IntegrationService().get_integration(user, integration.id) @@ -240,8 +242,9 @@ def test_delete_integration_permission_denied(data_fixture, stub_check_permissio user = data_fixture.create_user() integration = data_fixture.create_local_baserow_integration(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): IntegrationService().delete_integration(user, integration) @@ -267,8 +270,9 @@ def test_update_integration_permission_denied(data_fixture, stub_check_permissio user = data_fixture.create_user() integration = data_fixture.create_local_baserow_integration(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): IntegrationService().update_integration(user, integration, value="newValue") @@ -331,8 +335,9 @@ def 
test_move_integration_permission_denied(data_fixture, stub_check_permissions application=application ) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): IntegrationService().move_integration(user, integration3, before=integration2) diff --git a/backend/tests/baserow/core/jobs/test_jobs_handler.py b/backend/tests/baserow/core/jobs/test_jobs_handler.py index b7f4b5ef57..a1b580d651 100644 --- a/backend/tests/baserow/core/jobs/test_jobs_handler.py +++ b/backend/tests/baserow/core/jobs/test_jobs_handler.py @@ -214,9 +214,9 @@ def run(self, job, progress): t.start() assert m_start.wait(0.5) assert job.started, job.get_cached_state() - assert ( - job.get_cached_progress_percentage() == 11 - ), job.get_cached_progress_percentage() + assert job.get_cached_progress_percentage() == 11, ( + job.get_cached_progress_percentage() + ) jh.cancel_job(job) m_set_stop.set() diff --git a/backend/tests/baserow/core/service/test_service_type.py b/backend/tests/baserow/core/service/test_service_type.py index a2f8e911f4..078fcf6268 100644 --- a/backend/tests/baserow/core/service/test_service_type.py +++ b/backend/tests/baserow/core/service/test_service_type.py @@ -1,12 +1,12 @@ from unittest.mock import MagicMock, Mock, PropertyMock import pytest -from baserow_premium.integrations.local_baserow.service_types import DispatchResult from rest_framework.exceptions import ValidationError from baserow.core.services.models import Service from baserow.core.services.registries import ServiceType from baserow.test_utils.pytest_conftest import FakeDispatchContext +from baserow_premium.integrations.local_baserow.service_types import DispatchResult def test_service_type_get_schema_name(): diff --git a/backend/tests/baserow/core/test_basic_permissions.py b/backend/tests/baserow/core/test_basic_permissions.py index 0a6330b0dc..14c1c6c2e5 100755 --- 
a/backend/tests/baserow/core/test_basic_permissions.py +++ b/backend/tests/baserow/core/test_basic_permissions.py @@ -1179,9 +1179,10 @@ def test_allow_if_template_permission_manager_query_count(data_fixture): workspace=workspace_1, ) - with CaptureQueriesContext( - connection - ) as query_not_for_template, local_cache.context(): + with ( + CaptureQueriesContext(connection) as query_not_for_template, + local_cache.context(), + ): CoreHandler().check_permissions( buser, UpdateIntegrationOperationType.type, @@ -1214,12 +1215,12 @@ def get_all_subclasses(cls): for t in get_all_subclasses(OperationType) if hasattr(t, "type") and t.type is not None } - assert ( - all_operation_types == registered_operation_types - ), "Please make sure the following operation " "types are added to the registry: " + str( - all_operation_types.difference(registered_operation_types) - ) + " or somehow the following operations are registered but not subclasses?: " + str( - registered_operation_types.difference(all_operation_types) + assert all_operation_types == registered_operation_types, ( + "Please make sure the following operation " + "types are added to the registry: " + + str(all_operation_types.difference(registered_operation_types)) + + " or somehow the following operations are registered but not subclasses?: " + + str(registered_operation_types.difference(all_operation_types)) ) @@ -1240,12 +1241,12 @@ def get_all_subclasses(cls): all_operation_types = { t.type for t in get_all_subclasses(ObjectScopeType) if hasattr(t, "type") } - assert ( - all_operation_types == registered_operation_types - ), "Please make sure the following operation " "types are added to the registry: " + str( - all_operation_types.difference(registered_operation_types) - ) + " or somehow the following operations are registered but not subclasses?: " + str( - registered_operation_types.difference(all_operation_types) + assert all_operation_types == registered_operation_types, ( + "Please make sure the following 
operation " + "types are added to the registry: " + + str(all_operation_types.difference(registered_operation_types)) + + " or somehow the following operations are registered but not subclasses?: " + + str(registered_operation_types.difference(all_operation_types)) ) @@ -1266,12 +1267,12 @@ def get_all_subclasses(cls): all_op_context_types = { t.context_scope_name for t in get_all_subclasses(OperationType) } - assert ( - all_op_context_types == object_scope_types - ), "Please make sure the following object_scope_types exist and are added to the " "registry: " + str( - all_op_context_types.difference(object_scope_types) - ) + " or somehow the following context types are registered but not subclasses?: " + str( - object_scope_types.difference(all_op_context_types) + assert all_op_context_types == object_scope_types, ( + "Please make sure the following object_scope_types exist and are added to the " + "registry: " + + str(all_op_context_types.difference(object_scope_types)) + + " or somehow the following context types are registered but not subclasses?: " + + str(object_scope_types.difference(all_op_context_types)) ) diff --git a/backend/tests/baserow/core/test_core_utils.py b/backend/tests/baserow/core/test_core_utils.py index bae2963a00..a161f42f97 100755 --- a/backend/tests/baserow/core/test_core_utils.py +++ b/backend/tests/baserow/core/test_core_utils.py @@ -156,7 +156,7 @@ def test_unused_names(): # Try another suffix assert ( find_unused_name( - ["field"], ["field", "field 4" "field (1)", "field (2)"], suffix=" ({0})" + ["field"], ["field", "field 4field (1)", "field (2)"], suffix=" ({0})" ) == "field (3)" ) diff --git a/backend/tests/baserow/core/user_sources/test_user_source_handler.py b/backend/tests/baserow/core/user_sources/test_user_source_handler.py index b43fb3e8f0..798b6d273f 100644 --- a/backend/tests/baserow/core/user_sources/test_user_source_handler.py +++ b/backend/tests/baserow/core/user_sources/test_user_source_handler.py @@ -601,9 +601,12 @@ def 
mock_raise_update_user_count(user_source): UserSourceHandler().update_all_user_source_counts() # When an exception raises, we can make it propagate. - with stub_user_source_registry( - update_user_count_return=mock_raise_update_user_count - ), pytest.raises(Exception) as exc: + with ( + stub_user_source_registry( + update_user_count_return=mock_raise_update_user_count + ), + pytest.raises(Exception) as exc, + ): UserSourceHandler().update_all_user_source_counts(raise_on_error=True) assert str(exc.value) == "An error has occurred." diff --git a/backend/tests/baserow/core/user_sources/test_user_source_service.py b/backend/tests/baserow/core/user_sources/test_user_source_service.py index 2e1b733e0c..52304c20c5 100644 --- a/backend/tests/baserow/core/user_sources/test_user_source_service.py +++ b/backend/tests/baserow/core/user_sources/test_user_source_service.py @@ -184,8 +184,9 @@ def test_create_user_source_permission_denied(data_fixture, stub_check_permissio user_source_type = user_source_type_registry.get("local_baserow") - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): UserSourceService().create_user_source( user, user_source_type, application=application @@ -215,8 +216,9 @@ def test_get_user_source_permission_denied(data_fixture, stub_check_permissions) user = data_fixture.create_user() user_source = data_fixture.create_user_source_with_first_type(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): UserSourceService().get_user_source(user, user_source.id) @@ -283,8 +285,9 @@ def test_delete_user_source_permission_denied(data_fixture, stub_check_permissio user = data_fixture.create_user() user_source = 
data_fixture.create_user_source_with_first_type(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): UserSourceService().delete_user_source(user, user_source) @@ -310,8 +313,9 @@ def test_update_user_source_permission_denied(data_fixture, stub_check_permissio user = data_fixture.create_user() user_source = data_fixture.create_user_source_with_first_type(user=user) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): UserSourceService().update_user_source(user, user_source, value="newValue") @@ -374,8 +378,9 @@ def test_move_user_source_permission_denied(data_fixture, stub_check_permissions application=application ) - with stub_check_permissions(raise_permission_denied=True), pytest.raises( - PermissionException + with ( + stub_check_permissions(raise_permission_denied=True), + pytest.raises(PermissionException), ): UserSourceService().move_user_source(user, user_source3, before=user_source2) diff --git a/backend/tests/baserow/performance/test_formula_performance.py b/backend/tests/baserow/performance/test_formula_performance.py index 69abfd82b2..e439653b2f 100644 --- a/backend/tests/baserow/performance/test_formula_performance.py +++ b/backend/tests/baserow/performance/test_formula_performance.py @@ -75,7 +75,7 @@ def test_very_nested_formula_field_change(data_fixture, django_assert_num_querie table=table, name=f"perf_formula{i}", type_name="formula", - formula=f"field('perf_formula{i-1}')+1", + formula=f"field('perf_formula{i - 1}')+1", ) profiler = Profiler() profiler.start() @@ -108,7 +108,7 @@ def test_creating_very_nested_formula_field(data_fixture): table=table, name=f"perf_formula{i}", type_name="formula", - formula=f"field('perf_formula{i-1}')+1", + 
formula=f"field('perf_formula{i - 1}')+1", ) profiler.stop() print(profiler.output_text(unicode=True, color=True)) @@ -134,7 +134,7 @@ def test_altering_very_nested_formula_field(data_fixture, django_assert_num_quer table=table, name=f"perf_formula{i}", type_name="formula", - formula=f"field('perf_formula{i-1}')+1", + formula=f"field('perf_formula{i - 1}')+1", ) if i == 1: first_field = field @@ -170,7 +170,7 @@ def test_getting_data_from_a_very_nested_formula_field(data_fixture, api_client) table=table, name=f"perf_formula{i}", type_name="formula", - formula=f"field('perf_formula{i-1}')+1", + formula=f"field('perf_formula{i - 1}')+1", ) url = reverse("api:database:views:grid:list", kwargs={"view_id": grid.id}) profiler = Profiler() diff --git a/backend/uv.lock b/backend/uv.lock index 9d99dbd44d..20d9b10a40 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -173,14 +173,14 @@ wheels = [ [[package]] name = "autopep8" -version = "2.1.0" +version = "2.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycodestyle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4a/65/d187da76e65c358654a1bcdc4cbeb85767433e1e3eb67c473482301f2416/autopep8-2.1.0.tar.gz", hash = "sha256:1fa8964e4618929488f4ec36795c7ff12924a68b8bf01366c094fc52f770b6e7", size = 88891, upload-time = "2024-03-17T10:47:33.726Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/d8/30873d2b7b57dee9263e53d142da044c4600a46f2d28374b3e38b023df16/autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758", size = 92210, upload-time = "2025-01-14T14:46:18.454Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/09/87d32f364e09faebd126b2e52609182ce71ecc2ccf7e6daf8889704756b7/autopep8-2.1.0-py2.py3-none-any.whl", hash = "sha256:2bb76888c5edbcafe6aabab3c47ba534f5a2c2d245c2eddced4a30c4b4946357", size = 44957, upload-time = 
"2024-03-17T10:44:22.275Z" }, + { url = "https://files.pythonhosted.org/packages/9e/43/53afb8ba17218f19b77c7834128566c5bbb100a0ad9ba2e8e89d089d7079/autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128", size = 45807, upload-time = "2025-01-14T14:46:15.466Z" }, ] [[package]] @@ -229,20 +229,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/ae/69e52acdcf381b108b36d989ea58656de4a9ab8863aba6176d80d01041df/backports.cached_property-1.0.2-py3-none-any.whl", hash = "sha256:baeb28e1cd619a3c9ab8941431fe34e8490861fb998c6c4590693d50171db0cc", size = 6090, upload-time = "2022-06-14T08:48:16.734Z" }, ] -[[package]] -name = "bandit" -version = "1.7.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "stevedore", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/92/60/3f6e0e58f3f53bbb7227daf61654c9b22ff651e670e44cdc08a0f1d0f493/bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b", size = 1983678, upload-time = "2024-03-08T19:25:56.173Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/88/03935559af80b39cb64a00a4731d62ed2f79f4799c1758eadb01a4ef6b8d/bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381", size = 127633, upload-time = "2024-03-08T19:25:54.618Z" }, -] - [[package]] name = "baserow" source = { editable = "." 
} @@ -338,10 +324,7 @@ dependencies = [ [package.dev-dependencies] dev = [ { name = "argh", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "autopep8", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "backports-cached-property", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "bandit", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "black", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "build", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "coverage", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "debugpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, @@ -350,13 +333,11 @@ dev = [ { name = "django-stubs", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "django-stubs-ext", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "fakeredis", extra = ["lua"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "flake8", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "freezegun", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "graphviz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "httpretty", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "ipdb", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "ipython", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "isort", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "mypy", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "mypy-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "openapi-spec-validator", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, @@ -377,6 
+358,7 @@ dev = [ { name = "pytest-unordered", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "pytest-xdist", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "responses", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, + { name = "ruff", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "rust-just", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "snoop", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "watchdog", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, @@ -477,10 +459,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "argh", specifier = "==0.31.2" }, - { name = "autopep8", specifier = "==2.1.0" }, { name = "backports-cached-property", specifier = "==1.0.2" }, - { name = "bandit", specifier = "==1.7.8" }, - { name = "black", specifier = "==23.3.0" }, { name = "build" }, { name = "coverage", specifier = "==7.5.1" }, { name = "debugpy", specifier = "==1.8.1" }, @@ -489,13 +468,11 @@ dev = [ { name = "django-stubs", specifier = "==1.16.0" }, { name = "django-stubs-ext", specifier = "==0.8.0" }, { name = "fakeredis", extras = ["lua"], specifier = "==2.23.2" }, - { name = "flake8", specifier = "==7.0.0" }, { name = "freezegun", specifier = "==1.5.0" }, { name = "graphviz", specifier = "==0.20.3" }, { name = "httpretty", specifier = "==1.1.4" }, { name = "ipdb" }, { name = "ipython" }, - { name = "isort", specifier = "==5.13.2" }, { name = "mypy", specifier = "==1.10.0" }, { name = "mypy-extensions", specifier = "==1.0.0" }, { name = "openapi-spec-validator", specifier = "==0.7.2" }, @@ -516,6 +493,7 @@ dev = [ { name = "pytest-unordered", specifier = "==0.6.0" }, { name = "pytest-xdist", specifier = "==3.6.1" }, { name = "responses", specifier = "==0.25.0" }, + { name = "ruff", specifier = ">=0.8.0" }, { name = "rust-just", specifier = ">=1.43.1" }, { name = "snoop", 
specifier = "==0.4.3" }, { name = "watchdog", specifier = "==4.0.0" }, @@ -540,26 +518,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/87/8bab77b323f16d67be364031220069f79159117dd5e43eeb4be2fef1ac9b/billiard-4.2.4-py3-none-any.whl", hash = "sha256:525b42bdec68d2b983347ac312f892db930858495db601b5836ac24e6477cde5", size = 87070, upload-time = "2025-11-30T13:28:47.016Z" }, ] -[[package]] -name = "black" -version = "23.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "mypy-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "pathspec", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "platformdirs", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d6/36/66370f5017b100225ec4950a60caeef60201a10080da57ddb24124453fba/black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940", size = 582156, upload-time = "2023-03-29T01:00:54.457Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/6f/d3832960a3b646b333b7f0d80d336a3c123012e9d9d5dba4a622b2b6181d/black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6", size = 1326112, upload-time = "2023-03-29T01:19:05.794Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a5/17b40bfd9b607b69fa726b0b3a473d14b093dcd5191ea1a1dd664eccfee3/black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b", size = 2643808, upload-time = "2023-03-29T01:25:27.825Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/49/7e1f0cf585b0d607aad3f971f95982cc4208fc77f92363d632d23021ee57/black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d", size = 1503287, upload-time = "2023-03-29T01:28:35.228Z" }, - { url = "https://files.pythonhosted.org/packages/c0/53/42e312c17cfda5c8fc4b6b396a508218807a3fcbb963b318e49d3ddd11d5/black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70", size = 1638625, upload-time = "2023-03-29T01:11:16.193Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e7/4642b7f462381799393fbad894ba4b32db00870a797f0616c197b07129a9/black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4", size = 180965, upload-time = "2023-03-29T01:00:52.253Z" }, -] - [[package]] name = "boto3" version = "1.40.40" @@ -1176,14 +1134,14 @@ wheels = [ [[package]] name = "django-timezone-field" -version = "7.1" +version = "7.2.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/5b/0dbe271fef3c2274b83dbcb1b19fa3dacf1f7e542382819294644e78ea8b/django_timezone_field-7.1.tar.gz", hash = "sha256:b3ef409d88a2718b566fabe10ea996f2838bc72b22d3a2900c0aa905c761380c", size = 13727, upload-time = "2025-01-11T17:49:54.486Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/05/9b93a66452cdb8a08ab26f08d5766d2332673e659a8b2aeb73f2a904d421/django_timezone_field-7.2.1.tar.gz", hash = "sha256:def846f9e7200b7b8f2a28fcce2b78fb2d470f6a9f272b07c4e014f6ba4c6d2e", size = 13096, upload-time = "2025-12-06T23:50:44.591Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ec/09/7a808392a751a24ffa62bec00e3085a9c1a151d728c323a5bab229ea0e58/django_timezone_field-7.1-py3-none-any.whl", hash = "sha256:93914713ed882f5bccda080eda388f7006349f25930b6122e9b07bf8db49c4b4", size = 13177, upload-time = "2025-01-11T17:49:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/41/7f/d885667401515b467f84569c56075bc9add72c9fd425fca51a25f4c997e1/django_timezone_field-7.2.1-py3-none-any.whl", hash = "sha256:276915b72c5816f57c3baf9e43f816c695ef940d1b21f91ebf6203c09bf4ad44", size = 13284, upload-time = "2025-12-06T23:50:43.302Z" }, ] [[package]] @@ -1313,20 +1271,6 @@ lua = [ { name = "lupa", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] -[[package]] -name = "flake8" -version = "7.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mccabe", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "pycodestyle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "pyflakes", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/40/3c/3464b567aa367b221fa610bbbcce8015bf953977d21e52f2d711b526fb48/flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132", size = 48219, upload-time = "2024-01-05T00:41:52.142Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/01/cc8cdec7b61db0315c2ab62d80677a138ef06832ec17f04d87e6ef858f7f/flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3", size = 57570, upload-time = "2024-01-05T00:41:49.837Z" }, -] - [[package]] name = "flower" version = "2.0.1" @@ -1409,7 +1353,7 @@ wheels = [ [[package]] name = "google-cloud-storage" -version = "3.6.0" +version = "3.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", marker = "sys_platform == 
'darwin' or sys_platform == 'linux'" }, @@ -1419,9 +1363,9 @@ dependencies = [ { name = "google-resumable-media", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/cd/7e112cf025b2b591067b599e4bfe965df0c12b0cc0afdb5556469bff126d/google_cloud_storage-3.6.0.tar.gz", hash = "sha256:29cc6b9a6c0fc9cdad071e375d540a5a50fbc9a7fad8300fa02fb904f6fe2ca2", size = 17251072, upload-time = "2025-11-17T10:18:29.81Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/8e/fab2de1a0ab7fdbd452eaae5a9a5c933d0911c26b04efa0c76ddfd921259/google_cloud_storage-3.7.0.tar.gz", hash = "sha256:9ce59c65f4d6e372effcecc0456680a8d73cef4f2dc9212a0704799cb3d69237", size = 17258914, upload-time = "2025-12-09T18:24:48.97Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/ef/3b57bf617ee0c79450c1ff211d1eb888db8fc1050ac74b3e52cc6ed86e63/google_cloud_storage-3.6.0-py3-none-any.whl", hash = "sha256:5decbdddd63b7d1fc3e266a393ad6453d2e27d172bd982b1e2f15481668db097", size = 299039, upload-time = "2025-11-17T10:18:27.66Z" }, + { url = "https://files.pythonhosted.org/packages/2d/80/6e5c7c83cea15ed4dfc4843b9df9db0716bc551ac938f7b5dd18a72bd5e4/google_cloud_storage-3.7.0-py3-none-any.whl", hash = "sha256:469bc9540936e02f8a4bfd1619e9dca1e42dec48f95e4204d783b36476a15093", size = 303364, upload-time = "2025-12-09T18:24:47.343Z" }, ] [[package]] @@ -1482,20 +1426,17 @@ wheels = [ [[package]] name = "greenlet" -version = "3.2.4" +version = "3.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, - { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, - { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, - { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, - { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, - { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, - { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, - { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, + { url = "https://files.pythonhosted.org/packages/1f/cb/48e964c452ca2b92175a9b2dca037a553036cb053ba69e284650ce755f13/greenlet-3.3.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e29f3018580e8412d6aaf5641bb7745d38c85228dacf51a73bd4e26ddf2a6a8e", size = 274908, upload-time = "2025-12-04T14:23:26.435Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/da/38d7bff4d0277b594ec557f479d65272a893f1f2a716cad91efeb8680953/greenlet-3.3.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a687205fb22794e838f947e2194c0566d3812966b41c78709554aa883183fb62", size = 577113, upload-time = "2025-12-04T14:50:05.493Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f2/89c5eb0faddc3ff014f1c04467d67dee0d1d334ab81fadbf3744847f8a8a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4243050a88ba61842186cb9e63c7dfa677ec146160b0efd73b855a3d9c7fcf32", size = 590338, upload-time = "2025-12-04T14:57:41.136Z" }, + { url = "https://files.pythonhosted.org/packages/80/d7/db0a5085035d05134f8c089643da2b44cc9b80647c39e93129c5ef170d8f/greenlet-3.3.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:670d0f94cd302d81796e37299bcd04b95d62403883b24225c6b5271466612f45", size = 601098, upload-time = "2025-12-04T15:07:11.898Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/e959a127b630a58e23529972dbc868c107f9d583b5a9f878fb858c46bc1a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb3a8ec3db4a3b0eb8a3c25436c2d49e3505821802074969db017b87bc6a948", size = 590206, upload-time = "2025-12-04T14:26:01.254Z" }, + { url = "https://files.pythonhosted.org/packages/48/60/29035719feb91798693023608447283b266b12efc576ed013dd9442364bb/greenlet-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2de5a0b09eab81fc6a382791b995b1ccf2b172a9fec934747a7a23d2ff291794", size = 1550668, upload-time = "2025-12-04T15:04:22.439Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5f/783a23754b691bfa86bd72c3033aa107490deac9b2ef190837b860996c9f/greenlet-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4449a736606bd30f27f8e1ff4678ee193bc47f6ca810d705981cfffd6ce0d8c5", size = 1615483, upload-time = "2025-12-04T14:27:28.083Z" }, ] [[package]] @@ -1726,15 +1667,6 
@@ wheels = [ { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, ] -[[package]] -name = "isort" -version = "5.13.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/87/f9/c1eb8635a24e87ade2efce21e3ce8cd6b8630bb685ddc9cdaca1349b2eb5/isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", size = 175303, upload-time = "2023-12-13T20:37:26.124Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310, upload-time = "2023-12-13T20:37:23.244Z" }, -] - [[package]] name = "itsdangerous" version = "2.2.0" @@ -1967,7 +1899,7 @@ wheels = [ [[package]] name = "langsmith" -version = "0.4.53" +version = "0.4.56" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, @@ -1979,9 +1911,9 @@ dependencies = [ { name = "uuid-utils", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "zstandard", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/49/1c/8c4fbb995d176594d79e7347ca5e3cf1a839d300bee2b6b38861fbf57809/langsmith-0.4.53.tar.gz", hash = "sha256:362255850ac80abf6edc9e9b3455c42aa248e12686a24c637d4c56fc41139ffe", size = 990765, upload-time = "2025-12-03T01:00:43.505Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/e0/6d8a07b25a3ac308156707edaeffebbc30b2737bba8a75e65c40908beb94/langsmith-0.4.56.tar.gz", hash = 
"sha256:c3dc53509972689dbbc24f9ac92a095dcce00f76bb0db03ae385815945572540", size = 991755, upload-time = "2025-12-06T00:15:52.893Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/48/37cc533e2d16e4ec1d01f30b41933c9319af18389ea0f6866835ace7d331/langsmith-0.4.53-py3-none-any.whl", hash = "sha256:62e0b69d0f3b25afbd63dc5743a3bcec52993fe6c4e43e5b9e5311606aa04e9e", size = 411526, upload-time = "2025-12-03T01:00:42.053Z" }, + { url = "https://files.pythonhosted.org/packages/b8/6f/d5f9c4f1e03c91045d3675dc99df0682bc657952ad158c92c1f423de04f4/langsmith-0.4.56-py3-none-any.whl", hash = "sha256:f2c61d3f10210e78f16f77e3115f407d40f562ab00ac8c76927c7dd55b5c17b2", size = 411849, upload-time = "2025-12-06T00:15:50.828Z" }, ] [[package]] @@ -2064,15 +1996,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" }, ] -[[package]] -name = "mccabe" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, -] - [[package]] name = "mcp" version = "1.9.4" @@ -2594,22 +2517,22 @@ wheels = [ [[package]] name = "orjson" -version = "3.11.4" +version = "3.11.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = "2025-12-06T15:55:39.458Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/63/1d/1ea6005fffb56715fd48f632611e163d1604e8316a5bad2288bee9a1c9eb/orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39", size = 243498, upload-time = "2025-10-24T15:48:48.101Z" }, - { url = "https://files.pythonhosted.org/packages/37/d7/ffed10c7da677f2a9da307d491b9eb1d0125b0307019c4ad3d665fd31f4f/orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d", size = 128961, upload-time = "2025-10-24T15:48:49.571Z" }, - { url = "https://files.pythonhosted.org/packages/a2/96/3e4d10a18866d1368f73c8c44b7fe37cc8a15c32f2a7620be3877d4c55a3/orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175", size = 130321, upload-time = "2025-10-24T15:48:50.713Z" }, - { url = "https://files.pythonhosted.org/packages/eb/1f/465f66e93f434f968dd74d5b623eb62c657bdba2332f5a8be9f118bb74c7/orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040", size = 129207, upload-time = "2025-10-24T15:48:52.193Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/43/d1e94837543321c119dff277ae8e348562fe8c0fafbb648ef7cb0c67e521/orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63", size = 136323, upload-time = "2025-10-24T15:48:54.806Z" }, - { url = "https://files.pythonhosted.org/packages/bf/04/93303776c8890e422a5847dd012b4853cdd88206b8bbd3edc292c90102d1/orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9", size = 137440, upload-time = "2025-10-24T15:48:56.326Z" }, - { url = "https://files.pythonhosted.org/packages/1e/ef/75519d039e5ae6b0f34d0336854d55544ba903e21bf56c83adc51cd8bf82/orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a", size = 136680, upload-time = "2025-10-24T15:48:57.476Z" }, - { url = "https://files.pythonhosted.org/packages/b5/18/bf8581eaae0b941b44efe14fee7b7862c3382fbc9a0842132cfc7cf5ecf4/orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be", size = 136160, upload-time = "2025-10-24T15:48:59.631Z" }, - { url = "https://files.pythonhosted.org/packages/c4/35/a6d582766d351f87fc0a22ad740a641b0a8e6fc47515e8614d2e4790ae10/orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7", size = 140318, upload-time = "2025-10-24T15:49:00.834Z" }, - { url = "https://files.pythonhosted.org/packages/76/b3/5a4801803ab2e2e2d703bce1a56540d9f99a9143fbec7bf63d225044fef8/orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549", size = 406330, upload-time = "2025-10-24T15:49:02.327Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/55/a8f682f64833e3a649f620eafefee175cbfeb9854fc5b710b90c3bca45df/orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905", size = 149580, upload-time = "2025-10-24T15:49:03.517Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e4/c132fa0c67afbb3eb88274fa98df9ac1f631a675e7877037c611805a4413/orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907", size = 139846, upload-time = "2025-10-24T15:49:04.761Z" }, + { url = "https://files.pythonhosted.org/packages/fd/68/6b3659daec3a81aed5ab47700adb1a577c76a5452d35b91c88efee89987f/orjson-3.11.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9c8494625ad60a923af6b2b0bd74107146efe9b55099e20d7740d995f338fcd8", size = 245318, upload-time = "2025-12-06T15:54:02.355Z" }, + { url = "https://files.pythonhosted.org/packages/e9/00/92db122261425f61803ccf0830699ea5567439d966cbc35856fe711bfe6b/orjson-3.11.5-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:7bb2ce0b82bc9fd1168a513ddae7a857994b780b2945a8c51db4ab1c4b751ebc", size = 129491, upload-time = "2025-12-06T15:54:03.877Z" }, + { url = "https://files.pythonhosted.org/packages/94/4f/ffdcb18356518809d944e1e1f77589845c278a1ebbb5a8297dfefcc4b4cb/orjson-3.11.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67394d3becd50b954c4ecd24ac90b5051ee7c903d167459f93e77fc6f5b4c968", size = 132167, upload-time = "2025-12-06T15:54:04.944Z" }, + { url = "https://files.pythonhosted.org/packages/97/c6/0a8caff96f4503f4f7dd44e40e90f4d14acf80d3b7a97cb88747bb712d3e/orjson-3.11.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:298d2451f375e5f17b897794bcc3e7b821c0f32b4788b9bcae47ada24d7f3cf7", size = 130516, upload-time = "2025-12-06T15:54:06.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/63/43d4dc9bd9954bff7052f700fdb501067f6fb134a003ddcea2a0bb3854ed/orjson-3.11.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa5e4244063db8e1d87e0f54c3f7522f14b2dc937e65d5241ef0076a096409fd", size = 135695, upload-time = "2025-12-06T15:54:07.702Z" }, + { url = "https://files.pythonhosted.org/packages/87/6f/27e2e76d110919cb7fcb72b26166ee676480a701bcf8fc53ac5d0edce32f/orjson-3.11.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1db2088b490761976c1b2e956d5d4e6409f3732e9d79cfa69f876c5248d1baf9", size = 139664, upload-time = "2025-12-06T15:54:08.828Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f8/5966153a5f1be49b5fbb8ca619a529fde7bc71aa0a376f2bb83fed248bcd/orjson-3.11.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2ed66358f32c24e10ceea518e16eb3549e34f33a9d51f99ce23b0251776a1ef", size = 137289, upload-time = "2025-12-06T15:54:09.898Z" }, + { url = "https://files.pythonhosted.org/packages/a7/34/8acb12ff0299385c8bbcbb19fbe40030f23f15a6de57a9c587ebf71483fb/orjson-3.11.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2021afda46c1ed64d74b555065dbd4c2558d510d8cec5ea6a53001b3e5e82a9", size = 138784, upload-time = "2025-12-06T15:54:11.022Z" }, + { url = "https://files.pythonhosted.org/packages/ee/27/910421ea6e34a527f73d8f4ee7bdffa48357ff79c7b8d6eb6f7b82dd1176/orjson-3.11.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b42ffbed9128e547a1647a3e50bc88ab28ae9daa61713962e0d3dd35e820c125", size = 141322, upload-time = "2025-12-06T15:54:12.427Z" }, + { url = "https://files.pythonhosted.org/packages/87/a3/4b703edd1a05555d4bb1753d6ce44e1a05b7a6d7c164d5b332c795c63d70/orjson-3.11.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8d5f16195bb671a5dd3d1dbea758918bada8f6cc27de72bd64adfbd748770814", size = 413612, upload-time = "2025-12-06T15:54:13.858Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/36/034177f11d7eeea16d3d2c42a1883b0373978e08bc9dad387f5074c786d8/orjson-3.11.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c0e5d9f7a0227df2927d343a6e3859bebf9208b427c79bd31949abcc2fa32fa5", size = 150993, upload-time = "2025-12-06T15:54:15.189Z" }, + { url = "https://files.pythonhosted.org/packages/44/2f/ea8b24ee046a50a7d141c0227c4496b1180b215e728e3b640684f0ea448d/orjson-3.11.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23d04c4543e78f724c4dfe656b3791b5f98e4c9253e13b2636f1af5d90e4a880", size = 141774, upload-time = "2025-12-06T15:54:16.451Z" }, ] [[package]] @@ -2639,15 +2562,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7d/eb/b6260b31b1a96386c0a880edebe26f89669098acea8e0318bff6adb378fd/pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2", size = 9592, upload-time = "2025-01-10T18:43:11.88Z" }, ] -[[package]] -name = "pathspec" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, -] - [[package]] name = "pexpect" version = "4.9.0" @@ -2688,15 +2602,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c8/57/2899b82394a35a0fbfd352e290945440e3b3785655a03365c0ca8279f351/pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", size = 4600802, 
upload-time = "2024-07-01T09:46:08.145Z" }, ] -[[package]] -name = "platformdirs" -version = "4.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, -] - [[package]] name = "pluggy" version = "1.6.0" @@ -2779,15 +2684,15 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.1" +version = "6.33.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/03/a1440979a3f74f16cab3b75b0da1a1a7f922d56a8ddea96092391998edc0/protobuf-6.33.1.tar.gz", hash = "sha256:97f65757e8d09870de6fd973aeddb92f85435607235d20b2dfed93405d00c85b", size = 443432, upload-time = "2025-11-13T16:44:18.895Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/93/26213ff72b103ae55bb0d73e7fb91ea570ef407c3ab4fd2f1f27cac16044/protobuf-6.33.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:fe34575f2bdde76ac429ec7b570235bf0c788883e70aee90068e9981806f2490", size = 427522, upload-time = "2025-11-13T16:44:10.475Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/32/df4a35247923393aa6b887c3b3244a8c941c32a25681775f96e2b418f90e/protobuf-6.33.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:f8adba2e44cde2d7618996b3fc02341f03f5bc3f2748be72dc7b063319276178", size = 324445, upload-time = "2025-11-13T16:44:11.869Z" }, - { url = "https://files.pythonhosted.org/packages/8e/d0/d796e419e2ec93d2f3fa44888861c3f88f722cde02b7c3488fcc6a166820/protobuf-6.33.1-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:0f4cf01222c0d959c2b399142deb526de420be8236f22c71356e2a544e153c53", size = 339161, upload-time = "2025-11-13T16:44:12.778Z" }, - { url = "https://files.pythonhosted.org/packages/1d/2a/3c5f05a4af06649547027d288747f68525755de692a26a7720dced3652c0/protobuf-6.33.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:8fd7d5e0eb08cd5b87fd3df49bc193f5cfd778701f47e11d127d0afc6c39f1d1", size = 323171, upload-time = "2025-11-13T16:44:14.035Z" }, - { url = "https://files.pythonhosted.org/packages/08/b4/46310463b4f6ceef310f8348786f3cff181cea671578e3d9743ba61a459e/protobuf-6.33.1-py3-none-any.whl", hash = "sha256:d595a9fd694fdeb061a62fbe10eb039cc1e444df81ec9bb70c7fc59ebcb1eafa", size = 170477, upload-time = "2025-11-13T16:44:17.633Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, upload-time = "2025-12-06T00:17:44.445Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" }, + { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" }, + { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" }, ] [[package]] @@ -2867,11 +2772,11 @@ wheels = [ [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/34/8f/fa09ae2acc737b9507b5734a9aec9a2b35fa73409982f57db1b42f8c3c65/pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f", size = 38974, upload-time = "2023-10-12T23:39:39.762Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/e0/abfd2a0d2efe47670df87f3e3a0e2edda42f055053c85361f19c0e2c1ca8/pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", size = 39472, upload-time = "2025-06-20T18:49:48.75Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/90/a998c550d0ddd07e38605bb5c455d00fcc177a800ff9cc3dafdcb3dd7b56/pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67", size = 31132, upload-time = "2023-10-12T23:39:38.242Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" }, ] [[package]] @@ -2954,15 +2859,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/18/e2ccdcf7dcde91e9cc6e0608cdb3177eed6d7b38096aae6d31da63086a25/pyfakefs-5.4.1-py3-none-any.whl", hash = "sha256:21d6a3276d9c964510c85cef0c568920d53ec9033da9b2a2c616489cedbe700a", size = 216311, upload-time = "2024-04-11T18:13:44.549Z" }, ] -[[package]] -name = "pyflakes" -version = "3.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/f9/669d8c9c86613c9d568757c7f5824bd3197d7b1c6c27553bc5618a27cce2/pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", size = 63788, upload-time = "2024-01-05T00:28:47.703Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/d7/f1b7db88d8e4417c5d47adad627a93547f44bdc9028372dbd2313f34a855/pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a", size = 62725, upload-time = "2024-01-05T00:28:45.903Z" }, -] - [[package]] name = "pygments" version = "2.19.2" @@ -3484,24 +3380,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, ] +[[package]] +name = "ruff" +version = "0.14.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/d9/f7a0c4b3a2bf2556cd5d99b05372c29980249ef71e8e32669ba77428c82c/ruff-0.14.8.tar.gz", hash = 
"sha256:774ed0dd87d6ce925e3b8496feb3a00ac564bea52b9feb551ecd17e0a23d1eed", size = 5765385, upload-time = "2025-12-04T15:06:17.669Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/b8/9537b52010134b1d2b72870cc3f92d5fb759394094741b09ceccae183fbe/ruff-0.14.8-py3-none-linux_armv6l.whl", hash = "sha256:ec071e9c82eca417f6111fd39f7043acb53cd3fde9b1f95bbed745962e345afb", size = 13441540, upload-time = "2025-12-04T15:06:14.896Z" }, + { url = "https://files.pythonhosted.org/packages/24/00/99031684efb025829713682012b6dd37279b1f695ed1b01725f85fd94b38/ruff-0.14.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8cdb162a7159f4ca36ce980a18c43d8f036966e7f73f866ac8f493b75e0c27e9", size = 13669384, upload-time = "2025-12-04T15:06:51.809Z" }, + { url = "https://files.pythonhosted.org/packages/72/64/3eb5949169fc19c50c04f28ece2c189d3b6edd57e5b533649dae6ca484fe/ruff-0.14.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e2fcbefe91f9fad0916850edf0854530c15bd1926b6b779de47e9ab619ea38f", size = 12806917, upload-time = "2025-12-04T15:06:08.925Z" }, + { url = "https://files.pythonhosted.org/packages/c4/08/5250babb0b1b11910f470370ec0cbc67470231f7cdc033cee57d4976f941/ruff-0.14.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d70721066a296f45786ec31916dc287b44040f553da21564de0ab4d45a869b", size = 13256112, upload-time = "2025-12-04T15:06:23.498Z" }, + { url = "https://files.pythonhosted.org/packages/78/4c/6c588e97a8e8c2d4b522c31a579e1df2b4d003eddfbe23d1f262b1a431ff/ruff-0.14.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2c87e09b3cd9d126fc67a9ecd3b5b1d3ded2b9c7fce3f16e315346b9d05cfb52", size = 13227559, upload-time = "2025-12-04T15:06:33.432Z" }, + { url = "https://files.pythonhosted.org/packages/23/ce/5f78cea13eda8eceac71b5f6fa6e9223df9b87bb2c1891c166d1f0dce9f1/ruff-0.14.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d62cb310c4fbcb9ee4ac023fe17f984ae1e12b8a4a02e3d21489f9a2a5f730c", size = 
13896379, upload-time = "2025-12-04T15:06:02.687Z" }, + { url = "https://files.pythonhosted.org/packages/cf/79/13de4517c4dadce9218a20035b21212a4c180e009507731f0d3b3f5df85a/ruff-0.14.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1af35c2d62633d4da0521178e8a2641c636d2a7153da0bac1b30cfd4ccd91344", size = 15372786, upload-time = "2025-12-04T15:06:29.828Z" }, + { url = "https://files.pythonhosted.org/packages/00/06/33df72b3bb42be8a1c3815fd4fae83fa2945fc725a25d87ba3e42d1cc108/ruff-0.14.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25add4575ffecc53d60eed3f24b1e934493631b48ebbc6ebaf9d8517924aca4b", size = 14990029, upload-time = "2025-12-04T15:06:36.812Z" }, + { url = "https://files.pythonhosted.org/packages/64/61/0f34927bd90925880394de0e081ce1afab66d7b3525336f5771dcf0cb46c/ruff-0.14.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c943d847b7f02f7db4201a0600ea7d244d8a404fbb639b439e987edcf2baf9a", size = 14407037, upload-time = "2025-12-04T15:06:39.979Z" }, + { url = "https://files.pythonhosted.org/packages/96/bc/058fe0aefc0fbf0d19614cb6d1a3e2c048f7dc77ca64957f33b12cfdc5ef/ruff-0.14.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6e8bf7b4f627548daa1b69283dac5a296bfe9ce856703b03130732e20ddfe2", size = 14102390, upload-time = "2025-12-04T15:06:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/af/a4/e4f77b02b804546f4c17e8b37a524c27012dd6ff05855d2243b49a7d3cb9/ruff-0.14.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:7aaf2974f378e6b01d1e257c6948207aec6a9b5ba53fab23d0182efb887a0e4a", size = 14230793, upload-time = "2025-12-04T15:06:20.497Z" }, + { url = "https://files.pythonhosted.org/packages/3f/52/bb8c02373f79552e8d087cedaffad76b8892033d2876c2498a2582f09dcf/ruff-0.14.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e5758ca513c43ad8a4ef13f0f081f80f08008f410790f3611a21a92421ab045b", size = 13160039, upload-time = "2025-12-04T15:06:49.06Z" }, + { 
url = "https://files.pythonhosted.org/packages/1f/ad/b69d6962e477842e25c0b11622548df746290cc6d76f9e0f4ed7456c2c31/ruff-0.14.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f74f7ba163b6e85a8d81a590363bf71618847e5078d90827749bfda1d88c9cdf", size = 13205158, upload-time = "2025-12-04T15:06:54.574Z" }, + { url = "https://files.pythonhosted.org/packages/06/63/54f23da1315c0b3dfc1bc03fbc34e10378918a20c0b0f086418734e57e74/ruff-0.14.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:eed28f6fafcc9591994c42254f5a5c5ca40e69a30721d2ab18bb0bb3baac3ab6", size = 13469550, upload-time = "2025-12-04T15:05:59.209Z" }, + { url = "https://files.pythonhosted.org/packages/70/7d/a4d7b1961e4903bc37fffb7ddcfaa7beb250f67d97cfd1ee1d5cddb1ec90/ruff-0.14.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:21d48fa744c9d1cb8d71eb0a740c4dd02751a5de9db9a730a8ef75ca34cf138e", size = 14211332, upload-time = "2025-12-04T15:06:06.027Z" }, +] + [[package]] name = "rust-just" -version = "1.43.1" +version = "1.44.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/60/f3c0d2c557f89d607fef543e64f7088056484cc0ea0b4e9681b217da72e8/rust_just-1.43.1.tar.gz", hash = "sha256:dc7ab2efd8259cdcd80eff1514f7e859772bfafb21842effff870dbed2107663", size = 1428118, upload-time = "2025-11-19T07:49:20.097Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/bf/f57bd02651a25822d75a86ba29ba5f392caf4f9087c6bfe90d568cc3bb9c/rust_just-1.44.0.tar.gz", hash = "sha256:acf638cadd45b71fb31d74ca71541addb033b3f1996ebc8e03bb52d505c30693", size = 1430207, upload-time = "2025-12-07T02:12:19.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/24/c5b87694c7d33848f820f016c3dd5aed6150d946d60a6e606a0e5165a084/rust_just-1.43.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fe8204a2c230abd2b3b946b3246e1f9ed80ee40de16473ede5ebdf0ded9454a", size = 1678997, upload-time = "2025-11-19T07:48:42.187Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/b2/45c6925f7851c65436d5b8a75bc378921624a550d4ce296a1723a1b3f369/rust_just-1.43.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:04f8bd5fabcdebe8bf03d93b12352ab54ae3d831d317240be5cca7272173151d", size = 1561329, upload-time = "2025-11-19T07:48:45.23Z" }, - { url = "https://files.pythonhosted.org/packages/9f/a8/11f1418448d579c2e5fb99d7f26b15120292bd5032a3f091da8757e09cb6/rust_just-1.43.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92781d23ce0e5d5815a0e43af97e44ce52d0ca2817c3e0bf4824795bc8db7516", size = 1645638, upload-time = "2025-11-19T07:48:47.854Z" }, - { url = "https://files.pythonhosted.org/packages/86/fd/5b3098d3dd442f21a25b51c9a94e82d2602a62f1d3f09c2587e5f2583f3c/rust_just-1.43.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cde7c8cc0b77bd7c41c24587532b7dad1a44caa396aa2766abbcf5a424a1ac5c", size = 1616029, upload-time = "2025-11-19T07:48:50.622Z" }, - { url = "https://files.pythonhosted.org/packages/8a/c9/7dfec9cc246d9c94f14bdbd477650c5dbe5b9a71a2891e0c6c70b1e97c04/rust_just-1.43.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5019283f827947b5ad8cf9947553595a1810474c56a4d15e9987d33bb05ae4c3", size = 1787480, upload-time = "2025-11-19T07:48:53.118Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a5/96167b70fccc7d345309edb3971a40131dd2aca130c107a849572704b742/rust_just-1.43.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c703f8383f6fc535a164f2cb3122ffa6e9c6c5d4b89c355f032e15623b344205", size = 1863173, upload-time = "2025-11-19T07:48:55.715Z" }, - { url = "https://files.pythonhosted.org/packages/eb/5e/05f537cba29a595748273f30cf3dc8757f633f25dd45464a6caca458c2f2/rust_just-1.43.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:478c90550ac0a287ac76e05381f57b41eedbe53956b227f176a76e5074e5aceb", size = 1855852, upload-time = "2025-11-19T07:48:58.483Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/cc/77639b3a75edd92ea48ac1626c8b45d40ba0f2f498aad319747ad4a71c46/rust_just-1.43.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31e0e695d215382bbc23f06e7ee0df138712bcce238dc027543e26173a23df31", size = 1771522, upload-time = "2025-11-19T07:49:01.28Z" }, - { url = "https://files.pythonhosted.org/packages/c6/72/a3778b9b3ee34e11c3a83df1e8ce862262158a6219efe514458cf9f1342e/rust_just-1.43.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c75cd479dc624cee83db8765a453f4ff174c71d71a90c723ba076d998d9834e", size = 1659934, upload-time = "2025-11-19T07:49:03.925Z" }, - { url = "https://files.pythonhosted.org/packages/a2/1c/6fec8e79007620137d33c5dcda463713a73338b02c5cfe15cc5e9d8a47b2/rust_just-1.43.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f9665c00a3bb852aed1f3966ccfe50799c1db9b17841566db7fc1f99e5bbf4cd", size = 1636577, upload-time = "2025-11-19T07:49:06.706Z" }, - { url = "https://files.pythonhosted.org/packages/1c/5b/f40f6baa92014a039e75df14512505cebbeaf9df79c7305e4e8d2d21b26f/rust_just-1.43.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a1dbd5b1634d40ccdfe493edd2d620a6bffbe85e410ddd2acdd929789bcde1fe", size = 1781292, upload-time = "2025-11-19T07:49:09.218Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ab/7374bf4300b00fac5934e34c483a08ff2fdf7e99ce1146140e2e2e64ff05/rust_just-1.43.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:89ba22d438cad2356f80f45c670d794537ef59631c09dfd4938bf5cc3440975b", size = 1838422, upload-time = "2025-11-19T07:49:12.048Z" }, + { url = "https://files.pythonhosted.org/packages/cd/22/e5126315d78a4d8402c887c9a484aa482633c2241e839ac2b5dcfc8d9ccf/rust_just-1.44.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fdc28a185acd8260f7a22c6c493d74f35823db262dd9543389784aac474c6b73", size = 1684654, upload-time = "2025-12-07T02:11:41.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/64/dd616426dd3fc88c1e02771477332e8e880ec0ed4098a785f096f4b517af/rust_just-1.44.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f7a80246442156d9cc1609a7ea632f93b154b71541e4ad95c723da3f1561608b", size = 1568663, upload-time = "2025-12-07T02:11:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/28/75/1886a2c0258e5fe490dcc93a326a2173c8eeffa965fcdff4e2b6736f9173/rust_just-1.44.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b76d0aab59efb6ed4627305e40614a641588feb217c22aaeaf5ceecc50528d", size = 1650270, upload-time = "2025-12-07T02:11:47.112Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/9182dc73b20a1bb1abe74cd39717b06b2b786d3de3e3f54a1337b659a3db/rust_just-1.44.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c181615b16bd4a391c44335eefe42930b893e5b7e6f67dc6fd04183e6021c256", size = 1620961, upload-time = "2025-12-07T02:11:49.738Z" }, + { url = "https://files.pythonhosted.org/packages/50/fe/99476b6bb11e91ac3f40e52bad0ca8e1a1617d5324eb760bb8d537239e0b/rust_just-1.44.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00065823fd3a831689bdd7e9879f080cd14064815ddbf9800e47365e9742c54b", size = 1794957, upload-time = "2025-12-07T02:11:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/9a/51/a9f6180da830291f324e13e658fc85bc40a17fe8973844818a3ca010af8c/rust_just-1.44.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:708519117867731717c68defcd5d164db5a56d7a0ec843f4e7fbb07ac97c599b", size = 1866444, upload-time = "2025-12-07T02:11:56.258Z" }, + { url = "https://files.pythonhosted.org/packages/98/68/64ca088bb317d26de33ad6a96de2f0c36c1a8bc59e9acf9db3fb5d3825b5/rust_just-1.44.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:350a0117a8f74dee9ba7f7779468d8466940863ecbd1990e26614c8f0daeb76b", size = 1863672, upload-time = "2025-12-07T02:11:58.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/0d/fbfed0760a1ce2e79d6d09183caff5a7567d01bcb1662d0f13804375d734/rust_just-1.44.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deb5cf3d2d48a630a4ef63f22e21720e942ee198362157ff75de72b1a075a49b", size = 1776708, upload-time = "2025-12-07T02:12:01.428Z" }, + { url = "https://files.pythonhosted.org/packages/95/78/11998eda1a5233c0b9ba7d9138df44a25657ad6cf60349059ce47bb27186/rust_just-1.44.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b631029c1c5b49dd0e8749ab405c1270099fbf4794a726290cabead9a81ff7ab", size = 1666235, upload-time = "2025-12-07T02:12:03.78Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f2/70c2ddfdf6827e61cdb0c04b849084a7a1ae7012d01c581db5b7b8071599/rust_just-1.44.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7a12786b6a240aacd387d3651445c57c0105e685658a5af53faae41ef7b0c9a5", size = 1641887, upload-time = "2025-12-07T02:12:06.91Z" }, + { url = "https://files.pythonhosted.org/packages/03/1f/0079adfed540a2ef53a1275e3bc5f8a3f37040319d01e9dadae4d0b63a7b/rust_just-1.44.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4acdb92272680909cb9dcaa84da221a58cc49140e28025ae6cbd5c44f4371867", size = 1786560, upload-time = "2025-12-07T02:12:09.681Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/20dd8ad91efb3d9e47587bdd16d2ebad1b040caa84e93bc7a8520026d896/rust_just-1.44.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e04be73e7b78681d3a449e7e9d3561bde4e8ec13aa0450cbc198d1daea9f1e2e", size = 1842937, upload-time = "2025-12-07T02:12:11.899Z" }, ] [[package]] @@ -3589,22 +3508,13 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.44" +version = "2.0.45" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "(platform_machine == 'AMD64' and sys_platform == 'darwin') or (platform_machine == 'WIN32' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'darwin') or (platform_machine 
== 'amd64' and sys_platform == 'darwin') or (platform_machine == 'ppc64le' and sys_platform == 'darwin') or (platform_machine == 'win32' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and sys_platform == 'darwin') or (platform_machine == 'AMD64' and sys_platform == 'linux') or (platform_machine == 'WIN32' and sys_platform == 'linux') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'amd64' and sys_platform == 'linux') or (platform_machine == 'ppc64le' and sys_platform == 'linux') or (platform_machine == 'win32' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517, upload-time = "2025-10-10T15:36:15.64Z" }, - { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738, upload-time = "2025-10-10T15:36:16.91Z" }, - { url = "https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145, 
upload-time = "2025-10-10T15:34:19.569Z" }, - { url = "https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511, upload-time = "2025-10-10T15:47:05.088Z" }, - { url = "https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161, upload-time = "2025-10-10T15:34:21.193Z" }, - { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426, upload-time = "2025-10-10T15:47:07.196Z" }, - { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, -] +sdist = { url = "https://files.pythonhosted.org/packages/be/f9/5e4491e5ccf42f5d9cfc663741d261b3e6e1683ae7812114e7636409fcc6/sqlalchemy-2.0.45.tar.gz", hash = "sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88", size = 9869912, upload-time = "2025-12-09T21:05:16.737Z" } [[package]] name = "sqlparse" @@ -3654,15 +3564,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, ] -[[package]] -name = 
"stevedore" -version = "5.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/5b/496f8abebd10c3301129abba7ddafd46c71d799a70c44ab080323987c4c9/stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945", size = 516074, upload-time = "2025-11-20T10:06:07.264Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820", size = 54428, upload-time = "2025-11-20T10:06:05.946Z" }, -] - [[package]] name = "tenacity" version = "9.1.2" @@ -3766,11 +3667,11 @@ tls = [ [[package]] name = "txaio" -version = "25.9.2" +version = "25.12.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2b/20/2e7ccea9ab2dd824d0bd421d9364424afde3bb33863afb80cd9180335019/txaio-25.9.2.tar.gz", hash = "sha256:e42004a077c02eb5819ff004a4989e49db113836708430d59cb13d31bd309099", size = 50008, upload-time = "2025-09-25T22:21:07.958Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/67/ea9c9ddbaa3e0b4d53c91f8778a33e42045be352dc7200ed2b9aaa7dc229/txaio-25.12.2.tar.gz", hash = "sha256:9f232c21e12aa1ff52690e365b5a0ecfd42cc27a6ec86e1b92ece88f763f4b78", size = 117393, upload-time = "2025-12-09T15:03:26.527Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/2c/e276b80f73fc0411cefa1c1eeae6bc17955197a9c3e2b41b41f957322549/txaio-25.9.2-py3-none-any.whl", hash = "sha256:a23ce6e627d130e9b795cbdd46c9eaf8abd35e42d2401bb3fea63d38beda0991", size = 31293, upload-time = "2025-09-25T22:21:06.394Z" }, + { url = "https://files.pythonhosted.org/packages/50/05/bdb6318120cac9bf97779674f49035e0595d894b42d4c43b60637bafdb1f/txaio-25.12.2-py3-none-any.whl", hash = "sha256:5f6cd6c6b397fc3305790d15efd46a2d5b91cdbefa96543b4f8666aeb56ba026", size = 
31208, upload-time = "2025-12-09T04:30:27.811Z" }, ] [[package]] diff --git a/changelog/entries/unreleased/refactor/replace_python_linting_and_formatting_toolchain_autopep8_bla.json b/changelog/entries/unreleased/refactor/replace_python_linting_and_formatting_toolchain_autopep8_bla.json new file mode 100644 index 0000000000..b40ca493b1 --- /dev/null +++ b/changelog/entries/unreleased/refactor/replace_python_linting_and_formatting_toolchain_autopep8_bla.json @@ -0,0 +1,9 @@ +{ + "type": "refactor", + "message": "Replace Python linting and formatting toolchain (autopep8, black, flake8, isort, bandit) with ruff", + "issue_origin": "github", + "issue_number": null, + "domain": "core", + "bullet_points": [], + "created_at": "2026-01-16" +} \ No newline at end of file diff --git a/enterprise/backend/src/baserow_enterprise/api/audit_log/views.py b/enterprise/backend/src/baserow_enterprise/api/audit_log/views.py index d814e19abd..13ff4a4bad 100755 --- a/enterprise/backend/src/baserow_enterprise/api/audit_log/views.py +++ b/enterprise/backend/src/baserow_enterprise/api/audit_log/views.py @@ -1,7 +1,6 @@ from django.db import transaction from django.utils import translation -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework.permissions import IsAdminUser, IsAuthenticated @@ -31,6 +30,7 @@ check_for_license_and_permissions_or_raise, ) from baserow_enterprise.features import AUDIT_LOG +from baserow_premium.license.handler import LicenseHandler from .serializers import ( AuditLogActionTypeSerializer, diff --git a/enterprise/backend/src/baserow_enterprise/api/builder/custom_code/views.py b/enterprise/backend/src/baserow_enterprise/api/builder/custom_code/views.py index 82b37491ea..fcabc1dcd8 100644 --- a/enterprise/backend/src/baserow_enterprise/api/builder/custom_code/views.py +++ 
b/enterprise/backend/src/baserow_enterprise/api/builder/custom_code/views.py @@ -1,6 +1,5 @@ from django.http import HttpResponse -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework.permissions import AllowAny @@ -15,6 +14,7 @@ AuthenticateFromUserSessionAuthentication, ) from baserow_enterprise.features import BUILDER_CUSTOM_CODE +from baserow_premium.license.handler import LicenseHandler class PublicCustomCodeView(APIView): diff --git a/enterprise/backend/src/baserow_enterprise/api/data_sync/views.py b/enterprise/backend/src/baserow_enterprise/api/data_sync/views.py index dab512a8a6..9981515110 100644 --- a/enterprise/backend/src/baserow_enterprise/api/data_sync/views.py +++ b/enterprise/backend/src/baserow_enterprise/api/data_sync/views.py @@ -1,6 +1,5 @@ from django.db import transaction -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes from drf_spectacular.utils import extend_schema from rest_framework.permissions import IsAuthenticated @@ -24,6 +23,7 @@ UpdatePeriodicDataSyncIntervalActionType, ) from baserow_enterprise.data_sync.models import DATA_SYNC_INTERVAL_MANUAL +from baserow_premium.license.handler import LicenseHandler from ...features import DATA_SYNC from .serializers import PeriodicDataSyncIntervalSerializer diff --git a/enterprise/backend/src/baserow_enterprise/api/field_permissions/views.py b/enterprise/backend/src/baserow_enterprise/api/field_permissions/views.py index fa2db04d6c..273ad6bd87 100644 --- a/enterprise/backend/src/baserow_enterprise/api/field_permissions/views.py +++ b/enterprise/backend/src/baserow_enterprise/api/field_permissions/views.py @@ -2,7 +2,6 @@ from django.db import transaction -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.types import OpenApiTypes from 
drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework.response import Response @@ -22,6 +21,7 @@ UpdateFieldPermissionsActionType, ) from baserow_enterprise.field_permissions.handler import FieldPermissionsHandler +from baserow_premium.license.handler import LicenseHandler from .serializers import ( UpdateFieldPermissionsRequestSerializer, diff --git a/enterprise/backend/src/baserow_enterprise/api/integrations/common/sso/saml/views.py b/enterprise/backend/src/baserow_enterprise/api/integrations/common/sso/saml/views.py index 189909da04..18ede8f374 100644 --- a/enterprise/backend/src/baserow_enterprise/api/integrations/common/sso/saml/views.py +++ b/enterprise/backend/src/baserow_enterprise/api/integrations/common/sso/saml/views.py @@ -125,9 +125,9 @@ def on_error(error_code): query_params = data.get("saml_request_data", {}) # Add the refresh token as query parameter - query_params[ - f"user_source_saml_token__{user.user_source.id}" - ] = user.get_refresh_token() + query_params[f"user_source_saml_token__{user.user_source.id}"] = ( + user.get_refresh_token() + ) # Otherwise it's a success, we redirect to the login page redirect_url = get_valid_frontend_url( diff --git a/enterprise/backend/src/baserow_enterprise/api/role/views.py b/enterprise/backend/src/baserow_enterprise/api/role/views.py index 6144e0868e..c2bf09e381 100755 --- a/enterprise/backend/src/baserow_enterprise/api/role/views.py +++ b/enterprise/backend/src/baserow_enterprise/api/role/views.py @@ -4,7 +4,6 @@ from django.contrib.auth import get_user_model from django.db import transaction -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework.response import Response @@ -49,6 +48,7 @@ from baserow_enterprise.role.constants import ROLE_ASSIGNABLE_OBJECT_MAP from baserow_enterprise.role.handler import RoleAssignmentHandler from 
baserow_enterprise.role.types import NewRoleAssignment +from baserow_premium.license.handler import LicenseHandler from .exceptions import DuplicateRoleAssignments from .serializers import ( diff --git a/enterprise/backend/src/baserow_enterprise/api/secure_file_serve/views.py b/enterprise/backend/src/baserow_enterprise/api/secure_file_serve/views.py index 860b6db0cb..cb760e11cc 100755 --- a/enterprise/backend/src/baserow_enterprise/api/secure_file_serve/views.py +++ b/enterprise/backend/src/baserow_enterprise/api/secure_file_serve/views.py @@ -4,8 +4,6 @@ from django.http import FileResponse from django.utils.encoding import smart_str -from baserow_premium.license.exceptions import FeaturesNotAvailableError -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.utils import extend_schema from rest_framework.renderers import BaseRenderer from rest_framework.views import APIView @@ -19,6 +17,8 @@ from baserow_enterprise.secure_file_serve.constants import SecureFileServePermission from baserow_enterprise.secure_file_serve.exceptions import SecureFileServeException from baserow_enterprise.secure_file_serve.handler import SecureFileServeHandler +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.license.handler import LicenseHandler from .errors import ERROR_SECURE_FILE_SERVE_EXCEPTION diff --git a/enterprise/backend/src/baserow_enterprise/apps.py b/enterprise/backend/src/baserow_enterprise/apps.py index b56be031d9..7be1301b82 100755 --- a/enterprise/backend/src/baserow_enterprise/apps.py +++ b/enterprise/backend/src/baserow_enterprise/apps.py @@ -156,13 +156,12 @@ def ready(self): permission_manager_type_registry.register(FieldPermissionManagerType()) permission_manager_type_registry.register(RolePermissionManagerType()) - from baserow_premium.license.registries import license_type_registry - from baserow_enterprise.license_types import ( AdvancedLicenseType, EnterpriseLicenseType, 
EnterpriseWithoutSupportLicenseType, ) + from baserow_premium.license.registries import license_type_registry license_type_registry.register(AdvancedLicenseType()) license_type_registry.register(EnterpriseWithoutSupportLicenseType()) @@ -379,9 +378,9 @@ def ready(self): # The signals must always be imported last because they use the registries # which need to be filled first. - import baserow_enterprise.assistant.tasks # noqa: F - import baserow_enterprise.audit_log.signals # noqa: F - import baserow_enterprise.ws.signals # noqa: F + import baserow_enterprise.assistant.tasks # noqa: F401 + import baserow_enterprise.audit_log.signals # noqa: F401 + import baserow_enterprise.ws.signals # noqa: F401 def sync_default_roles_after_migrate(sender, **kwargs): diff --git a/enterprise/backend/src/baserow_enterprise/assistant/assistant.py b/enterprise/backend/src/baserow_enterprise/assistant/assistant.py index 4c7ce85821..2404526461 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/assistant.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/assistant.py @@ -608,10 +608,13 @@ async def astream_messages( ) default_callbacks = udspy.settings.callbacks - with udspy.settings.context( - lm=self._lm_client, - callbacks=[*default_callbacks, *self._callbacks], - ), self._telemetry_callbacks.trace(self._chat, human_msg.content): + with ( + udspy.settings.context( + lm=self._lm_client, + callbacks=[*default_callbacks, *self._callbacks], + ), + self._telemetry_callbacks.trace(self._chat, human_msg.content), + ): message_id = str(human_msg.id) yield AiStartedMessage(message_id=message_id) diff --git a/enterprise/backend/src/baserow_enterprise/assistant/signatures.py b/enterprise/backend/src/baserow_enterprise/assistant/signatures.py index 89bf3d0dd1..0e7e07e54e 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/signatures.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/signatures.py @@ -31,10 +31,10 @@ class 
RequestRouter(udspy.Signature): desc="Previous messages formatted as '[index] (role): content', ordered chronologically" ) - routing_decision: Literal[ - "delegate_to_agent", "search_user_docs" - ] = udspy.OutputField( - desc="Must be one of: 'delegate_to_agent' or 'search_user_docs'" + routing_decision: Literal["delegate_to_agent", "search_user_docs"] = ( + udspy.OutputField( + desc="Must be one of: 'delegate_to_agent' or 'search_user_docs'" + ) ) extracted_context: str = udspy.OutputField( desc=( diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/core/tools.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/core/tools.py index 68ea702ad3..8d94e02e70 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/core/tools.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/core/tools.py @@ -28,7 +28,7 @@ def list_builders( builder_types: list[ Literal["database", "application", "automation", "dashboard"] ] - | None = None + | None = None, ) -> list[AnyBuilderItem] | str: """ Lists all the builders the user can access (databases, applications, diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/tools.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/tools.py index 54e8b03994..56d2ed7a37 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/tools.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/tools.py @@ -5,7 +5,6 @@ from django.utils.translation import gettext as _ import udspy -from baserow_premium.prompts import get_formula_docs from loguru import logger from pydantic import create_model @@ -28,6 +27,7 @@ from baserow.core.service import CoreService from baserow_enterprise.assistant.tools.registries import AssistantToolType from baserow_enterprise.assistant.types import TableNavigationType, ViewNavigationType +from baserow_premium.prompts import get_formula_docs from . 
import utils from .types import ( diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/fields.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/fields.py index 9be4a3be13..742e212c6c 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/fields.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/fields.py @@ -2,12 +2,10 @@ from django.db.models import Q -from baserow_premium.permission_manager import Table from pydantic import Field -from baserow.contrib.database.fields.models import DateField -from baserow.contrib.database.fields.models import Field as BaserowField from baserow.contrib.database.fields.models import ( + DateField, FormulaField, LinkRowField, LookupField, @@ -16,9 +14,11 @@ RatingField, SingleSelectField, ) +from baserow.contrib.database.fields.models import Field as BaserowField from baserow.contrib.database.fields.registries import field_type_registry from baserow_enterprise.assistant.types import BaseModel from baserow_enterprise.data_sync.hubspot_contacts_data_sync import LongTextField +from baserow_premium.permission_manager import Table class FieldItemCreate(BaseModel): diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/views.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/views.py index e28a7438f4..021e4916a1 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/views.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/types/views.py @@ -1,7 +1,5 @@ from typing import Annotated, Literal, Type -from baserow_premium.permission_manager import Table -from baserow_premium.views.models import CalendarView, KanbanView, TimelineView from pydantic import Field from baserow.contrib.database.fields.models import ( @@ -13,6 +11,8 @@ from baserow.contrib.database.views.models import View as BaserowView from 
baserow.contrib.database.views.registries import view_type_registry from baserow_enterprise.assistant.types import BaseModel +from baserow_premium.permission_manager import Table +from baserow_premium.views.models import CalendarView, KanbanView, TimelineView class ViewItemCreate(BaseModel): diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/utils.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/utils.py index 8cb96df372..2cc186c093 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/database/utils.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/database/utils.py @@ -408,7 +408,12 @@ def _create_rows( Create new rows in the specified table. """ - nonlocal user, workspace, tool_helpers, row_model_for_create, row_model_for_response + nonlocal \ + user, \ + workspace, \ + tool_helpers, \ + row_model_for_create, \ + row_model_for_response if not rows: return [] diff --git a/enterprise/backend/src/baserow_enterprise/assistant/tools/search_user_docs/tools.py b/enterprise/backend/src/baserow_enterprise/assistant/tools/search_user_docs/tools.py index b7b7480ccd..6457cb9063 100644 --- a/enterprise/backend/src/baserow_enterprise/assistant/tools/search_user_docs/tools.py +++ b/enterprise/backend/src/baserow_enterprise/assistant/tools/search_user_docs/tools.py @@ -78,7 +78,7 @@ def get_search_user_docs_tool( async def search_user_docs( question: Annotated[ str, "The English version of the user question, using Baserow vocabulary." - ] + ], ) -> dict[str, Any]: """ Search Baserow documentation to provide instructions and information for USERS. 
diff --git a/enterprise/backend/src/baserow_enterprise/audit_log/utils.py b/enterprise/backend/src/baserow_enterprise/audit_log/utils.py index 72cfef6b8c..3b8e1b5e17 100644 --- a/enterprise/backend/src/baserow_enterprise/audit_log/utils.py +++ b/enterprise/backend/src/baserow_enterprise/audit_log/utils.py @@ -2,7 +2,6 @@ from django.contrib.auth.models import AbstractUser -from baserow_premium.license.handler import LicenseHandler from rest_framework.exceptions import PermissionDenied from baserow.core.handler import CoreHandler @@ -10,6 +9,7 @@ ListWorkspaceAuditLogEntriesOperationType, ) from baserow_enterprise.features import AUDIT_LOG +from baserow_premium.license.handler import LicenseHandler def check_for_license_and_permissions_or_raise( diff --git a/enterprise/backend/src/baserow_enterprise/builder/application_types.py b/enterprise/backend/src/baserow_enterprise/builder/application_types.py index 7a5c2ce2a3..2432337b26 100644 --- a/enterprise/backend/src/baserow_enterprise/builder/application_types.py +++ b/enterprise/backend/src/baserow_enterprise/builder/application_types.py @@ -1,12 +1,10 @@ -from baserow_premium.builder.application_types import PremiumBuilderApplicationType - from baserow_enterprise.builder.custom_code.application_type_mixin import ( CustomCodeBuilderApplicationTypeMixin, ) +from baserow_premium.builder.application_types import PremiumBuilderApplicationType class EnterpriseBuilderApplicationType( CustomCodeBuilderApplicationTypeMixin, PremiumBuilderApplicationType, -): - ... +): ... 
diff --git a/enterprise/backend/src/baserow_enterprise/builder/custom_code/application_type_mixin.py b/enterprise/backend/src/baserow_enterprise/builder/custom_code/application_type_mixin.py index af385c5d9e..7dd655cdd9 100644 --- a/enterprise/backend/src/baserow_enterprise/builder/custom_code/application_type_mixin.py +++ b/enterprise/backend/src/baserow_enterprise/builder/custom_code/application_type_mixin.py @@ -1,9 +1,8 @@ -from baserow_premium.license.handler import LicenseHandler - from baserow.contrib.builder.models import Builder from baserow_enterprise.builder.custom_code.models import BuilderCustomScript from baserow_enterprise.builder.custom_code.types import BuilderCustomCodeDict from baserow_enterprise.features import BUILDER_CUSTOM_CODE +from baserow_premium.license.handler import LicenseHandler class CustomCodeBuilderApplicationTypeMixin: diff --git a/enterprise/backend/src/baserow_enterprise/builder/elements/element_types.py b/enterprise/backend/src/baserow_enterprise/builder/elements/element_types.py index 44e5311cdf..a2c23b13eb 100644 --- a/enterprise/backend/src/baserow_enterprise/builder/elements/element_types.py +++ b/enterprise/backend/src/baserow_enterprise/builder/elements/element_types.py @@ -1,7 +1,6 @@ import mimetypes from typing import Any, Dict, Optional -from baserow_premium.license.handler import LicenseHandler from rest_framework import serializers from baserow.api.exceptions import RequestBodyValidationException @@ -19,6 +18,7 @@ from baserow.core.user_sources.handler import UserSourceHandler from baserow_enterprise.builder.elements.models import AuthFormElement, FileInputElement from baserow_enterprise.features import BUILDER_FILE_INPUT +from baserow_premium.license.handler import LicenseHandler class AuthFormElementType(ElementType): @@ -298,8 +298,7 @@ def _handle_file( if not self.is_allowed_content_type(element, file_content.content_type): raise TypeError( - f"The file {file_obj.get('name') or 'unnamed'} " - f"type is not 
allowed." + f"The file {file_obj.get('name') or 'unnamed'} type is not allowed." ) if file_content.size / 1024 / 1024 > element.max_filesize: diff --git a/enterprise/backend/src/baserow_enterprise/config/settings/settings.py b/enterprise/backend/src/baserow_enterprise/config/settings/settings.py index 63105b068d..d3e8852e11 100644 --- a/enterprise/backend/src/baserow_enterprise/config/settings/settings.py +++ b/enterprise/backend/src/baserow_enterprise/config/settings/settings.py @@ -22,8 +22,7 @@ def setup(settings): settings.BASEROW_ENTERPRISE_USER_SOURCE_COUNTING_CACHE_TTL_SECONDS = int( # Default TTL is 120 minutes: 60 seconds * 120 - os.getenv("BASEROW_ENTERPRISE_USER_SOURCE_COUNTING_CACHE_TTL_SECONDS") - or 7200 + os.getenv("BASEROW_ENTERPRISE_USER_SOURCE_COUNTING_CACHE_TTL_SECONDS") or 7200 ) settings.BASEROW_ENTERPRISE_AUDIT_LOG_CLEANUP_INTERVAL_MINUTES = int( @@ -61,9 +60,9 @@ def setup(settings): os.getenv("BASEROW_SERVE_FILES_THROUGH_BACKEND", False) ) if serve_files_through_backend: - settings.STORAGES["default"][ - "BACKEND" - ] = "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage" + settings.STORAGES["default"]["BACKEND"] = ( + "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage" + ) settings.BASEROW_SERVE_FILES_THROUGH_BACKEND = serve_files_through_backend diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/baserow_table_data_sync.py b/enterprise/backend/src/baserow_enterprise/data_sync/baserow_table_data_sync.py index 20d5d3e0a3..b17f7eac20 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/baserow_table_data_sync.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/baserow_table_data_sync.py @@ -4,9 +4,6 @@ from django.db.models import Prefetch -from baserow_premium.fields.field_types import AIFieldType -from baserow_premium.fields.registries import ai_field_output_registry -from baserow_premium.license.handler import LicenseHandler from rest_framework import serializers from 
baserow.contrib.database.data_sync.exceptions import SyncError @@ -52,6 +49,9 @@ from baserow.core.handler import CoreHandler from baserow.core.utils import ChildProgressBuilder from baserow_enterprise.features import DATA_SYNC +from baserow_premium.fields.field_types import AIFieldType +from baserow_premium.fields.registries import ai_field_output_registry +from baserow_premium.license.handler import LicenseHandler from .models import LocalBaserowTableDataSync diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/github_issues_data_sync.py b/enterprise/backend/src/baserow_enterprise/data_sync/github_issues_data_sync.py index d6692c9871..22bc664baf 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/github_issues_data_sync.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/github_issues_data_sync.py @@ -2,7 +2,6 @@ from typing import Any, Dict, List, Optional import requests -from baserow_premium.license.handler import LicenseHandler from requests.exceptions import JSONDecodeError, RequestException from baserow.contrib.database.data_sync.exceptions import SyncError @@ -17,6 +16,7 @@ ) from baserow.core.utils import ChildProgressBuilder, get_value_at_path from baserow_enterprise.features import DATA_SYNC +from baserow_premium.license.handler import LicenseHandler from .models import GitHubIssuesDataSync diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/gitlab_issues_data_sync.py b/enterprise/backend/src/baserow_enterprise/data_sync/gitlab_issues_data_sync.py index 4527edeaad..aeee140253 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/gitlab_issues_data_sync.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/gitlab_issues_data_sync.py @@ -2,7 +2,6 @@ from typing import Any, Dict, List, Optional import requests -from baserow_premium.license.handler import LicenseHandler from requests.exceptions import JSONDecodeError, RequestException from baserow.contrib.database.data_sync.exceptions import 
SyncError @@ -17,6 +16,7 @@ ) from baserow.core.utils import ChildProgressBuilder, get_value_at_path from baserow_enterprise.features import DATA_SYNC +from baserow_premium.license.handler import LicenseHandler from .models import GitLabIssuesDataSync diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/handler.py b/enterprise/backend/src/baserow_enterprise/data_sync/handler.py index 5de846f0e7..18f0969f12 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/handler.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/handler.py @@ -7,7 +7,6 @@ from django.db.models import Q from django.utils import timezone -from baserow_premium.license.handler import LicenseHandler from loguru import logger from baserow.contrib.database.data_sync.exceptions import ( @@ -26,6 +25,7 @@ PeriodicDataSyncInterval, ) from baserow_enterprise.features import DATA_SYNC +from baserow_premium.license.handler import LicenseHandler from .notification_types import PeriodicDataSyncDeactivatedNotificationType from .tasks import sync_periodic_data_sync @@ -118,7 +118,8 @@ def call_periodic_data_sync_syncs_that_are_due(cls): # The now time must be higher than the now time because the data sync # must be triggered at the desired the of the user. when__lte=now_time, - ).select_related("data_sync__table__database__workspace") + ) + .select_related("data_sync__table__database__workspace") # Take a lock on the periodic data sync because the `last_periodic_sync` # must be updated immediately. This will make sure that if this method is # called frequently, it doesn't trigger the same. 
If self or `data_sync` is diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/hubspot_contacts_data_sync.py b/enterprise/backend/src/baserow_enterprise/data_sync/hubspot_contacts_data_sync.py index 5a6cf4f04d..8ded20922f 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/hubspot_contacts_data_sync.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/hubspot_contacts_data_sync.py @@ -3,7 +3,6 @@ from decimal import Decimal import requests -from baserow_premium.license.handler import LicenseHandler from baserow.contrib.database.data_sync.exceptions import SyncError from baserow.contrib.database.data_sync.models import DataSyncSyncedProperty @@ -25,6 +24,7 @@ from baserow.core.utils import ChildProgressBuilder, get_value_at_path from baserow_enterprise.data_sync.models import HubSpotContactsDataSync from baserow_enterprise.features import DATA_SYNC +from baserow_premium.license.handler import LicenseHandler class HubspotIDProperty(DataSyncProperty): diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py b/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py index bc26b777d8..5bdaeb59a3 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/jira_issues_data_sync.py @@ -4,7 +4,6 @@ import advocate from advocate import UnacceptableAddressException -from baserow_premium.license.handler import LicenseHandler from requests.auth import HTTPBasicAuth from requests.exceptions import JSONDecodeError, RequestException @@ -19,6 +18,7 @@ ) from baserow.core.utils import ChildProgressBuilder, get_value_at_path from baserow_enterprise.features import DATA_SYNC +from baserow_premium.license.handler import LicenseHandler from .models import ( JIRA_ISSUES_DATA_SYNC_API_TOKEN, diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/models.py 
b/enterprise/backend/src/baserow_enterprise/data_sync/models.py index 9749526b45..d3a65d18d0 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/models.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/models.py @@ -153,7 +153,7 @@ class GitLabIssuesDataSync(DataSync): ) gitlab_project_id = models.CharField( max_length=255, - help_text="The ID of the GitLab project where to sync the " "issues with.", + help_text="The ID of the GitLab project where to sync the issues with.", ) gitlab_access_token = models.CharField( max_length=255, diff --git a/enterprise/backend/src/baserow_enterprise/data_sync/two_way_sync_strategy_types.py b/enterprise/backend/src/baserow_enterprise/data_sync/two_way_sync_strategy_types.py index 072566fb0c..3cd52718cf 100644 --- a/enterprise/backend/src/baserow_enterprise/data_sync/two_way_sync_strategy_types.py +++ b/enterprise/backend/src/baserow_enterprise/data_sync/two_way_sync_strategy_types.py @@ -1,8 +1,6 @@ from django.conf import settings from django.db import transaction -from baserow_premium.license.handler import LicenseHandler - from baserow.contrib.database.data_sync.exceptions import SyncError from baserow.contrib.database.data_sync.models import DataSync, DataSyncSyncedProperty from baserow.contrib.database.data_sync.registries import ( @@ -13,6 +11,7 @@ from baserow.contrib.database.rows.handler import RowHandler from baserow.contrib.database.table.signals import table_updated from baserow_enterprise.features import DATA_SYNC +from baserow_premium.license.handler import LicenseHandler from .notification_types import ( TwoWaySyncDeactivatedNotificationType, diff --git a/enterprise/backend/src/baserow_enterprise/date_dependency/field_rule_types.py b/enterprise/backend/src/baserow_enterprise/date_dependency/field_rule_types.py index 3ae5301a00..ada0ad16bc 100644 --- a/enterprise/backend/src/baserow_enterprise/date_dependency/field_rule_types.py +++ 
b/enterprise/backend/src/baserow_enterprise/date_dependency/field_rule_types.py @@ -4,8 +4,6 @@ from django.db.models import QuerySet from django.db.transaction import on_commit -from baserow_premium.license.exceptions import FeaturesNotAvailableError -from baserow_premium.license.handler import LicenseHandler from loguru import logger from rest_framework import serializers @@ -27,6 +25,8 @@ ) from baserow_enterprise.date_dependency.types import DateDepenencyDict from baserow_enterprise.features import DATE_DEPENDENCY +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.license.handler import LicenseHandler from .calculations import DateCalculator, DateDependencyCalculator, DateValues from .serializers import ( diff --git a/enterprise/backend/src/baserow_enterprise/emails_context_types.py b/enterprise/backend/src/baserow_enterprise/emails_context_types.py index 73185ba1e7..b874532030 100644 --- a/enterprise/backend/src/baserow_enterprise/emails_context_types.py +++ b/enterprise/backend/src/baserow_enterprise/emails_context_types.py @@ -1,8 +1,7 @@ -from baserow_premium.license.handler import LicenseHandler - from baserow.core.handler import CoreHandler from baserow.core.registries import EmailContextType from baserow_enterprise.features import ENTERPRISE_SETTINGS +from baserow_premium.license.handler import LicenseHandler class EnterpriseEmailContextType(EmailContextType): diff --git a/enterprise/backend/src/baserow_enterprise/field_permissions/permission_manager.py b/enterprise/backend/src/baserow_enterprise/field_permissions/permission_manager.py index 28d53d5a2d..8e21a3250c 100644 --- a/enterprise/backend/src/baserow_enterprise/field_permissions/permission_manager.py +++ b/enterprise/backend/src/baserow_enterprise/field_permissions/permission_manager.py @@ -5,8 +5,6 @@ from django.contrib.auth.models import AbstractUser from django.db.models import Exists, OuterRef, QuerySet -from baserow_premium.license.handler import 
LicenseHandler - from baserow.contrib.database.fields.models import Field from baserow.contrib.database.fields.operations import ( SubmitAnonymousFieldValuesOperationType, @@ -27,6 +25,7 @@ ) from baserow_enterprise.role.handler import RoleAssignmentHandler from baserow_enterprise.role.models import Role +from baserow_premium.license.handler import LicenseHandler from .models import FieldPermissions, FieldPermissionsRoleEnum @@ -225,7 +224,7 @@ def check_multiple_permissions( if workspace is None or not self.is_enabled(workspace): # Permissions granted if RBAC is not enabled. - return {check: True for check in field_checks} + return dict.fromkeys(field_checks, True) return self.check_field_permissions( field_checks, workspace, include_trash=include_trash diff --git a/enterprise/backend/src/baserow_enterprise/integrations/common/sso/oauth2/app_auth_provider_types.py b/enterprise/backend/src/baserow_enterprise/integrations/common/sso/oauth2/app_auth_provider_types.py index 03dc6f574d..6eb42c7a95 100644 --- a/enterprise/backend/src/baserow_enterprise/integrations/common/sso/oauth2/app_auth_provider_types.py +++ b/enterprise/backend/src/baserow_enterprise/integrations/common/sso/oauth2/app_auth_provider_types.py @@ -116,8 +116,7 @@ class OpenIdConnectAppAuthProviderType( class SerializedDict( OpenIdConnectAuthProviderTypeMixin.OpenIdConnectSerializedDict, AppAuthProviderTypeDict, - ): - ... + ): ... 
serializer_field_overrides = { "base_url": serializers.CharField( diff --git a/enterprise/backend/src/baserow_enterprise/integrations/common/sso/saml/app_auth_provider_types.py b/enterprise/backend/src/baserow_enterprise/integrations/common/sso/saml/app_auth_provider_types.py index 71df5b2930..40acf8939c 100644 --- a/enterprise/backend/src/baserow_enterprise/integrations/common/sso/saml/app_auth_provider_types.py +++ b/enterprise/backend/src/baserow_enterprise/integrations/common/sso/saml/app_auth_provider_types.py @@ -26,8 +26,7 @@ class SamlAppAuthProviderType(SamlAuthProviderTypeMixin, AppAuthProviderType): class SerializedDict( AppAuthProviderTypeDict, SamlAuthProviderTypeMixin.SamlSerializedDict - ): - ... + ): ... @property def allowed_fields(self) -> List[str]: diff --git a/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py b/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py index e12c5dfff7..db73450926 100644 --- a/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py +++ b/enterprise/backend/src/baserow_enterprise/integrations/local_baserow/user_source_types.py @@ -267,8 +267,7 @@ def prepare_values( { "email_field_id": [ { - "detail": "This field type can't be used as " - "email.", + "detail": "This field type can't be used as email.", "code": "invalid_field", } ] @@ -327,8 +326,7 @@ def prepare_values( { "name_field_id": [ { - "detail": "This field type can't be used as " - "name.", + "detail": "This field type can't be used as name.", "code": "invalid_field", } ] diff --git a/enterprise/backend/src/baserow_enterprise/license_types.py b/enterprise/backend/src/baserow_enterprise/license_types.py index 449f69b860..f78f9e7d45 100755 --- a/enterprise/backend/src/baserow_enterprise/license_types.py +++ b/enterprise/backend/src/baserow_enterprise/license_types.py @@ -1,9 +1,5 @@ from typing import List, Optional -from 
baserow_premium.license.features import PREMIUM -from baserow_premium.license.models import License -from baserow_premium.license.registries import LicenseType, SeatUsageSummary - from baserow.core.models import Workspace from baserow_enterprise.features import ( ADVANCED_WEBHOOKS, @@ -25,6 +21,9 @@ from baserow_enterprise.role.seat_usage_calculator import ( RoleBasedSeatUsageSummaryCalculator, ) +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.models import License +from baserow_premium.license.registries import LicenseType, SeatUsageSummary COMMON_ADVANCED_FEATURES = [ # core diff --git a/enterprise/backend/src/baserow_enterprise/migrations/0016_rename_auditlogentry_group_id_workspace_id.py b/enterprise/backend/src/baserow_enterprise/migrations/0016_rename_auditlogentry_group_id_workspace_id.py index fefb21fccc..6ac552b484 100755 --- a/enterprise/backend/src/baserow_enterprise/migrations/0016_rename_auditlogentry_group_id_workspace_id.py +++ b/enterprise/backend/src/baserow_enterprise/migrations/0016_rename_auditlogentry_group_id_workspace_id.py @@ -2,7 +2,6 @@ class Migration(migrations.Migration): - """ This migration handles a special case during the "group" to "workspace" rename. 
diff --git a/enterprise/backend/src/baserow_enterprise/migrations/0044_migrate_app_labels.py b/enterprise/backend/src/baserow_enterprise/migrations/0044_migrate_app_labels.py index 2ff2ee6db0..0d0226069f 100644 --- a/enterprise/backend/src/baserow_enterprise/migrations/0044_migrate_app_labels.py +++ b/enterprise/backend/src/baserow_enterprise/migrations/0044_migrate_app_labels.py @@ -24,8 +24,8 @@ class Migration(migrations.Migration): ), ( "baserow_premium", - "0025_chartwidget_localbaserowgroupedaggregaterows_and_more" - ) + "0025_chartwidget_localbaserowgroupedaggregaterows_and_more", + ), ] operations = [ diff --git a/enterprise/backend/src/baserow_enterprise/migrations/0050_periodicdatasyncinterval_deactivation_reason.py b/enterprise/backend/src/baserow_enterprise/migrations/0050_periodicdatasyncinterval_deactivation_reason.py index 7389d71ad1..d4a90d67d2 100644 --- a/enterprise/backend/src/baserow_enterprise/migrations/0050_periodicdatasyncinterval_deactivation_reason.py +++ b/enterprise/backend/src/baserow_enterprise/migrations/0050_periodicdatasyncinterval_deactivation_reason.py @@ -13,7 +13,10 @@ class Migration(migrations.Migration): model_name="periodicdatasyncinterval", name="deactivation_reason", field=models.CharField( - choices=[("FAILURE", "FAILURE"), ("LICENSE_UNAVAILABLE", "LICENSE_UNAVAILABLE")], + choices=[ + ("FAILURE", "FAILURE"), + ("LICENSE_UNAVAILABLE", "LICENSE_UNAVAILABLE"), + ], help_text="The reason why the periodic data sync was deactivated.", max_length=20, null=True, diff --git a/enterprise/backend/src/baserow_enterprise/migrations/0057_role_hidden.py b/enterprise/backend/src/baserow_enterprise/migrations/0057_role_hidden.py index b24088d0b6..cdee2fad1f 100644 --- a/enterprise/backend/src/baserow_enterprise/migrations/0057_role_hidden.py +++ b/enterprise/backend/src/baserow_enterprise/migrations/0057_role_hidden.py @@ -16,7 +16,7 @@ class Migration(migrations.Migration): db_default=False, default=False, help_text="Hidden roles are 
not visible to the user and cannot be " - "set. These are used for internal purposes.", + "set. These are used for internal purposes.", ), ), ] diff --git a/enterprise/backend/src/baserow_enterprise/role/default_roles.py b/enterprise/backend/src/baserow_enterprise/role/default_roles.py index e0556f0b6c..b9c2826576 100755 --- a/enterprise/backend/src/baserow_enterprise/role/default_roles.py +++ b/enterprise/backend/src/baserow_enterprise/role/default_roles.py @@ -1,14 +1,6 @@ from django.conf import settings from django.core.exceptions import ImproperlyConfigured -from baserow_premium.row_comments.operations import ( - CreateRowCommentsOperationType, - DeleteRowCommentsOperationType, - ReadRowCommentsOperationType, - RestoreRowCommentOperationType, - UpdateRowCommentsOperationType, -) - from baserow.contrib.automation.nodes.operations import ( CreateAutomationNodeOperationType, DeleteAutomationNodeOperationType, @@ -309,6 +301,13 @@ from baserow_enterprise.views.operations import ( ListenToAllRestrictedViewEventsOperationType, ) +from baserow_premium.row_comments.operations import ( + CreateRowCommentsOperationType, + DeleteRowCommentsOperationType, + ReadRowCommentsOperationType, + RestoreRowCommentOperationType, + UpdateRowCommentsOperationType, +) default_roles = { ADMIN_ROLE_UID: [], diff --git a/enterprise/backend/src/baserow_enterprise/role/handler.py b/enterprise/backend/src/baserow_enterprise/role/handler.py index 53c0dbfa74..63dfd3cc98 100755 --- a/enterprise/backend/src/baserow_enterprise/role/handler.py +++ b/enterprise/backend/src/baserow_enterprise/role/handler.py @@ -6,8 +6,6 @@ from django.contrib.contenttypes.models import ContentType from django.db.models import Case, IntegerField, Q, QuerySet, Value, When -from baserow_premium.license.handler import LicenseHandler - from baserow.core.cache import local_cache from baserow.core.exceptions import PermissionDenied from baserow.core.handler import CoreHandler @@ -35,6 +33,7 @@ role_assignment_updated, ) 
from baserow_enterprise.teams.models import Team, TeamSubject +from baserow_premium.license.handler import LicenseHandler from .constants import ( ALLOWED_SUBJECT_TYPE_BY_PRIORITY, @@ -286,9 +285,9 @@ def get_current_role_assignments( ) if key in role_assignments_by_user_id_scope_id: - roles_by_user_scope[ - (subject, scope) - ] = role_assignments_by_user_id_scope_id[key] + roles_by_user_scope[(subject, scope)] = ( + role_assignments_by_user_id_scope_id[key] + ) else: roles_by_user_scope[(subject, scope)] = None @@ -477,9 +476,9 @@ def _get_role_assignments(): # order for role_by_scope. We need to keep it ordered as expected # by the caller. roles_by_scope[actor_id][scope_param] = [role] - priorities_by_scope_per_actor_id[actor_id][ - scope_param - ] = role_assignment_priority + priorities_by_scope_per_actor_id[actor_id][scope_param] = ( + role_assignment_priority + ) else: # If the priority is the same we add the role to the # current role list. diff --git a/enterprise/backend/src/baserow_enterprise/role/member_data_types.py b/enterprise/backend/src/baserow_enterprise/role/member_data_types.py index 87c179a4a2..bff6b693df 100644 --- a/enterprise/backend/src/baserow_enterprise/role/member_data_types.py +++ b/enterprise/backend/src/baserow_enterprise/role/member_data_types.py @@ -3,7 +3,6 @@ from django.conf import settings from django.contrib.auth.models import AbstractUser -from baserow_premium.plugins import PremiumPlugin from rest_framework import serializers from baserow.api.user.registries import MemberDataType @@ -14,6 +13,7 @@ from baserow_enterprise.api.role.serializers import RoleField from baserow_enterprise.role.handler import RoleAssignmentHandler from baserow_enterprise.role.models import Role +from baserow_premium.plugins import PremiumPlugin class EnterpriseRolesDataType(MemberDataType): diff --git a/enterprise/backend/src/baserow_enterprise/role/permission_manager.py b/enterprise/backend/src/baserow_enterprise/role/permission_manager.py index 
e53da74cce..97bb737c3b 100644 --- a/enterprise/backend/src/baserow_enterprise/role/permission_manager.py +++ b/enterprise/backend/src/baserow_enterprise/role/permission_manager.py @@ -5,8 +5,6 @@ from django.contrib.auth import get_user_model from django.contrib.auth.models import AbstractUser -from baserow_premium.license.handler import LicenseHandler - from baserow.core.cache import local_cache from baserow.core.exceptions import PermissionDenied from baserow.core.models import Workspace @@ -21,6 +19,7 @@ from baserow.core.types import PermissionCheck from baserow_enterprise.features import RBAC from baserow_enterprise.role.handler import RoleAssignmentHandler +from baserow_premium.license.handler import LicenseHandler from .constants import READ_ONLY_ROLE_UID from .models import Role diff --git a/enterprise/backend/src/baserow_enterprise/role/seat_usage_calculator.py b/enterprise/backend/src/baserow_enterprise/role/seat_usage_calculator.py index 29a7504a96..7aa8d2b3ab 100755 --- a/enterprise/backend/src/baserow_enterprise/role/seat_usage_calculator.py +++ b/enterprise/backend/src/baserow_enterprise/role/seat_usage_calculator.py @@ -5,8 +5,6 @@ from django.contrib.contenttypes.models import ContentType from django.db.models import Case, F, Q, TextField, Value, When -from baserow_premium.license.registries import SeatUsageSummary - from baserow.contrib.database.models import Database from baserow.contrib.database.table.models import Table from baserow.core.models import ( @@ -19,6 +17,7 @@ from baserow_enterprise.role.default_roles import default_roles from baserow_enterprise.role.models import RoleAssignment from baserow_enterprise.teams.models import Team +from baserow_premium.license.registries import SeatUsageSummary User = get_user_model() diff --git a/enterprise/backend/src/baserow_enterprise/sso/oauth2/auth_provider_types.py b/enterprise/backend/src/baserow_enterprise/sso/oauth2/auth_provider_types.py index 90fd29e402..0ea1840199 100644 --- 
a/enterprise/backend/src/baserow_enterprise/sso/oauth2/auth_provider_types.py +++ b/enterprise/backend/src/baserow_enterprise/sso/oauth2/auth_provider_types.py @@ -359,7 +359,7 @@ def get_email(self, headers) -> str: """ email = None - resp = requests.get(self.EMAILS_URL, headers=headers) # nosec B113 + resp = requests.get(self.EMAILS_URL, headers=headers) # noqa: S113 resp.raise_for_status() emails = resp.json() if resp.status_code == 200 and emails: @@ -526,9 +526,7 @@ def get_wellknown_urls(cls, base_url: str) -> WellKnownUrls: try: wellknown_url = f"{base_url}/.well-known/openid-configuration" - json_response = requests.get( - wellknown_url, timeout=120 - ).json() # nosec B113 + json_response = requests.get(wellknown_url, timeout=120).json() # nosec B113 return WellKnownUrls( authorization_url=json_response["authorization_endpoint"], diff --git a/enterprise/backend/src/baserow_enterprise/sso/utils.py b/enterprise/backend/src/baserow_enterprise/sso/utils.py index c30666f3ef..7c65b24bae 100644 --- a/enterprise/backend/src/baserow_enterprise/sso/utils.py +++ b/enterprise/backend/src/baserow_enterprise/sso/utils.py @@ -1,8 +1,7 @@ +from baserow_enterprise.features import SSO from baserow_premium.license.exceptions import FeaturesNotAvailableError from baserow_premium.license.handler import LicenseHandler -from baserow_enterprise.features import SSO - def is_sso_feature_active(): return LicenseHandler.instance_has_feature(SSO) diff --git a/enterprise/backend/src/baserow_enterprise/structure_types.py b/enterprise/backend/src/baserow_enterprise/structure_types.py index 73ed8433cc..c5aeb7b231 100644 --- a/enterprise/backend/src/baserow_enterprise/structure_types.py +++ b/enterprise/backend/src/baserow_enterprise/structure_types.py @@ -2,8 +2,6 @@ from django.contrib.contenttypes.models import ContentType -from baserow_premium.license.handler import LicenseHandler - from baserow.contrib.database.views.models import View from baserow.core.models import Application from 
baserow.core.registries import ImportExportConfig, SerializationProcessorType @@ -13,6 +11,7 @@ from baserow_enterprise.role.handler import RoleAssignmentHandler from baserow_enterprise.role.models import Role from baserow_enterprise.role.types import NewRoleAssignment +from baserow_premium.license.handler import LicenseHandler if TYPE_CHECKING: from baserow.core.models import Workspace diff --git a/enterprise/backend/src/baserow_enterprise/teams/handler.py b/enterprise/backend/src/baserow_enterprise/teams/handler.py index abac6ed4f2..a461584a47 100644 --- a/enterprise/backend/src/baserow_enterprise/teams/handler.py +++ b/enterprise/backend/src/baserow_enterprise/teams/handler.py @@ -8,8 +8,6 @@ from django.db.models.expressions import RawSQL from django.db.models.functions import Coalesce -from baserow_premium.license.handler import LicenseHandler - from baserow.contrib.database.tokens.models import Token from baserow.core.models import Workspace from baserow.core.trash.handler import TrashHandler @@ -33,6 +31,7 @@ TeamSubjectNotInGroup, TeamSubjectTypeUnsupported, ) +from baserow_premium.license.handler import LicenseHandler from ..features import TEAMS diff --git a/enterprise/backend/src/baserow_enterprise/teams/models.py b/enterprise/backend/src/baserow_enterprise/teams/models.py index 948cf33f7e..680a27ec78 100755 --- a/enterprise/backend/src/baserow_enterprise/teams/models.py +++ b/enterprise/backend/src/baserow_enterprise/teams/models.py @@ -66,7 +66,7 @@ def default_role_uid(self) -> Union[str, None]: """ if hasattr(self, "_annotated_default_role_uid"): - return getattr(self, "_annotated_default_role_uid") + return self._annotated_default_role_uid from baserow_enterprise.role.handler import RoleAssignmentHandler diff --git a/enterprise/backend/src/baserow_enterprise/view_ownership_types.py b/enterprise/backend/src/baserow_enterprise/view_ownership_types.py index b681de7325..619fba176d 100644 --- 
a/enterprise/backend/src/baserow_enterprise/view_ownership_types.py +++ b/enterprise/backend/src/baserow_enterprise/view_ownership_types.py @@ -1,7 +1,5 @@ from django.contrib.auth.models import AbstractUser -from baserow_premium.license.handler import LicenseHandler - from baserow.contrib.database.views.handler import ViewHandler from baserow.contrib.database.views.models import View from baserow.contrib.database.views.operations import CreateViewFilterOperationType @@ -11,6 +9,7 @@ from baserow.core.models import Workspace from baserow.core.types import PermissionCheck from baserow_enterprise.features import RBAC +from baserow_premium.license.handler import LicenseHandler class RestrictedViewOwnershipType(ViewOwnershipType): diff --git a/enterprise/backend/src/baserow_enterprise/webhook_event_types.py b/enterprise/backend/src/baserow_enterprise/webhook_event_types.py index 641711aef2..fffa2a5549 100644 --- a/enterprise/backend/src/baserow_enterprise/webhook_event_types.py +++ b/enterprise/backend/src/baserow_enterprise/webhook_event_types.py @@ -1,8 +1,6 @@ from django.conf import settings from django.db.models import Q -from baserow_premium.license.handler import LicenseHandler - from baserow.contrib.database.api.rows.serializers import ( RowSerializer, get_row_serializer_class, @@ -13,6 +11,7 @@ from baserow.contrib.database.views.signals import rows_entered_view from baserow.contrib.database.webhooks.registries import WebhookEventType from baserow_enterprise.features import ADVANCED_WEBHOOKS +from baserow_premium.license.handler import LicenseHandler class EnterpriseWebhookEventType(WebhookEventType): diff --git a/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_admin_views.py b/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_admin_views.py index 9e75af54c3..6a764bfea7 100755 --- a/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_admin_views.py +++ 
b/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_admin_views.py @@ -553,8 +553,9 @@ def test_audit_log_can_export_to_csv_all_entries( "export_charset": "utf-8", } - with freeze_time("2023-01-01 12:00"), django_capture_on_commit_callbacks( - execute=True + with ( + freeze_time("2023-01-01 12:00"), + django_capture_on_commit_callbacks(execute=True), ): admin_token = enterprise_data_fixture.generate_token(admin_user) response = api_client.post( @@ -633,8 +634,9 @@ def test_audit_log_can_export_to_csv_filtered_entries( ) assert response.status_code == HTTP_400_BAD_REQUEST - with freeze_time("2023-01-02 12:00"), django_capture_on_commit_callbacks( - execute=True + with ( + freeze_time("2023-01-02 12:00"), + django_capture_on_commit_callbacks(execute=True), ): admin_token = enterprise_data_fixture.generate_token(admin_user) response = api_client.post( diff --git a/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_workspace_views.py b/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_workspace_views.py index be28bd533c..0e114ebc8c 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_workspace_views.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/api/audit_log/test_audit_log_workspace_views.py @@ -314,8 +314,9 @@ def test_workspace_audit_log_can_export_to_csv_filtered_entries( ) assert response.status_code == HTTP_404_NOT_FOUND - with freeze_time("2023-01-02 12:00"), django_capture_on_commit_callbacks( - execute=True + with ( + freeze_time("2023-01-02 12:00"), + django_capture_on_commit_callbacks(execute=True), ): admin_token = enterprise_data_fixture.generate_token(admin_user) response = api_client.post( diff --git a/enterprise/backend/tests/baserow_enterprise_tests/api/role/test_other_views_with_rbac.py b/enterprise/backend/tests/baserow_enterprise_tests/api/role/test_other_views_with_rbac.py index c6885eaad9..b288da0cf0 100644 --- 
a/enterprise/backend/tests/baserow_enterprise_tests/api/role/test_other_views_with_rbac.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/api/role/test_other_views_with_rbac.py @@ -3,7 +3,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL from rest_framework.status import HTTP_200_OK, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND from baserow.contrib.database.views.handler import ViewHandler @@ -15,6 +14,7 @@ from baserow_enterprise.apps import sync_default_roles_after_migrate from baserow_enterprise.role.default_roles import default_roles from baserow_enterprise.role.handler import RoleAssignmentHandler +from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL @pytest.fixture(autouse=True) diff --git a/enterprise/backend/tests/baserow_enterprise_tests/api/sso/test_oauth_views.py b/enterprise/backend/tests/baserow_enterprise_tests/api/sso/test_oauth_views.py index 40053d7e2d..ca18e8b5e9 100755 --- a/enterprise/backend/tests/baserow_enterprise_tests/api/sso/test_oauth_views.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/api/sso/test_oauth_views.py @@ -58,8 +58,7 @@ def test_oauth2_login_feature_not_active(api_client, enterprise_data_fixture): assert response.status_code == HTTP_302_FOUND assert response.headers["Location"] == ( - f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/" - "error?error=errorSsoFeatureNotActive" + f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/error?error=errorSsoFeatureNotActive" ) @@ -125,8 +124,7 @@ def test_oauth2_callback_feature_not_active(api_client, enterprise_data_fixture) assert response.status_code == HTTP_302_FOUND assert response.headers["Location"] == ( - f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/" - "error?error=errorSsoFeatureNotActive" + f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/error?error=errorSsoFeatureNotActive" ) @@ -369,8 +367,7 @@ def test_oauth2_callback_signup_disabled(api_client, enterprise_data_fixture): assert 
response.status_code == HTTP_302_FOUND assert response.headers["Location"] == ( - f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/" - "error?error=errorSignupDisabled" + f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/error?error=errorSignupDisabled" ) @@ -469,8 +466,7 @@ def test_oauth2_callback_login_deactivated_user( assert response.status_code == HTTP_302_FOUND assert response.headers["Location"] == ( - f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/" - "error?error=errorUserDeactivated" + f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/error?error=errorUserDeactivated" ) @@ -558,6 +554,5 @@ def get_user_info_raise_error(self, instance, code, session): assert response.status_code == HTTP_302_FOUND assert response.headers["Location"] == ( - f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/" - "error?error=errorAuthFlowError" + f"{settings.PUBLIC_WEB_FRONTEND_URL}/login/error?error=errorAuthFlowError" ) diff --git a/enterprise/backend/tests/baserow_enterprise_tests/api/teams/test_teams_views.py b/enterprise/backend/tests/baserow_enterprise_tests/api/teams/test_teams_views.py index 4b496ee5eb..59a6b7d22f 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/api/teams/test_teams_views.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/api/teams/test_teams_views.py @@ -71,7 +71,7 @@ def test_list_search_teams(api_client, data_fixture, enterprise_data_fixture): enterprise_data_fixture.create_team(name="Engineering", workspace=workspace) response = api_client.get( - f'{reverse("api:enterprise:teams:list", kwargs={"workspace_id": workspace.id})}?search=Sal', + f"{reverse('api:enterprise:teams:list', kwargs={'workspace_id': workspace.id})}?search=Sal", **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) assert response.status_code == HTTP_200_OK diff --git a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py index 03a8d77c1f..df1e7fa782 100644 --- 
a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant.py @@ -8,6 +8,7 @@ - Generates and persists chat titles appropriately - Adapts its signature based on chat state """ + from unittest.mock import MagicMock, Mock, patch from django.core.cache import cache @@ -535,7 +536,9 @@ def test_astream_messages_persists_chat_title( user = enterprise_data_fixture.create_user() workspace = enterprise_data_fixture.create_workspace(user=user) chat = AssistantChat.objects.create( - user=user, workspace=workspace, title="" # New chat + user=user, + workspace=workspace, + title="", # New chat ) # Mock title generator @@ -671,7 +674,9 @@ def test_astream_messages_yields_title_chunks( user = enterprise_data_fixture.create_user() workspace = enterprise_data_fixture.create_workspace(user=user) chat = AssistantChat.objects.create( - user=user, workspace=workspace, title="" # New chat + user=user, + workspace=workspace, + title="", # New chat ) # Mock title generator diff --git a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_table_tools.py b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_table_tools.py index 67c0786ec1..0d1b052dd0 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_table_tools.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_table_tools.py @@ -686,6 +686,6 @@ def __call__(self, **kwargs): # Verify at least some expected functions are present expected_common_functions = ["concat", "field", "if", "upper", "lower"] for func in expected_common_functions: - assert ( - func in captured_formula_docs - ), f"Expected function '{func}' not found in documentation" + assert func in captured_formula_docs, ( + f"Expected function '{func}' not found in documentation" + ) diff --git 
a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py index b8d5e13b4e..88726637f9 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_assistant_database_view_filters_tools.py @@ -487,9 +487,9 @@ def test_filter_class_discovery(): } found_excluded = excluded_classes & class_names - assert ( - not found_excluded - ), f"Base/intermediate classes should not be included: {found_excluded}" + assert not found_excluded, ( + f"Base/intermediate classes should not be included: {found_excluded}" + ) @pytest.mark.django_db diff --git a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_telemetry.py b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_telemetry.py index 387baa0dd9..4e0b8b095a 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_telemetry.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/assistant/test_telemetry.py @@ -18,9 +18,10 @@ def assistant_chat_fixture(enterprise_data_fixture): @pytest.fixture(autouse=True) def mock_posthog_openai(): - with udspy.settings.context(lm=udspy.LM(model="fake-model")), patch( - "posthog.ai.openai.AsyncOpenAI" - ) as mock: + with ( + udspy.settings.context(lm=udspy.LM(model="fake-model")), + patch("posthog.ai.openai.AsyncOpenAI") as mock, + ): # Configure the mock if needed mock.return_value = MagicMock() mock.return_value.model = "test-model" diff --git a/enterprise/backend/tests/baserow_enterprise_tests/audit_log/test_audit_log_export_job.py b/enterprise/backend/tests/baserow_enterprise_tests/audit_log/test_audit_log_export_job.py index 185b13329f..9b38597f77 100755 --- a/enterprise/backend/tests/baserow_enterprise_tests/audit_log/test_audit_log_export_job.py +++ 
b/enterprise/backend/tests/baserow_enterprise_tests/audit_log/test_audit_log_export_job.py @@ -5,7 +5,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.license.exceptions import FeaturesNotAvailableError from freezegun import freeze_time from baserow.contrib.database.export.handler import ExportHandler @@ -13,6 +12,7 @@ from baserow.core.jobs.constants import JOB_FINISHED from baserow.core.jobs.handler import JobHandler from baserow_enterprise.audit_log.job_types import AuditLogExportJobType +from baserow_premium.license.exceptions import FeaturesNotAvailableError @pytest.mark.django_db diff --git a/enterprise/backend/tests/baserow_enterprise_tests/conftest.py b/enterprise/backend/tests/baserow_enterprise_tests/conftest.py index 45b9b1f894..d309625c87 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/conftest.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/conftest.py @@ -30,9 +30,9 @@ def set_openai_api_key_env_var(): """ if not os.getenv("OPENAI_API_KEY"): - os.environ[ - "OPENAI_API_KEY" - ] = "Please, assistant don't crash. You don't need me." + os.environ["OPENAI_API_KEY"] = ( + "Please, assistant don't crash. You don't need me." 
+ ) @pytest.fixture # noqa: F405 diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_enterprise_data_sync_handler.py b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_enterprise_data_sync_handler.py index 4ed0c878b6..382bed91e4 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_enterprise_data_sync_handler.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_enterprise_data_sync_handler.py @@ -9,7 +9,6 @@ import pytest import responses -from baserow_premium.license.exceptions import FeaturesNotAvailableError from freezegun.api import freeze_time from baserow.contrib.database.data_sync.handler import DataSyncHandler @@ -28,6 +27,7 @@ from baserow_enterprise.data_sync.notification_types import ( PeriodicDataSyncDeactivatedNotificationType, ) +from baserow_premium.license.exceptions import FeaturesNotAvailableError @pytest.mark.django_db diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_github_issues_data_sync.py b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_github_issues_data_sync.py index 61aed7bc3c..72d033b65f 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_github_issues_data_sync.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_github_issues_data_sync.py @@ -7,14 +7,14 @@ import pytest import responses -from baserow_premium.license.exceptions import FeaturesNotAvailableError -from baserow_premium.license.models import License from rest_framework.status import HTTP_200_OK, HTTP_402_PAYMENT_REQUIRED from baserow.contrib.database.data_sync.handler import DataSyncHandler from baserow.contrib.database.fields.models import NumberField from baserow.core.db import specific_iterator from baserow_enterprise.data_sync.models import GitHubIssuesDataSync +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.license.models import License 
SINGLE_ISSUE = { "url": "https://api.github.com/repos/baserow_owner/baserow_repo/issues/1", diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_gitlab_issues_data_sync.py b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_gitlab_issues_data_sync.py index bb63519e5e..eded488520 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_gitlab_issues_data_sync.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_gitlab_issues_data_sync.py @@ -7,14 +7,14 @@ import pytest import responses -from baserow_premium.license.exceptions import FeaturesNotAvailableError -from baserow_premium.license.models import License from rest_framework.status import HTTP_200_OK, HTTP_402_PAYMENT_REQUIRED from baserow.contrib.database.data_sync.handler import DataSyncHandler from baserow.contrib.database.fields.models import NumberField from baserow.core.db import specific_iterator from baserow_enterprise.data_sync.models import GitLabIssuesDataSync +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.license.models import License SINGLE_ISSUE = { "id": 1, diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_hubspot_contacts_data_sync.py b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_hubspot_contacts_data_sync.py index 04eb1fc59f..129f491a2a 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_hubspot_contacts_data_sync.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_hubspot_contacts_data_sync.py @@ -7,8 +7,6 @@ import pytest import responses -from baserow_premium.license.exceptions import FeaturesNotAvailableError -from baserow_premium.license.models import License from rest_framework.status import ( HTTP_200_OK, HTTP_400_BAD_REQUEST, @@ -19,6 +17,8 @@ from baserow.contrib.database.fields.models import NumberField from baserow.core.db import specific_iterator from 
baserow_enterprise.data_sync.models import HubSpotContactsDataSync +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.license.models import License ALL_PROPERTIES_RESPONSE = { "results": [ diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_jira_issues_data_sync.py b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_jira_issues_data_sync.py index 1d18032be4..c1c7e08ca4 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_jira_issues_data_sync.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_jira_issues_data_sync.py @@ -7,8 +7,6 @@ import pytest import responses -from baserow_premium.license.exceptions import FeaturesNotAvailableError -from baserow_premium.license.models import License from requests.auth import HTTPBasicAuth from responses.matchers import header_matcher from rest_framework.status import HTTP_200_OK, HTTP_402_PAYMENT_REQUIRED @@ -18,6 +16,8 @@ from baserow.contrib.database.fields.models import TextField from baserow.core.db import specific_iterator from baserow_enterprise.data_sync.models import JiraIssuesDataSync +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.license.models import License SINGLE_ISSUE = { "expand": "operations,versionedRepresentations,editmeta,changelog,customfield_10010.requestTypePractice,renderedFields", diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_local_baserow_table_data_sync_type.py b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_local_baserow_table_data_sync_type.py index 494e55ee13..6a3b5568dd 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_local_baserow_table_data_sync_type.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_local_baserow_table_data_sync_type.py @@ -4,8 +4,6 @@ from django.test.utils import CaptureQueriesContext, 
override_settings import pytest -from baserow_premium.license.exceptions import FeaturesNotAvailableError -from baserow_premium.license.models import License from rest_framework.status import ( HTTP_200_OK, HTTP_400_BAD_REQUEST, @@ -26,6 +24,8 @@ supported_field_types, ) from baserow_enterprise.data_sync.models import LocalBaserowTableDataSync +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.license.models import License @pytest.mark.django_db @@ -243,7 +243,7 @@ def test_sync_data_sync_table(enterprise_data_fixture): source_row_1, f"field_{field_1.id}" ) assert getattr(row, f"field_{field_2_field.id}") == getattr( - source_row_1, f"field" f"_{field_2.id}" + source_row_1, f"field_{field_2.id}" ) diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_postgresql_data_sync.py b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_postgresql_data_sync.py index 1eaa69bff0..cc48637e6a 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_postgresql_data_sync.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_postgresql_data_sync.py @@ -497,7 +497,7 @@ def test_delete_row_in_postgresql_table( with connection.cursor() as cursor: row_id = serialized_rows[0][f"field_{id_field.id}"] cursor.execute( - f"SELECT count(*) " f"FROM {create_postgresql_test_table} WHERE id = %s", + f"SELECT count(*) FROM {create_postgresql_test_table} WHERE id = %s", [row_id], ) result = cursor.fetchone() @@ -569,7 +569,7 @@ def test_skip_delete_row_in_postgresql_table_if_unique_primary_is_empty( with connection.cursor() as cursor: cursor.execute( - f"SELECT count(*) " f"FROM {create_postgresql_test_table} WHERE id = %s", + f"SELECT count(*) FROM {create_postgresql_test_table} WHERE id = %s", [old_id], ) result = cursor.fetchone() diff --git a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_realtime_push_two_way_sync_strategy.py 
b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_realtime_push_two_way_sync_strategy.py index eef63b1016..e36d2271ad 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_realtime_push_two_way_sync_strategy.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/data_sync/test_realtime_push_two_way_sync_strategy.py @@ -524,7 +524,7 @@ def test_two_way_sync_update_without_valid_license( with connection.cursor() as cursor: cursor.execute( - f"SELECT count(*) " f"FROM {create_postgresql_test_table}", + f"SELECT count(*) FROM {create_postgresql_test_table}", ) result = cursor.fetchone() # Should be equal to the old number because no rows should have been created. diff --git a/enterprise/backend/tests/baserow_enterprise_tests/date_dependency/test_date_dependency_handler.py b/enterprise/backend/tests/baserow_enterprise_tests/date_dependency/test_date_dependency_handler.py index a67fbab3ae..f8f26e3926 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/date_dependency/test_date_dependency_handler.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/date_dependency/test_date_dependency_handler.py @@ -6,7 +6,6 @@ from django.contrib.auth.models import AbstractUser import pytest -from baserow_premium.license.exceptions import FeaturesNotAvailableError from baserow.contrib.database.field_rules.exceptions import FieldRuleAlreadyExistsError from baserow.contrib.database.field_rules.handlers import FieldRuleHandler @@ -19,6 +18,7 @@ DateDependencyFieldRuleType, ) from baserow_enterprise.date_dependency.models import DateDependency +from baserow_premium.license.exceptions import FeaturesNotAvailableError class DateDepsTestData(NamedTuple): @@ -419,7 +419,7 @@ def test_date_dependency_handler_create_rule_and_populate_rows( start_date = getattr(row, start_date_field.db_column) end_date = getattr(row, end_date_field.db_column) duration = getattr(row, duration_field.db_column) - is_valid = getattr(row, 
"field_rules_are_valid") + is_valid = row.field_rules_are_valid expected_row = expected.get(row_id) assert ( diff --git a/enterprise/backend/tests/baserow_enterprise_tests/enterprise/test_enterprise_license.py b/enterprise/backend/tests/baserow_enterprise_tests/enterprise/test_enterprise_license.py index 25b7a3500f..418f84e2b4 100755 --- a/enterprise/backend/tests/baserow_enterprise_tests/enterprise/test_enterprise_license.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/enterprise/test_enterprise_license.py @@ -10,11 +10,6 @@ import pytest import responses -from baserow_premium.api.user.user_data_types import ActiveLicensesDataType -from baserow_premium.license.exceptions import CantManuallyChangeSeatsError -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler -from baserow_premium.license.registries import SeatUsageSummary from freezegun import freeze_time from PIL import Image from responses.matchers import json_params_matcher @@ -35,6 +30,11 @@ RoleBasedSeatUsageSummaryCalculator, ) from baserow_enterprise.teams.models import Team, TeamSubject +from baserow_premium.api.user.user_data_types import ActiveLicensesDataType +from baserow_premium.license.exceptions import CantManuallyChangeSeatsError +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler +from baserow_premium.license.registries import SeatUsageSummary PAID_EDITOR_ROLE = "EDITOR" @@ -1894,7 +1894,7 @@ def test_order_of_roles_is_as_expected( user, workspace, role=Role.objects.get(uid=uid), scope=table ) - expected_report = {uid: 0 for uid in expected_role_order} + expected_report = dict.fromkeys(expected_role_order, 0) expected_report[uid] = 1 assert ( EnterpriseLicenseType() diff --git a/enterprise/backend/tests/baserow_enterprise_tests/enterprise_fixtures.py b/enterprise/backend/tests/baserow_enterprise_tests/enterprise_fixtures.py index 032fe6d369..865ccc28d0 100644 --- 
a/enterprise/backend/tests/baserow_enterprise_tests/enterprise_fixtures.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/enterprise_fixtures.py @@ -1,9 +1,9 @@ import faker -from baserow_premium.license.models import License from baserow.core.cache import local_cache from baserow.core.models import Settings from baserow_enterprise.models import Role, RoleAssignment, Team, TeamSubject +from baserow_premium.license.models import License VALID_ONE_SEAT_ENTERPRISE_LICENSE = ( # id: "1", instance_id: "1" diff --git a/enterprise/backend/tests/baserow_enterprise_tests/fields/test_link_row_field_rbac.py b/enterprise/backend/tests/baserow_enterprise_tests/fields/test_link_row_field_rbac.py index dab60d491e..af4390bfcd 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/fields/test_link_row_field_rbac.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/fields/test_link_row_field_rbac.py @@ -1,6 +1,7 @@ """ This file tests the link row field in combination with RBAC enabled """ + import pytest from baserow.contrib.database.fields.handler import FieldHandler diff --git a/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py b/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py index 1faba65bf2..716f9d6b11 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/integrations/local_baserow/test_user_source_types.py @@ -1534,9 +1534,9 @@ def test_local_baserow_user_source_authentication_is_configured( ]: prev_field = getattr(user_source, field) setattr(user_source, field, None) - assert ( - user_source_type.is_configured(user_source) is False - ), f"Failed for {field}" + assert user_source_type.is_configured(user_source) is False, ( + f"Failed for {field}" + ) setattr(user_source, field, prev_field) diff --git 
a/enterprise/backend/tests/baserow_enterprise_tests/role/test_role_permission_manager.py b/enterprise/backend/tests/baserow_enterprise_tests/role/test_role_permission_manager.py index 3230559f78..86ac5e8685 100755 --- a/enterprise/backend/tests/baserow_enterprise_tests/role/test_role_permission_manager.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/role/test_role_permission_manager.py @@ -212,7 +212,9 @@ def check_perms(user, test_list): try: assert perm_manager.check_permissions( user, permission.type, workspace=workspace, context=context - ), f"User {user} should have permission {permission.type} on context {context}" + ), ( + f"User {user} should have permission {permission.type} on context {context}" + ) except PermissionException: print( f"User {user} should have permission {permission.type} on context {context}" diff --git a/enterprise/backend/tests/baserow_enterprise_tests/sso/oauth2/test_auth_provider_types.py b/enterprise/backend/tests/baserow_enterprise_tests/sso/oauth2/test_auth_provider_types.py index 981007d04f..90db67702e 100644 --- a/enterprise/backend/tests/baserow_enterprise_tests/sso/oauth2/test_auth_provider_types.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/sso/oauth2/test_auth_provider_types.py @@ -126,15 +126,14 @@ def test_get_login_options( "items": [ { "redirect_url": ( - f"{settings.PUBLIC_BACKEND_URL}" - f"/api/sso/oauth2/login/{provider.id}/" + f"{settings.PUBLIC_BACKEND_URL}/api/sso/oauth2/login/{provider.id}/" ), "name": provider.name, "type": provider_type, } ], "default_redirect_url": ( - f"{settings.PUBLIC_BACKEND_URL}" f"/api/sso/oauth2/login/{provider.id}/" + f"{settings.PUBLIC_BACKEND_URL}/api/sso/oauth2/login/{provider.id}/" ), } diff --git a/enterprise/backend/tests/baserow_enterprise_tests/views/test_restricted_view.py b/enterprise/backend/tests/baserow_enterprise_tests/views/test_restricted_view.py index 5759d4f9c1..d63e2cd548 100644 --- 
a/enterprise/backend/tests/baserow_enterprise_tests/views/test_restricted_view.py +++ b/enterprise/backend/tests/baserow_enterprise_tests/views/test_restricted_view.py @@ -5,11 +5,6 @@ from django.urls import reverse import pytest -from baserow_premium.views.view_types import ( - CalendarViewType, - KanbanViewType, - TimelineViewType, -) from starlette.status import HTTP_200_OK from baserow.contrib.database.api.constants import PUBLIC_PLACEHOLDER_ENTITY_ID @@ -26,6 +21,11 @@ from baserow_enterprise.role.handler import RoleAssignmentHandler from baserow_enterprise.role.models import Role from baserow_enterprise.view_ownership_types import RestrictedViewOwnershipType +from baserow_premium.views.view_types import ( + CalendarViewType, + KanbanViewType, + TimelineViewType, +) @pytest.mark.django_db diff --git a/premium/backend/src/baserow_premium/api/builder/serializers.py b/premium/backend/src/baserow_premium/api/builder/serializers.py index 75f9d61fd7..2c74c4cdb4 100644 --- a/premium/backend/src/baserow_premium/api/builder/serializers.py +++ b/premium/backend/src/baserow_premium/api/builder/serializers.py @@ -1,9 +1,9 @@ -from baserow_premium.plugins import PremiumPlugin from rest_framework import serializers from baserow.api.workspaces.serializers import WorkspaceSerializer from baserow.contrib.builder.api.serializers import BuilderSerializer from baserow.core.registries import plugin_registry +from baserow_premium.plugins import PremiumPlugin class PublicWorkspaceSerializer(WorkspaceSerializer): diff --git a/premium/backend/src/baserow_premium/api/dashboard/widgets/serializers.py b/premium/backend/src/baserow_premium/api/dashboard/widgets/serializers.py index 92fd81c1ac..79eecb885e 100644 --- a/premium/backend/src/baserow_premium/api/dashboard/widgets/serializers.py +++ b/premium/backend/src/baserow_premium/api/dashboard/widgets/serializers.py @@ -1,8 +1,9 @@ +from rest_framework import serializers + from baserow_premium.dashboard.widgets.models import ( 
ChartSeriesChartType, PieChartSeriesChartType, ) -from rest_framework import serializers class ChartSeriesConfigSerializer(serializers.Serializer): diff --git a/premium/backend/src/baserow_premium/api/fields/exceptions.py b/premium/backend/src/baserow_premium/api/fields/exceptions.py index c65427e9c9..500150853f 100644 --- a/premium/backend/src/baserow_premium/api/fields/exceptions.py +++ b/premium/backend/src/baserow_premium/api/fields/exceptions.py @@ -3,5 +3,5 @@ ERROR_GENERATIVE_AI_DOES_NOT_SUPPORT_FILE_FIELD = ( "ERROR_GENERATIVE_AI_DOES_NOT_SUPPORT_FILE_FIELD", HTTP_400_BAD_REQUEST, - "File field is not supported for the particular" "generative AI model type.", + "File field is not supported for the particular generative AI model type.", ) diff --git a/premium/backend/src/baserow_premium/api/fields/views.py b/premium/backend/src/baserow_premium/api/fields/views.py index 41cbcdc75b..06d246ac2b 100644 --- a/premium/backend/src/baserow_premium/api/fields/views.py +++ b/premium/backend/src/baserow_premium/api/fields/views.py @@ -1,11 +1,5 @@ from django.db import transaction -from baserow_premium.fields.actions import GenerateFormulaWithAIActionType -from baserow_premium.fields.exceptions import AiFieldOutputParserException -from baserow_premium.fields.job_types import GenerateAIValuesJobType -from baserow_premium.fields.models import AIField -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes from drf_spectacular.utils import extend_schema from rest_framework import status @@ -49,6 +43,12 @@ from baserow.core.handler import CoreHandler from baserow.core.jobs.handler import JobHandler from baserow.core.jobs.registries import job_type_registry +from baserow_premium.fields.actions import GenerateFormulaWithAIActionType +from baserow_premium.fields.exceptions import AiFieldOutputParserException +from baserow_premium.fields.job_types import 
GenerateAIValuesJobType +from baserow_premium.fields.models import AIField +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler from .serializers import ( GenerateAIFieldValueViewSerializer, diff --git a/premium/backend/src/baserow_premium/api/integrations/local_baserow/serializers.py b/premium/backend/src/baserow_premium/api/integrations/local_baserow/serializers.py index 0671695d89..2c31fe2e39 100644 --- a/premium/backend/src/baserow_premium/api/integrations/local_baserow/serializers.py +++ b/premium/backend/src/baserow_premium/api/integrations/local_baserow/serializers.py @@ -1,9 +1,10 @@ +from rest_framework import serializers + from baserow_premium.integrations.local_baserow.models import ( LocalBaserowTableServiceAggregationGroupBy, LocalBaserowTableServiceAggregationSeries, LocalBaserowTableServiceAggregationSortBy, ) -from rest_framework import serializers class LocalBaserowTableServiceAggregationSeriesSerializer(serializers.ModelSerializer): diff --git a/premium/backend/src/baserow_premium/api/license/serializers.py b/premium/backend/src/baserow_premium/api/license/serializers.py index 3c14ae8982..4da39aaf19 100644 --- a/premium/backend/src/baserow_premium/api/license/serializers.py +++ b/premium/backend/src/baserow_premium/api/license/serializers.py @@ -3,12 +3,13 @@ from django.contrib.auth import get_user_model -from baserow_premium.license.models import License -from baserow_premium.license.registries import SeatUsageSummary from drf_spectacular.openapi import OpenApiTypes from drf_spectacular.utils import extend_schema_field from rest_framework import serializers +from baserow_premium.license.models import License +from baserow_premium.license.registries import SeatUsageSummary + User = get_user_model() diff --git a/premium/backend/src/baserow_premium/api/license/views.py b/premium/backend/src/baserow_premium/api/license/views.py index a9611b4d00..108fe3878b 100644 --- 
a/premium/backend/src/baserow_premium/api/license/views.py +++ b/premium/backend/src/baserow_premium/api/license/views.py @@ -3,18 +3,6 @@ from django.db import transaction from django.db.models import Count, Q, Value -from baserow_premium.license.exceptions import ( - CantManuallyChangeSeatsError, - InvalidLicenseError, - LicenseAlreadyExistsError, - LicenseHasExpiredError, - LicenseInstanceIdMismatchError, - NoSeatsLeftInLicenseError, - UnsupportedLicenseError, - UserAlreadyOnLicenseError, -) -from baserow_premium.license.handler import LicenseHandler -from baserow_premium.license.models import License from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework.permissions import IsAdminUser @@ -28,6 +16,18 @@ from baserow.api.user.errors import ERROR_USER_NOT_FOUND from baserow.core.db import LockedAtomicTransaction from baserow.core.user.handler import UserHandler +from baserow_premium.license.exceptions import ( + CantManuallyChangeSeatsError, + InvalidLicenseError, + LicenseAlreadyExistsError, + LicenseHasExpiredError, + LicenseInstanceIdMismatchError, + NoSeatsLeftInLicenseError, + UnsupportedLicenseError, + UserAlreadyOnLicenseError, +) +from baserow_premium.license.handler import LicenseHandler +from baserow_premium.license.models import License from .errors import ( ERROR_CANT_MANUALLY_CHANGE_SEATS, diff --git a/premium/backend/src/baserow_premium/api/row_comments/serializers.py b/premium/backend/src/baserow_premium/api/row_comments/serializers.py index eb4d40f945..5f94ab6d71 100644 --- a/premium/backend/src/baserow_premium/api/row_comments/serializers.py +++ b/premium/backend/src/baserow_premium/api/row_comments/serializers.py @@ -1,11 +1,11 @@ -from baserow_premium.row_comments.models import ( - ALL_ROW_COMMENT_NOTIFICATION_MODES, - RowComment, -) from drf_spectacular.utils import extend_schema_serializer from rest_framework import serializers from baserow.core.prosemirror.utils 
import is_valid_prosemirror_document +from baserow_premium.row_comments.models import ( + ALL_ROW_COMMENT_NOTIFICATION_MODES, + RowComment, +) @extend_schema_serializer(deprecate_fields=["comment"]) diff --git a/premium/backend/src/baserow_premium/api/row_comments/views.py b/premium/backend/src/baserow_premium/api/row_comments/views.py index dd6d2764d0..f74feb2794 100755 --- a/premium/backend/src/baserow_premium/api/row_comments/views.py +++ b/premium/backend/src/baserow_premium/api/row_comments/views.py @@ -1,22 +1,6 @@ from django.conf import settings from django.db import transaction -from baserow_premium.api.row_comments.errors import ( - ERROR_INVALID_COMMENT_MENTION, - ERROR_ROW_COMMENT_DOES_NOT_EXIST, - ERROR_USER_NOT_COMMENT_AUTHOR, -) -from baserow_premium.row_comments.actions import ( - CreateRowCommentActionType, - DeleteRowCommentActionType, - UpdateRowCommentActionType, -) -from baserow_premium.row_comments.exceptions import ( - InvalidRowCommentMentionException, - RowCommentDoesNotExist, - UserNotRowCommentAuthorException, -) -from baserow_premium.row_comments.handler import RowCommentHandler from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, extend_schema from rest_framework.pagination import LimitOffsetPagination @@ -34,6 +18,22 @@ from baserow.contrib.database.table.exceptions import TableDoesNotExist from baserow.core.action.registries import action_type_registry from baserow.core.exceptions import UserNotInWorkspace +from baserow_premium.api.row_comments.errors import ( + ERROR_INVALID_COMMENT_MENTION, + ERROR_ROW_COMMENT_DOES_NOT_EXIST, + ERROR_USER_NOT_COMMENT_AUTHOR, +) +from baserow_premium.row_comments.actions import ( + CreateRowCommentActionType, + DeleteRowCommentActionType, + UpdateRowCommentActionType, +) +from baserow_premium.row_comments.exceptions import ( + InvalidRowCommentMentionException, + RowCommentDoesNotExist, + UserNotRowCommentAuthorException, +) +from 
baserow_premium.row_comments.handler import RowCommentHandler from .serializers import ( RowCommentCreateSerializer, diff --git a/premium/backend/src/baserow_premium/api/user/user_data_types.py b/premium/backend/src/baserow_premium/api/user/user_data_types.py index d6342d440f..a71ab5e626 100644 --- a/premium/backend/src/baserow_premium/api/user/user_data_types.py +++ b/premium/backend/src/baserow_premium/api/user/user_data_types.py @@ -1,10 +1,9 @@ from typing import List -from baserow_premium.license.registries import LicenseType - from baserow.api.user.registries import UserDataType from baserow.core.models import Workspace from baserow.core.registries import plugin_registry +from baserow_premium.license.registries import LicenseType class ActiveLicensesDataType(UserDataType): diff --git a/premium/backend/src/baserow_premium/api/views/calendar/serializers.py b/premium/backend/src/baserow_premium/api/views/calendar/serializers.py index 1c252ffb80..ea2437c346 100644 --- a/premium/backend/src/baserow_premium/api/views/calendar/serializers.py +++ b/premium/backend/src/baserow_premium/api/views/calendar/serializers.py @@ -1,6 +1,5 @@ from django.conf import settings -from baserow_premium.views.models import CalendarViewFieldOptions from rest_framework import serializers from baserow.contrib.database.api.rows.serializers import ( @@ -9,6 +8,7 @@ ) from baserow.contrib.database.search.handler import ALL_SEARCH_MODES from baserow.core.datetime import get_timezones +from baserow_premium.views.models import CalendarViewFieldOptions class CalendarViewFieldOptionsSerializer(serializers.ModelSerializer): diff --git a/premium/backend/src/baserow_premium/api/views/calendar/views.py b/premium/backend/src/baserow_premium/api/views/calendar/views.py index 4ad656b13e..687cd2534c 100644 --- a/premium/backend/src/baserow_premium/api/views/calendar/views.py +++ b/premium/backend/src/baserow_premium/api/views/calendar/views.py @@ -2,20 +2,6 @@ from django.db import transaction from 
django.http import HttpResponse -from baserow_premium.api.views.calendar.errors import ( - ERROR_CALENDAR_VIEW_HAS_NO_DATE_FIELD, -) -from baserow_premium.api.views.calendar.serializers import ( - ListCalendarRowsQueryParamsSerializer, - get_calendar_view_example_response_serializer, -) -from baserow_premium.ical_utils import build_calendar -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler -from baserow_premium.views.actions import RotateCalendarIcalSlugActionType -from baserow_premium.views.exceptions import CalendarViewHasNoDateField -from baserow_premium.views.handler import get_rows_grouped_by_date_field -from baserow_premium.views.models import CalendarView from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes from drf_spectacular.utils import extend_schema from rest_framework.permissions import AllowAny, IsAuthenticated @@ -82,6 +68,20 @@ from baserow.core.db import specific_queryset from baserow.core.exceptions import UserNotInWorkspace from baserow.core.handler import CoreHandler +from baserow_premium.api.views.calendar.errors import ( + ERROR_CALENDAR_VIEW_HAS_NO_DATE_FIELD, +) +from baserow_premium.api.views.calendar.serializers import ( + ListCalendarRowsQueryParamsSerializer, + get_calendar_view_example_response_serializer, +) +from baserow_premium.ical_utils import build_calendar +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler +from baserow_premium.views.actions import RotateCalendarIcalSlugActionType +from baserow_premium.views.exceptions import CalendarViewHasNoDateField +from baserow_premium.views.handler import get_rows_grouped_by_date_field +from baserow_premium.views.models import CalendarView class CalendarViewView(APIView): diff --git a/premium/backend/src/baserow_premium/api/views/kanban/serializers.py b/premium/backend/src/baserow_premium/api/views/kanban/serializers.py index d0592e6497..023f2a6dc1 
100644 --- a/premium/backend/src/baserow_premium/api/views/kanban/serializers.py +++ b/premium/backend/src/baserow_premium/api/views/kanban/serializers.py @@ -1,10 +1,10 @@ -from baserow_premium.views.models import KanbanViewFieldOptions from rest_framework import serializers from baserow.contrib.database.api.rows.serializers import ( get_example_multiple_rows_metadata_serializer, get_example_row_serializer_class, ) +from baserow_premium.views.models import KanbanViewFieldOptions class KanbanViewFieldOptionsSerializer(serializers.ModelSerializer): diff --git a/premium/backend/src/baserow_premium/api/views/kanban/views.py b/premium/backend/src/baserow_premium/api/views/kanban/views.py index ea6a7e1fd2..8572ed19a1 100644 --- a/premium/backend/src/baserow_premium/api/views/kanban/views.py +++ b/premium/backend/src/baserow_premium/api/views/kanban/views.py @@ -1,10 +1,3 @@ -from baserow_premium.api.views.errors import ERROR_INVALID_SELECT_OPTION_PARAMETER -from baserow_premium.api.views.exceptions import InvalidSelectOptionParameter -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler -from baserow_premium.views.exceptions import KanbanViewHasNoSingleSelectField -from baserow_premium.views.handler import get_rows_grouped_by_single_select_field -from baserow_premium.views.models import KanbanView from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes from drf_spectacular.utils import extend_schema from rest_framework.permissions import AllowAny @@ -54,6 +47,13 @@ from baserow.contrib.database.views.signals import view_loaded from baserow.core.exceptions import UserNotInWorkspace from baserow.core.handler import CoreHandler +from baserow_premium.api.views.errors import ERROR_INVALID_SELECT_OPTION_PARAMETER +from baserow_premium.api.views.exceptions import InvalidSelectOptionParameter +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler 
+from baserow_premium.views.exceptions import KanbanViewHasNoSingleSelectField +from baserow_premium.views.handler import get_rows_grouped_by_single_select_field +from baserow_premium.views.models import KanbanView from .errors import ( ERROR_KANBAN_DOES_NOT_EXIST, diff --git a/premium/backend/src/baserow_premium/api/views/timeline/serializers.py b/premium/backend/src/baserow_premium/api/views/timeline/serializers.py index bfb06967b7..0e46f80938 100644 --- a/premium/backend/src/baserow_premium/api/views/timeline/serializers.py +++ b/premium/backend/src/baserow_premium/api/views/timeline/serializers.py @@ -1,6 +1,7 @@ -from baserow_premium.views.models import TimelineViewFieldOptions from rest_framework import serializers +from baserow_premium.views.models import TimelineViewFieldOptions + class TimelineViewFieldOptionsSerializer(serializers.ModelSerializer): class Meta: diff --git a/premium/backend/src/baserow_premium/api/views/timeline/views.py b/premium/backend/src/baserow_premium/api/views/timeline/views.py index 0619572c6f..5d698cddb6 100644 --- a/premium/backend/src/baserow_premium/api/views/timeline/views.py +++ b/premium/backend/src/baserow_premium/api/views/timeline/views.py @@ -1,14 +1,3 @@ -from baserow_premium.api.views.timeline.errors import ( - ERROR_TIMELINE_VIEW_HAS_INVALID_DATE_SETTINGS, -) -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler -from baserow_premium.views.exceptions import TimelineViewHasInvalidDateSettings -from baserow_premium.views.handler import ( - get_public_timeline_view_filtered_queryset, - get_timeline_view_filtered_queryset, -) -from baserow_premium.views.models import TimelineView from drf_spectacular.openapi import OpenApiParameter, OpenApiTypes from drf_spectacular.utils import extend_schema from rest_framework.permissions import AllowAny, IsAuthenticated @@ -81,6 +70,17 @@ from baserow.contrib.database.views.signals import view_loaded from 
baserow.core.exceptions import UserNotInWorkspace from baserow.core.handler import CoreHandler +from baserow_premium.api.views.timeline.errors import ( + ERROR_TIMELINE_VIEW_HAS_INVALID_DATE_SETTINGS, +) +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler +from baserow_premium.views.exceptions import TimelineViewHasInvalidDateSettings +from baserow_premium.views.handler import ( + get_public_timeline_view_filtered_queryset, + get_timeline_view_filtered_queryset, +) +from baserow_premium.views.models import TimelineView from .errors import ERROR_TIMELINE_DOES_NOT_EXIST from .serializers import TimelineViewFieldOptionsSerializer diff --git a/premium/backend/src/baserow_premium/api/views/views.py b/premium/backend/src/baserow_premium/api/views/views.py index 75705feec3..1e668ca866 100644 --- a/premium/backend/src/baserow_premium/api/views/views.py +++ b/premium/backend/src/baserow_premium/api/views/views.py @@ -3,14 +3,6 @@ from django.db import transaction -from baserow_premium.api.views.errors import ( - ERROR_CANNOT_UPDATE_PREMIUM_ATTRIBUTES_ON_TEMPLATE, -) -from baserow_premium.api.views.exceptions import CannotUpdatePremiumAttributesOnTemplate -from baserow_premium.api.views.serializers import UpdatePremiumViewAttributesSerializer -from baserow_premium.api.views.signers import export_public_view_signer -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, extend_schema from itsdangerous.exc import BadData @@ -59,6 +51,14 @@ from baserow.contrib.database.views.registries import view_type_registry from baserow.core.action.registries import action_type_registry from baserow.core.exceptions import UserNotInWorkspace +from baserow_premium.api.views.errors import ( + ERROR_CANNOT_UPDATE_PREMIUM_ATTRIBUTES_ON_TEMPLATE, +) +from 
baserow_premium.api.views.exceptions import CannotUpdatePremiumAttributesOnTemplate +from baserow_premium.api.views.serializers import UpdatePremiumViewAttributesSerializer +from baserow_premium.api.views.signers import export_public_view_signer +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler class PremiumViewAttributesView(APIView): diff --git a/premium/backend/src/baserow_premium/apps.py b/premium/backend/src/baserow_premium/apps.py index d3f88ca3d6..26e87deb5a 100644 --- a/premium/backend/src/baserow_premium/apps.py +++ b/premium/backend/src/baserow_premium/apps.py @@ -9,6 +9,7 @@ class BaserowPremiumConfig(AppConfig): def ready(self): # noinspection PyUnresolvedReferences import baserow_premium.row_comments.receivers # noqa: F401 + from baserow.core.registries import application_type_registry from baserow_premium.api.user.user_data_types import ActiveLicensesDataType from baserow_premium.builder.application_types import ( PremiumBuilderApplicationType, @@ -18,8 +19,6 @@ def ready(self): RowCommentsNotificationModeMetadataType, ) - from baserow.core.registries import application_type_registry - # We replace the original application type with the premium one to # add the licences to workspace serializer application_type_registry.unregister(PremiumBuilderApplicationType.type) @@ -165,26 +164,22 @@ def ready(self): permission_manager_type_registry.register(ViewOwnershipPermissionManagerType()) + from baserow.core.notifications.registries import notification_type_registry from baserow_premium.row_comments.notification_types import ( RowCommentMentionNotificationType, RowCommentNotificationType, ) - from baserow.core.notifications.registries import notification_type_registry - notification_type_registry.register(RowCommentMentionNotificationType()) notification_type_registry.register(RowCommentNotificationType()) + from baserow.api.settings.registries import settings_data_registry from 
baserow_premium.api.settings.settings_types import ( InstanceWideSettingsDataType, ) - from baserow.api.settings.registries import settings_data_registry - settings_data_registry.register(InstanceWideSettingsDataType()) - from baserow_premium.integrations.registries import grouped_aggregation_registry - from baserow.contrib.database.fields.field_aggregations import ( AverageFieldAggregationType, CheckedFieldAggregationType, @@ -204,6 +199,7 @@ def ready(self): UniqueCountFieldAggregationType, VarianceFieldAggregationType, ) + from baserow_premium.integrations.registries import grouped_aggregation_registry grouped_aggregation_registry.register(CountFieldAggregationType()) grouped_aggregation_registry.register(EmptyCountFieldAggregationType()) @@ -225,10 +221,6 @@ def ready(self): grouped_aggregation_registry.register(VarianceFieldAggregationType()) grouped_aggregation_registry.register(MedianFieldAggregationType()) - from baserow_premium.integrations.registries import ( - grouped_aggregation_group_by_registry, - ) - from baserow.contrib.database.fields.field_types import ( AutonumberFieldType, BooleanFieldType, @@ -241,6 +233,9 @@ def ready(self): TextFieldType, URLFieldType, ) + from baserow_premium.integrations.registries import ( + grouped_aggregation_group_by_registry, + ) grouped_aggregation_group_by_registry.register(TextFieldType()) grouped_aggregation_group_by_registry.register(LongTextFieldType()) @@ -253,6 +248,8 @@ def ready(self): grouped_aggregation_group_by_registry.register(AutonumberFieldType()) grouped_aggregation_group_by_registry.register(SingleSelectFieldType()) + from baserow.contrib.dashboard.widgets.registries import widget_type_registry + from baserow.core.services.registries import service_type_registry from baserow_premium.dashboard.widgets.widget_types import ( ChartWidgetType, PieChartWidgetType, @@ -261,9 +258,6 @@ def ready(self): LocalBaserowGroupedAggregateRowsUserServiceType, ) - from baserow.contrib.dashboard.widgets.registries 
import widget_type_registry - from baserow.core.services.registries import service_type_registry - service_type_registry.register( LocalBaserowGroupedAggregateRowsUserServiceType() ) diff --git a/premium/backend/src/baserow_premium/dashboard/widgets/models.py b/premium/backend/src/baserow_premium/dashboard/widgets/models.py index c9fa7d7b2a..81c7b2824f 100644 --- a/premium/backend/src/baserow_premium/dashboard/widgets/models.py +++ b/premium/backend/src/baserow_premium/dashboard/widgets/models.py @@ -1,11 +1,10 @@ from django.db import models +from baserow.contrib.dashboard.widgets.models import Widget from baserow_premium.integrations.local_baserow.models import ( LocalBaserowTableServiceAggregationSeries, ) -from baserow.contrib.dashboard.widgets.models import Widget - class ChartSeriesChartType(models.TextChoices): BAR = "BAR", "Bar" diff --git a/premium/backend/src/baserow_premium/dashboard/widgets/widget_types.py b/premium/backend/src/baserow_premium/dashboard/widgets/widget_types.py index 88131bdd18..0005c8e102 100644 --- a/premium/backend/src/baserow_premium/dashboard/widgets/widget_types.py +++ b/premium/backend/src/baserow_premium/dashboard/widgets/widget_types.py @@ -3,6 +3,16 @@ from django.db import IntegrityError from django.db.models import QuerySet +from rest_framework import serializers + +from baserow.contrib.dashboard.data_sources.handler import DashboardDataSourceHandler +from baserow.contrib.dashboard.data_sources.models import DashboardDataSource +from baserow.contrib.dashboard.types import WidgetDict +from baserow.contrib.dashboard.widgets.exceptions import WidgetImproperlyConfigured +from baserow.contrib.dashboard.widgets.models import Widget +from baserow.contrib.dashboard.widgets.registries import WidgetType +from baserow.contrib.dashboard.widgets.types import UpdatedWidget +from baserow.core.services.registries import service_type_registry from baserow_premium.api.dashboard.widgets.serializers import ( ChartSeriesConfigSerializer, 
PieChartSeriesConfigSerializer, @@ -19,16 +29,6 @@ ) from baserow_premium.license.features import PREMIUM from baserow_premium.license.handler import LicenseHandler -from rest_framework import serializers - -from baserow.contrib.dashboard.data_sources.handler import DashboardDataSourceHandler -from baserow.contrib.dashboard.data_sources.models import DashboardDataSource -from baserow.contrib.dashboard.types import WidgetDict -from baserow.contrib.dashboard.widgets.exceptions import WidgetImproperlyConfigured -from baserow.contrib.dashboard.widgets.models import Widget -from baserow.contrib.dashboard.widgets.registries import WidgetType -from baserow.contrib.dashboard.widgets.types import UpdatedWidget -from baserow.core.services.registries import service_type_registry class ChartWidgetType(WidgetType): diff --git a/premium/backend/src/baserow_premium/export/exporter_types.py b/premium/backend/src/baserow_premium/export/exporter_types.py index 800a6e8c63..882a2369b4 100644 --- a/premium/backend/src/baserow_premium/export/exporter_types.py +++ b/premium/backend/src/baserow_premium/export/exporter_types.py @@ -3,8 +3,6 @@ from typing import List, Optional, Type import zipstream -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler from baserow.config.settings.base import BASEROW_DEFAULT_ZIP_COMPRESS_LEVEL from baserow.contrib.database.api.export.serializers import ( @@ -17,6 +15,8 @@ from baserow.contrib.database.fields.field_types import FileFieldType from baserow.contrib.database.views.view_types import GridViewType from baserow.core.storage import ExportZipFile, get_default_storage +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler from .serializers import ExcelExporterOptionsSerializer, FileExporterOptionsSerializer from .utils import get_unique_name, safe_xml_tag_name, to_xml diff --git a/premium/backend/src/baserow_premium/fields/actions.py 
b/premium/backend/src/baserow_premium/fields/actions.py index 25d8cc1836..1d942c640f 100644 --- a/premium/backend/src/baserow_premium/fields/actions.py +++ b/premium/backend/src/baserow_premium/fields/actions.py @@ -4,14 +4,13 @@ from django.contrib.auth.models import AbstractUser from django.utils.translation import gettext_lazy as _ -from baserow_premium.fields.handler import AIFieldHandler - from baserow.contrib.database.action.scopes import ( TABLE_ACTION_CONTEXT, TableActionScopeType, ) from baserow.contrib.database.table.models import Table from baserow.core.action.registries import ActionType, ActionTypeDescription +from baserow_premium.fields.handler import AIFieldHandler class GenerateFormulaWithAIActionType(ActionType): diff --git a/premium/backend/src/baserow_premium/fields/field_types.py b/premium/backend/src/baserow_premium/fields/field_types.py index dce63142ee..6b917feb77 100644 --- a/premium/backend/src/baserow_premium/fields/field_types.py +++ b/premium/backend/src/baserow_premium/fields/field_types.py @@ -5,12 +5,6 @@ from django.db.models import Expression, F from django.utils.functional import lazy -from baserow_premium.api.fields.exceptions import ( - ERROR_GENERATIVE_AI_DOES_NOT_SUPPORT_FILE_FIELD, -) -from baserow_premium.fields.exceptions import GenerativeAITypeDoesNotSupportFileField -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler from rest_framework import serializers from baserow.api.generative_ai.errors import ( @@ -42,6 +36,12 @@ generative_ai_model_type_registry, ) from baserow.core.jobs.handler import JobHandler +from baserow_premium.api.fields.exceptions import ( + ERROR_GENERATIVE_AI_DOES_NOT_SUPPORT_FILE_FIELD, +) +from baserow_premium.fields.exceptions import GenerativeAITypeDoesNotSupportFileField +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler from .models import AIField from .registries import 
ai_field_output_registry diff --git a/premium/backend/src/baserow_premium/fields/handler.py b/premium/backend/src/baserow_premium/fields/handler.py index 1b16ed1ff6..54091fce5d 100644 --- a/premium/backend/src/baserow_premium/fields/handler.py +++ b/premium/backend/src/baserow_premium/fields/handler.py @@ -1,14 +1,13 @@ import json from typing import Optional -from baserow_premium.fields.exceptions import AiFieldOutputParserException -from baserow_premium.prompts import get_generate_formula_prompt - from baserow.contrib.database.fields.registries import field_type_registry from baserow.contrib.database.table.models import Table from baserow.core.db import specific_iterator from baserow.core.generative_ai.exceptions import ModelDoesNotBelongToType from baserow.core.generative_ai.registries import generative_ai_model_type_registry +from baserow_premium.fields.exceptions import AiFieldOutputParserException +from baserow_premium.prompts import get_generate_formula_prompt from .pydantic_models import BaserowFormulaModel diff --git a/premium/backend/src/baserow_premium/fields/job_types.py b/premium/backend/src/baserow_premium/fields/job_types.py index 6eb056d7ba..0e97181b5c 100644 --- a/premium/backend/src/baserow_premium/fields/job_types.py +++ b/premium/backend/src/baserow_premium/fields/job_types.py @@ -6,7 +6,6 @@ from django.contrib.auth.models import AbstractUser from django.db.models import QuerySet -from baserow_premium.generative_ai.managers import AIFileManager from loguru import logger from rest_framework import serializers @@ -41,6 +40,7 @@ from baserow.core.jobs.exceptions import MaxJobCountExceeded from baserow.core.jobs.registries import JobType from baserow.core.utils import ChildProgressBuilder, Progress +from baserow_premium.generative_ai.managers import AIFileManager from .models import AIField, GenerateAIValuesJob from .registries import ai_field_output_registry diff --git a/premium/backend/src/baserow_premium/fields/visitors.py 
b/premium/backend/src/baserow_premium/fields/visitors.py index 2fa8dfc28d..4fc1d72bbc 100644 --- a/premium/backend/src/baserow_premium/fields/visitors.py +++ b/premium/backend/src/baserow_premium/fields/visitors.py @@ -184,7 +184,7 @@ def visitRightWhitespaceOrComments( def extract_field_id_dependencies( - formula: Union[str, BaserowFormulaObject] + formula: Union[str, BaserowFormulaObject], ) -> set[int]: """ Extracts all field IDs referenced by get("fields.field_X") calls in the formula. diff --git a/premium/backend/src/baserow_premium/ical_utils.py b/premium/backend/src/baserow_premium/ical_utils.py index 70af00a8f9..42faee6010 100644 --- a/premium/backend/src/baserow_premium/ical_utils.py +++ b/premium/backend/src/baserow_premium/ical_utils.py @@ -9,12 +9,12 @@ from django.conf import settings from django.db.models import QuerySet -from baserow_premium.views.exceptions import CalendarViewHasNoDateField -from baserow_premium.views.models import CalendarView from icalendar import Calendar, Event from baserow.contrib.database.fields.models import Field from baserow.core.db import specific_queryset +from baserow_premium.views.exceptions import CalendarViewHasNoDateField +from baserow_premium.views.models import CalendarView # required by https://icalendar.org/iCalendar-RFC-5545/3-7-3-product-identifier.html ICAL_PROD_ID = "Baserow / baserow.io" @@ -43,7 +43,7 @@ def url_maker(row_id: str) -> str: return urljoin( settings.PUBLIC_WEB_FRONTEND_URL, - f"/database/{database_id}" f"/table/{table_id}/{view_id}" f"/row/{row_id}", + f"/database/{database_id}/table/{table_id}/{view_id}/row/{row_id}", ) return url_maker diff --git a/premium/backend/src/baserow_premium/integrations/local_baserow/service_types.py b/premium/backend/src/baserow_premium/integrations/local_baserow/service_types.py index 92d1048807..68b66ec748 100644 --- a/premium/backend/src/baserow_premium/integrations/local_baserow/service_types.py +++ 
b/premium/backend/src/baserow_premium/integrations/local_baserow/service_types.py @@ -3,26 +3,6 @@ from django.conf import settings from django.db.models import F -from baserow_premium.api.integrations.local_baserow.serializers import ( - LocalBaserowTableServiceAggregationGroupBySerializer, - LocalBaserowTableServiceAggregationSeriesSerializer, - LocalBaserowTableServiceAggregationSortBySerializer, -) -from baserow_premium.integrations.local_baserow.models import ( - LocalBaserowGroupedAggregateRows, - LocalBaserowTableServiceAggregationGroupBy, - LocalBaserowTableServiceAggregationSeries, - LocalBaserowTableServiceAggregationSortBy, -) -from baserow_premium.integrations.registries import ( - grouped_aggregation_group_by_registry, - grouped_aggregation_registry, -) -from baserow_premium.services.types import ( - ServiceAggregationGroupByDict, - ServiceAggregationSeriesDict, - ServiceAggregationSortByDict, -) from rest_framework.exceptions import ValidationError as DRFValidationError from baserow.contrib.database.api.fields.serializers import FieldSerializer @@ -48,6 +28,26 @@ from baserow.core.services.registries import DispatchTypes from baserow.core.services.types import DispatchResult from baserow.core.utils import atomic_if_not_already +from baserow_premium.api.integrations.local_baserow.serializers import ( + LocalBaserowTableServiceAggregationGroupBySerializer, + LocalBaserowTableServiceAggregationSeriesSerializer, + LocalBaserowTableServiceAggregationSortBySerializer, +) +from baserow_premium.integrations.local_baserow.models import ( + LocalBaserowGroupedAggregateRows, + LocalBaserowTableServiceAggregationGroupBy, + LocalBaserowTableServiceAggregationSeries, + LocalBaserowTableServiceAggregationSortBy, +) +from baserow_premium.integrations.registries import ( + grouped_aggregation_group_by_registry, + grouped_aggregation_registry, +) +from baserow_premium.services.types import ( + ServiceAggregationGroupByDict, + ServiceAggregationSeriesDict, + 
ServiceAggregationSortByDict, +) class LocalBaserowGroupedAggregateRowsUserServiceType( diff --git a/premium/backend/src/baserow_premium/license/handler.py b/premium/backend/src/baserow_premium/license/handler.py index 7bf8c7be79..dea20281e5 100755 --- a/premium/backend/src/baserow_premium/license/handler.py +++ b/premium/backend/src/baserow_premium/license/handler.py @@ -13,12 +13,6 @@ from django.db.models import Q import requests -from baserow_premium.api.user.user_data_types import ActiveLicensesDataType -from baserow_premium.license.exceptions import ( - CantManuallyChangeSeatsError, - InvalidLicenseError, -) -from baserow_premium.license.models import License from cryptography.exceptions import InvalidSignature from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization @@ -35,6 +29,12 @@ from baserow.core.registries import plugin_registry from baserow.core.utils import get_baserow_saas_base_url from baserow.ws.signals import broadcast_to_users +from baserow_premium.api.user.user_data_types import ActiveLicensesDataType +from baserow_premium.license.exceptions import ( + CantManuallyChangeSeatsError, + InvalidLicenseError, +) +from baserow_premium.license.models import License from .constants import ( AUTHORITY_RESPONSE_DOES_NOT_EXIST, diff --git a/premium/backend/src/baserow_premium/license/license_types.py b/premium/backend/src/baserow_premium/license/license_types.py index c8f2b3fcd9..17cb4937c2 100644 --- a/premium/backend/src/baserow_premium/license/license_types.py +++ b/premium/backend/src/baserow_premium/license/license_types.py @@ -1,9 +1,8 @@ +from baserow.core.models import User from baserow_premium.license.features import PREMIUM from baserow_premium.license.models import License, LicenseUser from baserow_premium.license.registries import LicenseType, SeatUsageSummary -from baserow.core.models import User - class PremiumLicenseType(LicenseType): type = "premium" diff --git 
a/premium/backend/src/baserow_premium/license/models.py b/premium/backend/src/baserow_premium/license/models.py index cc7e96d98f..f0d8d08d91 100644 --- a/premium/backend/src/baserow_premium/license/models.py +++ b/premium/backend/src/baserow_premium/license/models.py @@ -5,9 +5,10 @@ from django.db import models from django.utils.functional import cached_property -from baserow_premium.license.exceptions import InvalidLicenseError from dateutil import parser +from baserow_premium.license.exceptions import InvalidLicenseError + User = get_user_model() diff --git a/premium/backend/src/baserow_premium/license/plugin.py b/premium/backend/src/baserow_premium/license/plugin.py index db85227c93..9bcff01e67 100644 --- a/premium/backend/src/baserow_premium/license/plugin.py +++ b/premium/backend/src/baserow_premium/license/plugin.py @@ -4,13 +4,12 @@ from django.contrib.auth.models import AbstractUser from django.db.models import Q, QuerySet +from baserow.core.cache import local_cache +from baserow.core.models import Workspace from baserow_premium.license.exceptions import InvalidLicenseError from baserow_premium.license.models import License from baserow_premium.license.registries import LicenseType, SeatUsageSummary -from baserow.core.cache import local_cache -from baserow.core.models import Workspace - User = get_user_model() LICENSE_CACHE_KEY_PREFIX = "license" diff --git a/premium/backend/src/baserow_premium/license/registries.py b/premium/backend/src/baserow_premium/license/registries.py index 8b01136d45..c7a0ef8926 100644 --- a/premium/backend/src/baserow_premium/license/registries.py +++ b/premium/backend/src/baserow_premium/license/registries.py @@ -2,12 +2,11 @@ import dataclasses from typing import Dict, List, Optional -from baserow_premium.license.models import License - from baserow.contrib.builder.handler import BuilderHandler from baserow.core.cache import local_cache from baserow.core.models import Workspace from baserow.core.registry import Instance, 
Registry +from baserow_premium.license.models import License @dataclasses.dataclass diff --git a/premium/backend/src/baserow_premium/migrations/0016_rowcommentsnotificationmode.py b/premium/backend/src/baserow_premium/migrations/0016_rowcommentsnotificationmode.py index 23b004a18f..59974ce27a 100644 --- a/premium/backend/src/baserow_premium/migrations/0016_rowcommentsnotificationmode.py +++ b/premium/backend/src/baserow_premium/migrations/0016_rowcommentsnotificationmode.py @@ -4,9 +4,8 @@ from django.conf import settings from django.db import migrations, models -import baserow_premium.row_comments.models - import baserow.core.fields +import baserow_premium.row_comments.models class Migration(migrations.Migration): diff --git a/premium/backend/src/baserow_premium/permission_manager.py b/premium/backend/src/baserow_premium/permission_manager.py index e562df0e04..6c7a8f64ff 100644 --- a/premium/backend/src/baserow_premium/permission_manager.py +++ b/premium/backend/src/baserow_premium/permission_manager.py @@ -4,10 +4,6 @@ from django.contrib.auth import get_user_model from django.db.models import Q, QuerySet -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler -from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL - from baserow.contrib.database.table.models import Table from baserow.contrib.database.views.operations import ( CanReceiveNotificationOnSubmitFormViewOperationType, @@ -59,6 +55,9 @@ from baserow.core.registries import PermissionManagerType, object_scope_type_registry from baserow.core.subjects import UserSubjectType from baserow.core.types import Actor, PermissionCheck +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler +from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL User = get_user_model() diff --git a/premium/backend/src/baserow_premium/plugins.py b/premium/backend/src/baserow_premium/plugins.py index 
e2e7fed03b..b6fbefbeab 100644 --- a/premium/backend/src/baserow_premium/plugins.py +++ b/premium/backend/src/baserow_premium/plugins.py @@ -1,10 +1,9 @@ from django.urls import include, path +from baserow.core.registries import Plugin from baserow_premium.api import urls as api_urls from baserow_premium.license.plugin import LicensePlugin -from baserow.core.registries import Plugin - class PremiumPlugin(Plugin): type = "premium" diff --git a/premium/backend/src/baserow_premium/populate.py b/premium/backend/src/baserow_premium/populate.py index 26738fdf24..80c2daebbf 100644 --- a/premium/backend/src/baserow_premium/populate.py +++ b/premium/backend/src/baserow_premium/populate.py @@ -1,6 +1,5 @@ -from baserow_premium.license.models import License - from baserow.core.models import Settings +from baserow_premium.license.models import License LICENSE = ( "eyJ2ZXJzaW9uIjogMSwgImlkIjogIjUzODczYmVkLWJlNTQtNDEwZS04N2EzLTE2OTM2" diff --git a/premium/backend/src/baserow_premium/row_comments/handler.py b/premium/backend/src/baserow_premium/row_comments/handler.py index 0d6dd2c3af..905e3fff18 100644 --- a/premium/backend/src/baserow_premium/row_comments/handler.py +++ b/premium/backend/src/baserow_premium/row_comments/handler.py @@ -3,6 +3,15 @@ from django.contrib.auth.models import AbstractUser from django.db.models import QuerySet +from baserow.contrib.database.rows.exceptions import RowDoesNotExist +from baserow.contrib.database.rows.handler import RowHandler +from baserow.contrib.database.table.handler import TableHandler +from baserow.core.handler import CoreHandler +from baserow.core.prosemirror.utils import ( + extract_mentioned_users_in_workspace, + is_valid_prosemirror_document, +) +from baserow.core.trash.handler import TrashHandler from baserow_premium.license.features import PREMIUM from baserow_premium.license.handler import LicenseHandler from baserow_premium.row_comments.exceptions import ( @@ -31,16 +40,6 @@ row_comments_notification_mode_updated, ) -from 
baserow.contrib.database.rows.exceptions import RowDoesNotExist -from baserow.contrib.database.rows.handler import RowHandler -from baserow.contrib.database.table.handler import TableHandler -from baserow.core.handler import CoreHandler -from baserow.core.prosemirror.utils import ( - extract_mentioned_users_in_workspace, - is_valid_prosemirror_document, -) -from baserow.core.trash.handler import TrashHandler - class RowCommentHandler: @classmethod diff --git a/premium/backend/src/baserow_premium/row_comments/models.py b/premium/backend/src/baserow_premium/row_comments/models.py index 0b765a1818..c7a4551e88 100644 --- a/premium/backend/src/baserow_premium/row_comments/models.py +++ b/premium/backend/src/baserow_premium/row_comments/models.py @@ -59,7 +59,7 @@ class RowCommentsNotificationModes(str, Enum): ALL_ROW_COMMENT_NOTIFICATION_MODES = [ - getattr(mode, "value") for mode in RowCommentsNotificationModes + mode.value for mode in RowCommentsNotificationModes ] diff --git a/premium/backend/src/baserow_premium/row_comments/notification_types.py b/premium/backend/src/baserow_premium/row_comments/notification_types.py index 6941c260d1..bacf44a97a 100644 --- a/premium/backend/src/baserow_premium/row_comments/notification_types.py +++ b/premium/backend/src/baserow_premium/row_comments/notification_types.py @@ -4,12 +4,6 @@ from django.dispatch import receiver from django.utils.translation import gettext as _ -from baserow_premium.row_comments.handler import RowCommentHandler -from baserow_premium.row_comments.models import ( - RowCommentsNotificationMode, - RowCommentsNotificationModes, -) - from baserow.core.notifications.handler import NotificationHandler from baserow.core.notifications.models import NotificationRecipient from baserow.core.notifications.registries import ( @@ -17,6 +11,11 @@ NotificationType, ) from baserow.core.prosemirror.utils import prosemirror_doc_to_plain_text +from baserow_premium.row_comments.handler import RowCommentHandler +from 
baserow_premium.row_comments.models import ( + RowCommentsNotificationMode, + RowCommentsNotificationModes, +) from .signals import row_comment_created, row_comment_updated diff --git a/premium/backend/src/baserow_premium/row_comments/receivers.py b/premium/backend/src/baserow_premium/row_comments/receivers.py index 5ef404a51d..42fe2eb986 100644 --- a/premium/backend/src/baserow_premium/row_comments/receivers.py +++ b/premium/backend/src/baserow_premium/row_comments/receivers.py @@ -1,8 +1,7 @@ from django.dispatch import receiver -from baserow_premium.row_comments.models import RowComment - from baserow.core.trash.signals import permanently_deleted +from baserow_premium.row_comments.models import RowComment @receiver(permanently_deleted, sender="row", dispatch_uid="row_comment_cleanup") diff --git a/premium/backend/src/baserow_premium/row_comments/row_metadata_types.py b/premium/backend/src/baserow_premium/row_comments/row_metadata_types.py index 2b18ab90fc..4506631e97 100644 --- a/premium/backend/src/baserow_premium/row_comments/row_metadata_types.py +++ b/premium/backend/src/baserow_premium/row_comments/row_metadata_types.py @@ -2,16 +2,16 @@ from django.db.models import Count +from rest_framework import serializers +from rest_framework.fields import Field + +from baserow.contrib.database.rows.registries import RowMetadataType from baserow_premium.row_comments.models import ( ALL_ROW_COMMENT_NOTIFICATION_MODES, ROW_COMMENT_NOTIFICATION_DEFAULT_MODE, RowComment, RowCommentsNotificationMode, ) -from rest_framework import serializers -from rest_framework.fields import Field - -from baserow.contrib.database.rows.registries import RowMetadataType class RowCommentCountMetadataType(RowMetadataType): diff --git a/premium/backend/src/baserow_premium/row_comments/trash_types.py b/premium/backend/src/baserow_premium/row_comments/trash_types.py index 1843299a31..337ad56199 100644 --- a/premium/backend/src/baserow_premium/row_comments/trash_types.py +++ 
b/premium/backend/src/baserow_premium/row_comments/trash_types.py @@ -2,13 +2,12 @@ from django.contrib.auth.models import AbstractUser -from baserow_premium.row_comments.models import RowComment -from baserow_premium.row_comments.operations import RestoreRowCommentOperationType -from baserow_premium.row_comments.signals import row_comment_restored - from baserow.contrib.database.table.models import Table from baserow.core.exceptions import TrashItemDoesNotExist from baserow.core.trash.registries import TrashableItemType +from baserow_premium.row_comments.models import RowComment +from baserow_premium.row_comments.operations import RestoreRowCommentOperationType +from baserow_premium.row_comments.signals import row_comment_restored class RowCommentTrashableItemType(TrashableItemType): diff --git a/premium/backend/src/baserow_premium/usage/handler.py b/premium/backend/src/baserow_premium/usage/handler.py index 2641b9ae2e..e056b3a767 100644 --- a/premium/backend/src/baserow_premium/usage/handler.py +++ b/premium/backend/src/baserow_premium/usage/handler.py @@ -1,12 +1,11 @@ from datetime import datetime, timezone from typing import Optional -from baserow_premium.license.plugin import LicensePlugin -from baserow_premium.plugins import PremiumPlugin - from baserow.core.models import Workspace from baserow.core.registries import plugin_registry from baserow.core.utils import grouper +from baserow_premium.license.plugin import LicensePlugin +from baserow_premium.plugins import PremiumPlugin class PremiumUsageHandler: diff --git a/premium/backend/src/baserow_premium/usage/tasks.py b/premium/backend/src/baserow_premium/usage/tasks.py index e1bda5e11d..6c46f57b1d 100644 --- a/premium/backend/src/baserow_premium/usage/tasks.py +++ b/premium/backend/src/baserow_premium/usage/tasks.py @@ -1,9 +1,8 @@ from django.conf import settings -from baserow_premium.usage.handler import PremiumUsageHandler - from baserow.config.celery import app from baserow.core.handler import 
CoreHandler +from baserow_premium.usage.handler import PremiumUsageHandler @app.task(queue=settings.BASEROW_ROLE_USAGE_QUEUE) diff --git a/premium/backend/src/baserow_premium/views/actions.py b/premium/backend/src/baserow_premium/views/actions.py index f08ed688e9..09b396608f 100644 --- a/premium/backend/src/baserow_premium/views/actions.py +++ b/premium/backend/src/baserow_premium/views/actions.py @@ -3,8 +3,6 @@ from django.contrib.auth.models import AbstractUser from django.utils.translation import gettext_lazy as _ -from baserow_premium.views.models import CalendarView - from baserow.contrib.database.action.scopes import ( VIEW_ACTION_CONTEXT, ViewActionScopeType, @@ -17,6 +15,7 @@ ActionTypeDescription, UndoableActionType, ) +from baserow_premium.views.models import CalendarView ICAL_SLUG_FIELD = "ical_slug" diff --git a/premium/backend/src/baserow_premium/views/decorator_types.py b/premium/backend/src/baserow_premium/views/decorator_types.py index dc0fb032cb..15df7086f3 100644 --- a/premium/backend/src/baserow_premium/views/decorator_types.py +++ b/premium/backend/src/baserow_premium/views/decorator_types.py @@ -1,8 +1,7 @@ +from baserow.contrib.database.views.registries import DecoratorType from baserow_premium.license.features import PREMIUM from baserow_premium.license.handler import LicenseHandler -from baserow.contrib.database.views.registries import DecoratorType - class PremiumDecoratorType(DecoratorType): def before_create_decoration(self, view, user): diff --git a/premium/backend/src/baserow_premium/views/decorator_value_provider_types.py b/premium/backend/src/baserow_premium/views/decorator_value_provider_types.py index dd7ee2d5c6..4db9dda016 100644 --- a/premium/backend/src/baserow_premium/views/decorator_value_provider_types.py +++ b/premium/backend/src/baserow_premium/views/decorator_value_provider_types.py @@ -1,7 +1,6 @@ from typing import Any, Callable, Dict, Optional, Set, Tuple from uuid import uuid4 -from baserow_premium.license.handler 
import LicenseHandler from loguru import logger from baserow.contrib.database.fields.field_types import SingleSelectFieldType @@ -11,6 +10,7 @@ DecoratorValueProviderType, view_filter_type_registry, ) +from baserow_premium.license.handler import LicenseHandler from ..license.features import PREMIUM from .decorator_types import BackgroundColorDecoratorType, LeftBorderColorDecoratorType diff --git a/premium/backend/src/baserow_premium/views/form_view_mode_types.py b/premium/backend/src/baserow_premium/views/form_view_mode_types.py index fd712b8e6d..e625e441e5 100644 --- a/premium/backend/src/baserow_premium/views/form_view_mode_types.py +++ b/premium/backend/src/baserow_premium/views/form_view_mode_types.py @@ -1,8 +1,7 @@ +from baserow.contrib.database.views.registries import FormViewModeType from baserow_premium.license.features import PREMIUM from baserow_premium.license.handler import LicenseHandler -from baserow.contrib.database.views.registries import FormViewModeType - class FormViewModeTypeSurvey(FormViewModeType): type = "survey" diff --git a/premium/backend/src/baserow_premium/views/handler.py b/premium/backend/src/baserow_premium/views/handler.py index 1f782d5aa2..9d7facac80 100644 --- a/premium/backend/src/baserow_premium/views/handler.py +++ b/premium/backend/src/baserow_premium/views/handler.py @@ -6,9 +6,6 @@ from django.contrib.auth.models import AbstractUser from django.db.models import Count, Q, QuerySet -from baserow_premium.views.exceptions import CalendarViewHasNoDateField -from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL, TimelineView -from baserow_premium.views.view_types import TimelineViewType from rest_framework.request import Request from baserow.contrib.database.api.views.utils import ( @@ -26,6 +23,9 @@ view_ownership_type_registry, view_type_registry, ) +from baserow_premium.views.exceptions import CalendarViewHasNoDateField +from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL, TimelineView +from 
baserow_premium.views.view_types import TimelineViewType def get_rows_grouped_by_single_select_field( diff --git a/premium/backend/src/baserow_premium/views/view_ownership_types.py b/premium/backend/src/baserow_premium/views/view_ownership_types.py index a48c046d4a..63dca57c79 100644 --- a/premium/backend/src/baserow_premium/views/view_ownership_types.py +++ b/premium/backend/src/baserow_premium/views/view_ownership_types.py @@ -1,8 +1,5 @@ from django.contrib.auth.models import AbstractUser -from baserow_premium.license.features import PREMIUM -from baserow_premium.license.handler import LicenseHandler - from baserow.contrib.database.table.operations import ( CreateRowDatabaseTableOperationType, ) @@ -16,6 +13,8 @@ from baserow.core.exceptions import PermissionDenied from baserow.core.handler import CoreHandler from baserow.core.models import Workspace +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler class PersonalViewOwnershipType(ViewOwnershipType): diff --git a/premium/backend/src/baserow_premium/views/view_types.py b/premium/backend/src/baserow_premium/views/view_types.py index 75d9026f57..d65841b438 100644 --- a/premium/backend/src/baserow_premium/views/view_types.py +++ b/premium/backend/src/baserow_premium/views/view_types.py @@ -6,21 +6,6 @@ from django.db.models import Q from django.urls import include, path -from baserow_premium.api.views.calendar.serializers import ( - CalendarViewFieldOptionsSerializer, -) -from baserow_premium.api.views.kanban.errors import ( - ERROR_KANBAN_VIEW_FIELD_DOES_NOT_BELONG_TO_SAME_TABLE, -) -from baserow_premium.api.views.kanban.serializers import ( - KanbanViewFieldOptionsSerializer, -) -from baserow_premium.api.views.timeline.errors import ( - ERROR_TIMELINE_VIEW_HAS_INVALID_DATE_SETTINGS, -) -from baserow_premium.api.views.timeline.serializers import ( - TimelineViewFieldOptionsSerializer, -) from rest_framework.fields import BooleanField, CharField from 
rest_framework.serializers import PrimaryKeyRelatedField @@ -43,6 +28,21 @@ from baserow.contrib.database.views.models import View from baserow.contrib.database.views.registries import ViewType from baserow.core.registries import ImportExportConfig +from baserow_premium.api.views.calendar.serializers import ( + CalendarViewFieldOptionsSerializer, +) +from baserow_premium.api.views.kanban.errors import ( + ERROR_KANBAN_VIEW_FIELD_DOES_NOT_BELONG_TO_SAME_TABLE, +) +from baserow_premium.api.views.kanban.serializers import ( + KanbanViewFieldOptionsSerializer, +) +from baserow_premium.api.views.timeline.errors import ( + ERROR_TIMELINE_VIEW_HAS_INVALID_DATE_SETTINGS, +) +from baserow_premium.api.views.timeline.serializers import ( + TimelineViewFieldOptionsSerializer, +) from .exceptions import ( KanbanViewFieldDoesNotBelongToSameTable, @@ -212,9 +212,9 @@ def import_serialized( field_option_object = KanbanViewFieldOptions.objects.create( kanban_view=kanban_view, **field_option_copy ) - id_mapping["database_kanban_view_field_options"][ - field_option_id - ] = field_option_object.id + id_mapping["database_kanban_view_field_options"][field_option_id] = ( + field_option_object.id + ) return kanban_view @@ -462,9 +462,9 @@ def import_serialized( field_option_object = CalendarViewFieldOptions.objects.create( calendar_view=calendar_view, **field_option_copy ) - id_mapping["database_calendar_view_field_options"][ - field_option_id - ] = field_option_object.id + id_mapping["database_calendar_view_field_options"][field_option_id] = ( + field_option_object.id + ) return calendar_view @@ -684,18 +684,14 @@ def prepare_values(self, values, table, user): start_date_field_value = values.get("start_date_field", None) if start_date_field_value is not None: - values[ - "start_date_field" - ] = start_date_field_value = self.prepare_date_field_value( - start_date_field_value, table.id + values["start_date_field"] = start_date_field_value = ( + 
self.prepare_date_field_value(start_date_field_value, table.id) ) end_date_field_value = values.get("end_date_field", None) if end_date_field_value is not None: - values[ - "end_date_field" - ] = end_date_field_value = self.prepare_date_field_value( - end_date_field_value, table.id + values["end_date_field"] = end_date_field_value = ( + self.prepare_date_field_value(end_date_field_value, table.id) ) if ( @@ -786,9 +782,9 @@ def import_serialized( field_option_object = TimelineViewFieldOptions.objects.create( timeline_view=timeline_view, **field_option_copy ) - id_mapping["database_timeline_view_field_options"][ - field_option_id - ] = field_option_object.id + id_mapping["database_timeline_view_field_options"][field_option_id] = ( + field_option_object.id + ) return timeline_view diff --git a/premium/backend/src/baserow_premium/ws/row_comments/signals.py b/premium/backend/src/baserow_premium/ws/row_comments/signals.py index 7d4c71ea03..a2591fb4ad 100644 --- a/premium/backend/src/baserow_premium/ws/row_comments/signals.py +++ b/premium/backend/src/baserow_premium/ws/row_comments/signals.py @@ -1,11 +1,10 @@ from django.db import transaction from django.dispatch import receiver -from baserow_premium.api.row_comments.serializers import RowCommentSerializer -from baserow_premium.row_comments import signals as row_comment_signals - from baserow.ws.registries import page_registry from baserow.ws.tasks import broadcast_to_users +from baserow_premium.api.row_comments.serializers import RowCommentSerializer +from baserow_premium.row_comments import signals as row_comment_signals @receiver(row_comment_signals.row_comment_created) diff --git a/premium/backend/tests/baserow_premium_tests/api/dashboard/test_chart_widget_type_views.py b/premium/backend/tests/baserow_premium_tests/api/dashboard/test_chart_widget_type_views.py index 03b423460a..00f6a53294 100644 --- a/premium/backend/tests/baserow_premium_tests/api/dashboard/test_chart_widget_type_views.py +++ 
b/premium/backend/tests/baserow_premium_tests/api/dashboard/test_chart_widget_type_views.py @@ -1,15 +1,15 @@ from django.test.utils import override_settings import pytest +from rest_framework.reverse import reverse +from rest_framework.status import HTTP_200_OK + +from baserow.test_utils.helpers import AnyInt from baserow_premium.api.fields.exceptions import HTTP_400_BAD_REQUEST from baserow_premium.dashboard.widgets.models import ChartSeriesConfig from baserow_premium.integrations.local_baserow.models import ( LocalBaserowTableServiceAggregationSeries, ) -from rest_framework.reverse import reverse -from rest_framework.status import HTTP_200_OK - -from baserow.test_utils.helpers import AnyInt @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/api/dashboard/test_grouped_aggregate_rows_data_source_type.py b/premium/backend/tests/baserow_premium_tests/api/dashboard/test_grouped_aggregate_rows_data_source_type.py index cae30f24c0..71b4fdfd45 100644 --- a/premium/backend/tests/baserow_premium_tests/api/dashboard/test_grouped_aggregate_rows_data_source_type.py +++ b/premium/backend/tests/baserow_premium_tests/api/dashboard/test_grouped_aggregate_rows_data_source_type.py @@ -1,10 +1,4 @@ import pytest -from baserow_premium.integrations.local_baserow.models import ( - LocalBaserowGroupedAggregateRows, - LocalBaserowTableServiceAggregationGroupBy, - LocalBaserowTableServiceAggregationSeries, - LocalBaserowTableServiceAggregationSortBy, -) from rest_framework.reverse import reverse from rest_framework.status import HTTP_200_OK @@ -14,6 +8,12 @@ from baserow.core.formula.field import BASEROW_FORMULA_VERSION_INITIAL from baserow.core.formula.types import BASEROW_FORMULA_MODE_SIMPLE from baserow.test_utils.helpers import AnyDict, AnyInt +from baserow_premium.integrations.local_baserow.models import ( + LocalBaserowGroupedAggregateRows, + LocalBaserowTableServiceAggregationGroupBy, + LocalBaserowTableServiceAggregationSeries, + 
LocalBaserowTableServiceAggregationSortBy, +) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/api/dashboard/test_pie_chart_widget_type_views.py b/premium/backend/tests/baserow_premium_tests/api/dashboard/test_pie_chart_widget_type_views.py index fb91ee3d2b..e3f5abc5a1 100644 --- a/premium/backend/tests/baserow_premium_tests/api/dashboard/test_pie_chart_widget_type_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/dashboard/test_pie_chart_widget_type_views.py @@ -1,15 +1,15 @@ from django.test.utils import override_settings import pytest +from rest_framework.reverse import reverse +from rest_framework.status import HTTP_200_OK + +from baserow.test_utils.helpers import AnyInt from baserow_premium.api.fields.exceptions import HTTP_400_BAD_REQUEST from baserow_premium.dashboard.widgets.models import PieChartSeriesConfig from baserow_premium.integrations.local_baserow.models import ( LocalBaserowTableServiceAggregationSeries, ) -from rest_framework.reverse import reverse -from rest_framework.status import HTTP_200_OK - -from baserow.test_utils.helpers import AnyInt @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/api/fields/test_generate_formula_prompt.py b/premium/backend/tests/baserow_premium_tests/api/fields/test_generate_formula_prompt.py index 806b4ffc25..872d67e3ff 100644 --- a/premium/backend/tests/baserow_premium_tests/api/fields/test_generate_formula_prompt.py +++ b/premium/backend/tests/baserow_premium_tests/api/fields/test_generate_formula_prompt.py @@ -1,6 +1,5 @@ -from baserow_premium.prompts import get_formula_docs - from baserow.contrib.database.formula.registries import formula_function_registry +from baserow_premium.prompts import get_formula_docs def test_if_prompt_contains_all_formula_functions(): diff --git a/premium/backend/tests/baserow_premium_tests/api/license/test_premium_license_views.py 
b/premium/backend/tests/baserow_premium_tests/api/license/test_premium_license_views.py index 11e66bddf1..5d495be29c 100644 --- a/premium/backend/tests/baserow_premium_tests/api/license/test_premium_license_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/license/test_premium_license_views.py @@ -5,7 +5,6 @@ import pytest import responses -from baserow_premium.license.models import License, LicenseUser from freezegun import freeze_time from rest_framework.status import ( HTTP_200_OK, @@ -15,6 +14,8 @@ HTTP_404_NOT_FOUND, ) +from baserow_premium.license.models import License, LicenseUser + VALID_ONE_SEAT_LICENSE = ( # id: "1", instance_id: "1" b"eyJ2ZXJzaW9uIjogMSwgImlkIjogIjEiLCAidmFsaWRfZnJvbSI6ICIyMDIxLTA4LTI5VDE5OjUyOjU3" diff --git a/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comment_trashable_type.py b/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comment_trashable_type.py index b411433eac..17be0662e9 100644 --- a/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comment_trashable_type.py +++ b/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comment_trashable_type.py @@ -1,13 +1,13 @@ from django.test.utils import override_settings import pytest -from baserow_premium.row_comments.handler import RowCommentHandler -from baserow_premium.row_comments.models import RowComment -from baserow_premium.row_comments.trash_types import RowCommentTrashableItemType from baserow.contrib.database.rows.handler import RowHandler from baserow.core.trash.exceptions import CannotRestoreChildBeforeParent from baserow.core.trash.handler import TrashHandler +from baserow_premium.row_comments.handler import RowCommentHandler +from baserow_premium.row_comments.models import RowComment +from baserow_premium.row_comments.trash_types import RowCommentTrashableItemType @pytest.mark.django_db diff --git 
a/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comments_views.py b/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comments_views.py index 78648c7dbc..4c7030ad5c 100644 --- a/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comments_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/row_comments/test_row_comments_views.py @@ -3,8 +3,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.row_comments.handler import RowCommentsNotificationModes -from baserow_premium.row_comments.models import RowComment from freezegun import freeze_time from rest_framework.reverse import reverse from rest_framework.status import ( @@ -19,6 +17,8 @@ from baserow.core.models import TrashEntry from baserow.core.trash.handler import TrashHandler from baserow.test_utils.helpers import AnyInt +from baserow_premium.row_comments.handler import RowCommentsNotificationModes +from baserow_premium.row_comments.models import RowComment @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/api/views/views/test_calendar_views.py b/premium/backend/tests/baserow_premium_tests/api/views/views/test_calendar_views.py index eeb68cb110..b0e7da30ff 100644 --- a/premium/backend/tests/baserow_premium_tests/api/views/views/test_calendar_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/views/views/test_calendar_views.py @@ -11,7 +11,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.views.models import CalendarView, CalendarViewFieldOptions from freezegun import freeze_time from icalendar import Calendar from rest_framework.response import Response @@ -35,6 +34,7 @@ is_dict_subset, setup_interesting_test_table, ) +from baserow_premium.views.models import CalendarView, CalendarViewFieldOptions def get_list_url(calendar_view_id: int) -> str: @@ -268,7 +268,7 @@ def test_list_all_rows(api_client, premium_data_fixture): 
datetime_from = datetime(2023, 1, 1) datetime_to = datetime(2023, 2, 1) queryparams_timestamps = ( - f"?from_timestamp={str(datetime_from)}" f"&to_timestamp={str(datetime_to)}" + f"?from_timestamp={str(datetime_from)}&to_timestamp={str(datetime_to)}" ) datetimes = [ datetime(2022, 12, 31), # not in range @@ -354,7 +354,7 @@ def test_list_all_rows_limit_offset(api_client, premium_data_fixture): datetime_from = datetime(2023, 1, 1) datetime_to = datetime(2023, 2, 1) queryparams_timestamps = ( - f"?from_timestamp={str(datetime_from)}" f"&to_timestamp={str(datetime_to)}" + f"?from_timestamp={str(datetime_from)}&to_timestamp={str(datetime_to)}" ) queryparams_limit_offset = f"&limit=3&offset=2" datetimes = [ @@ -442,7 +442,7 @@ def test_list_all_rows_invalid_from_to_timestamp( table=table, date_field=date_field ) queryparams_timestamps = ( - f"?from_timestamp={from_timestamp}" f"&to_timestamp={to_timestamp}" + f"?from_timestamp={from_timestamp}&to_timestamp={to_timestamp}" ) url = ( @@ -481,7 +481,7 @@ def test_list_all_rows_invalid_limit_offset( table=table, date_field=date_field ) - queryparams = f"?limit={limit}" f"&offset={offset}" + queryparams = f"?limit={limit}&offset={offset}" url = ( reverse("api:database:views:calendar:list", kwargs={"view_id": calendar.id}) @@ -825,7 +825,7 @@ def test_invalid_user_timezone_returns_error(api_client, premium_data_fixture): response = api_client.get( get_list_url(calendar.id) + "&user_timezone=NONSENSE", - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -868,7 +868,7 @@ def test_too_wide_timerange_returns_error(api_client, premium_data_fixture): reverse("api:database:views:calendar:list", kwargs={"view_id": calendar.id}) + "?" 
+ urlencode(queryparams_timestamps), - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -1180,7 +1180,9 @@ def test_list_public_rows_limit_offset(api_client, premium_data_fixture): row = RowHandler().create_row( user, table, - values={f"field_{date_field.id}": f"2023-01-{day+10} {hour+10}:00"}, + values={ + f"field_{date_field.id}": f"2023-01-{day + 10} {hour + 10}:00" + }, ) response = api_client.get( @@ -1868,7 +1870,7 @@ def make_values(num): req_get = partial(api_client.get, format="json", HTTP_AUTHORIZATION=f"JWT {token}") params = { "from_timestamp": f"{start.isoformat()}", - "to_timestamp": f"{(start + timedelta(days=2+NUM_EVENTS)).isoformat()}", + "to_timestamp": f"{(start + timedelta(days=2 + NUM_EVENTS)).isoformat()}", } field_title_name = f"field_{field_title.id}" @@ -2038,7 +2040,7 @@ def make_values(num): req_get = partial(api_client.get, format="json", HTTP_AUTHORIZATION=f"JWT {token}") params = { "from_timestamp": f"{start.isoformat()}", - "to_timestamp": f"{(start + timedelta(days=2+NUM_EVENTS)).isoformat()}", + "to_timestamp": f"{(start + timedelta(days=2 + NUM_EVENTS)).isoformat()}", } params_filter_invalid = { @@ -2178,7 +2180,7 @@ def make_values(num): req_get = partial(api_client.get, format="json", HTTP_AUTHORIZATION=f"JWT {token}") params = { "from_timestamp": f"{start.isoformat()}", - "to_timestamp": f"{(start + timedelta(days=2+NUM_EVENTS)).isoformat()}", + "to_timestamp": f"{(start + timedelta(days=2 + NUM_EVENTS)).isoformat()}", } field_title_name = f"field_{field_title.id}" field_description_name = f"field_{field_description.id}" diff --git a/premium/backend/tests/baserow_premium_tests/api/views/views/test_kanban_views.py b/premium/backend/tests/baserow_premium_tests/api/views/views/test_kanban_views.py index bdcf506b09..ead62baf08 100644 --- 
a/premium/backend/tests/baserow_premium_tests/api/views/views/test_kanban_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/views/views/test_kanban_views.py @@ -4,7 +4,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.views.models import KanbanView, KanbanViewFieldOptions from rest_framework.status import ( HTTP_200_OK, HTTP_400_BAD_REQUEST, @@ -23,6 +22,7 @@ from baserow.core.action.handler import ActionHandler from baserow.core.action.registries import action_type_registry from baserow.test_utils.helpers import assert_undo_redo_actions_are_valid +from baserow_premium.views.models import KanbanView, KanbanViewFieldOptions @pytest.mark.django_db @@ -261,7 +261,7 @@ def test_list_with_specific_select_options(api_client, premium_data_fixture): url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban.id}) response = api_client.get( f"{url}?select_option={option_a.id}", - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -272,7 +272,7 @@ def test_list_with_specific_select_options(api_client, premium_data_fixture): url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban.id}) response = api_client.get( f"{url}?select_option=null", - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -283,7 +283,7 @@ def test_list_with_specific_select_options(api_client, premium_data_fixture): url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban.id}) response = api_client.get( f"{url}?select_option={option_a.id}&select_option=null", - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -359,7 +359,7 @@ def 
test_list_all_rows_with_limit_and_offset(api_client, premium_data_fixture): url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban.id}) response = api_client.get( f"{url}?select_option={option_a.id},1,1", - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -371,7 +371,7 @@ def test_list_all_rows_with_limit_and_offset(api_client, premium_data_fixture): url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban.id}) response = api_client.get( f"{url}?select_option={option_a.id},1,1&select_option=null,2,0", - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -387,7 +387,7 @@ def test_list_all_rows_with_limit_and_offset(api_client, premium_data_fixture): url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban.id}) response = api_client.get( f"{url}?select_option={option_a.id},2,0&select_option=null&limit=1&offset=1", - **{"HTTP_AUTHORIZATION": f"JWT" f" {token}"}, + **{"HTTP_AUTHORIZATION": f"JWT {token}"}, ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -697,7 +697,7 @@ def test_list_kanban_rows_adhoc_filtering_query_param_filter( url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban_view.id}) get_params = [f"filter__field_{text_field.id}__contains=a"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -711,7 +711,7 @@ def test_list_kanban_rows_adhoc_filtering_query_param_filter( f"filter_type=OR", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", 
HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -720,7 +720,7 @@ def test_list_kanban_rows_adhoc_filtering_query_param_filter( url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban_view.id}) get_params = [f"filter__field_{text_field_hidden.id}__contains=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -729,7 +729,7 @@ def test_list_kanban_rows_adhoc_filtering_query_param_filter( url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban_view.id}) get_params = [f"filter__field_{text_field.id}__random=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -738,7 +738,7 @@ def test_list_kanban_rows_adhoc_filtering_query_param_filter( url = reverse("api:database:views:kanban:list", kwargs={"view_id": kanban_view.id}) get_params = [f"filter__field_{text_field.id}__higher_than=1"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -817,7 +817,7 @@ def test_list_kanban_rows_adhoc_filtering_invalid_advanced_filters( for filters, error_detail in expected_errors: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -876,7 +876,7 @@ def 
test_list_kanban_rows_adhoc_filtering_advanced_filters_are_preferred_to_othe f"filter_type=AND", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -935,7 +935,7 @@ def test_list_kanban_rows_adhoc_filtering_overrides_existing_filters( "filters=" + json.dumps(advanced_filters), ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -991,7 +991,7 @@ def test_list_kanban_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -1020,7 +1020,7 @@ def test_list_kanban_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -1061,7 +1061,7 @@ def test_list_kanban_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -1079,7 +1079,7 @@ def test_list_kanban_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + 
json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -1097,7 +1097,7 @@ def test_list_kanban_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -1115,7 +1115,7 @@ def test_list_kanban_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -1128,7 +1128,7 @@ def test_list_kanban_rows_adhoc_filtering_advanced_filters( ]: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -2290,7 +2290,7 @@ def test_list_rows_public_with_query_param_filter(api_client, premium_data_fixtu "api:database:views:kanban:public_rows", kwargs={"slug": kanban_view.slug} ) get_params = [f"filter__field_{public_field.id}__contains=a"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert response_json["rows"]["null"]["count"] == 1 @@ -2304,25 +2304,25 @@ def 
test_list_rows_public_with_query_param_filter(api_client, premium_data_fixtu f"filter__field_{public_field.id}__contains=b", f"filter_type=OR", ] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert response_json["rows"]["null"]["count"] == 2 get_params = [f"filter__field_{hidden_field.id}__contains=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" get_params = [f"filter__field_{public_field.id}__random=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" get_params = [f"filter__field_{public_field.id}__higher_than=1"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -2367,7 +2367,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert response_json["rows"]["null"]["count"] == 1 @@ -2394,7 +2394,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = 
api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert response_json["rows"]["null"]["count"] == 2 @@ -2433,7 +2433,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert response_json["rows"]["null"]["count"] == 2 @@ -2449,7 +2449,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" @@ -2465,7 +2465,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" @@ -2481,7 +2481,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -2492,7 +2492,7 @@ def test_list_rows_public_with_query_param_advanced_filters( 
json.dumps({"filter_type": "OR", "filters": "invalid"}), ]: get_params = [f"filters={filters}"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTERS_PARAM_VALIDATION_ERROR" diff --git a/premium/backend/tests/baserow_premium_tests/api/views/views/test_premium_views.py b/premium/backend/tests/baserow_premium_tests/api/views/views/test_premium_views.py index 35258b7dc8..9107d2b0f3 100644 --- a/premium/backend/tests/baserow_premium_tests/api/views/views/test_premium_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/views/views/test_premium_views.py @@ -2,14 +2,14 @@ from django.test.utils import override_settings import pytest +from rest_framework.status import HTTP_200_OK, HTTP_402_PAYMENT_REQUIRED + +from baserow.contrib.database.rows.handler import RowHandler from baserow_premium.views.models import ( CalendarViewFieldOptions, KanbanViewFieldOptions, TimelineViewFieldOptions, ) -from rest_framework.status import HTTP_200_OK, HTTP_402_PAYMENT_REQUIRED - -from baserow.contrib.database.rows.handler import RowHandler @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/api/views/views/test_preview_public_view_export.py b/premium/backend/tests/baserow_premium_tests/api/views/views/test_preview_public_view_export.py index 5e5a555a28..a5c46a7a2f 100644 --- a/premium/backend/tests/baserow_premium_tests/api/views/views/test_preview_public_view_export.py +++ b/premium/backend/tests/baserow_premium_tests/api/views/views/test_preview_public_view_export.py @@ -6,7 +6,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.api.views.signers import export_public_view_signer from rest_framework.status import ( HTTP_200_OK, HTTP_400_BAD_REQUEST, @@ -15,6 +14,7 @@ ) from baserow.contrib.database.export.models 
import ExportJob +from baserow_premium.api.views.signers import export_public_view_signer @pytest.mark.django_db @@ -167,7 +167,7 @@ def test_create_public_view_export( file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename) assert file_path.isfile() - expected = "\ufeff" "id\n" + expected = "\ufeffid\n" with open(file_path, "r", encoding="utf-8") as written_file: assert written_file.read() == expected @@ -230,7 +230,7 @@ def test_create_public_view_export_respecting_view_visible_fields( file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename) assert file_path.isfile() - expected = "\ufeff" "id,text_field\n1,Something\n" + expected = "\ufeffid,text_field\n1,Something\n" with open(file_path, "r", encoding="utf-8") as written_file: assert written_file.read() == expected @@ -294,7 +294,7 @@ def test_create_public_view_export_respecting_view_filters( file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename) assert file_path.isfile() - expected = "\ufeff" "id,text_field\n2,world\n" + expected = "\ufeffid,text_field\n2,world\n" with open(file_path, "r", encoding="utf-8") as written_file: assert written_file.read() == expected @@ -367,7 +367,7 @@ def test_create_public_view_export_respecting_ad_hoc_filters( file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename) assert file_path.isfile() - expected = "\ufeff" "id,text_field\n2,world\n" + expected = "\ufeffid,text_field\n2,world\n" with open(file_path, "r", encoding="utf-8") as written_file: assert written_file.read() == expected @@ -480,7 +480,7 @@ def test_create_public_view_export_respecting_ad_hoc_order_by( file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename) assert file_path.isfile() - expected = "\ufeff" "id,text_field\n2,world\n1,hello\n" + expected = "\ufeffid,text_field\n2,world\n1,hello\n" with open(file_path, "r", encoding="utf-8") as written_file: assert written_file.read() == expected @@ -587,7 +587,7 @@ def 
test_create_public_view_export_respecting_include_visible_fields_in_order( file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename) assert file_path.isfile() - expected = "\ufeff" "id,text_field2,text_field\n" + expected = "\ufeffid,text_field2,text_field\n" with open(file_path, "r", encoding="utf-8") as written_file: assert written_file.read() == expected @@ -644,7 +644,7 @@ def test_create_public_view_export_respecting_include_visible_fields_in_order_wr file_path = tmpdir.join(settings.EXPORT_FILES_DIRECTORY, filename) assert file_path.isfile() - expected = "\ufeff" "id,text_field2,text_field\n" + expected = "\ufeffid,text_field2,text_field\n" with open(file_path, "r", encoding="utf-8") as written_file: assert written_file.read() == expected diff --git a/premium/backend/tests/baserow_premium_tests/api/views/views/test_timeline_views.py b/premium/backend/tests/baserow_premium_tests/api/views/views/test_timeline_views.py index a64418ce53..c0b3d7bd78 100644 --- a/premium/backend/tests/baserow_premium_tests/api/views/views/test_timeline_views.py +++ b/premium/backend/tests/baserow_premium_tests/api/views/views/test_timeline_views.py @@ -226,7 +226,7 @@ def test_list_timeline_rows_adhoc_filtering_query_param_filter( ) get_params = [f"filter__field_{text_field.id}__contains=a"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -242,7 +242,7 @@ def test_list_timeline_rows_adhoc_filtering_query_param_filter( f"filter_type=OR", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -253,7 +253,7 @@ def test_list_timeline_rows_adhoc_filtering_query_param_filter( ) get_params = 
[f"filter__field_{text_field_hidden.id}__contains=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -264,7 +264,7 @@ def test_list_timeline_rows_adhoc_filtering_query_param_filter( ) get_params = [f"filter__field_{text_field.id}__random=y"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -275,7 +275,7 @@ def test_list_timeline_rows_adhoc_filtering_query_param_filter( ) get_params = [f"filter__field_{text_field.id}__higher_than=1"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -353,7 +353,7 @@ def test_list_timeline_rows_adhoc_filtering_invalid_advanced_filters( for filters, error_detail in expected_errors: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -408,7 +408,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters_are_preferred_to_ot f"filter_type=AND", ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -463,7 +463,7 @@ def test_list_timeline_rows_adhoc_filtering_overrides_existing_filters( "filters=" + 
json.dumps(advanced_filters), ] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -515,7 +515,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -544,7 +544,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -585,7 +585,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -603,7 +603,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_200_OK @@ -621,7 +621,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - 
f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -639,7 +639,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters( } get_params = ["filters=" + json.dumps(advanced_filters)] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -652,7 +652,7 @@ def test_list_timeline_rows_adhoc_filtering_advanced_filters( ]: get_params = [f"filters={filters}"] response = api_client.get( - f'{url}?{"&".join(get_params)}', HTTP_AUTHORIZATION=f"JWT {token}" + f"{url}?{'&'.join(get_params)}", HTTP_AUTHORIZATION=f"JWT {token}" ) response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST @@ -1290,7 +1290,7 @@ def test_list_rows_public_with_query_param_filter(api_client, premium_data_fixtu "api:database:views:timeline:public_rows", kwargs={"slug": timeline_view.slug} ) get_params = [f"filter__field_{public_field.id}__contains=a"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 1 @@ -1304,7 +1304,7 @@ def test_list_rows_public_with_query_param_filter(api_client, premium_data_fixtu f"filter__field_{public_field.id}__contains=b", f"filter_type=OR", ] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -1313,7 +1313,7 @@ def test_list_rows_public_with_query_param_filter(api_client, premium_data_fixtu 
"api:database:views:timeline:public_rows", kwargs={"slug": timeline_view.slug} ) get_params = [f"filter__field_{hidden_field.id}__contains=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" @@ -1322,7 +1322,7 @@ def test_list_rows_public_with_query_param_filter(api_client, premium_data_fixtu "api:database:views:timeline:public_rows", kwargs={"slug": timeline_view.slug} ) get_params = [f"filter__field_{public_field.id}__random=y"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" @@ -1331,7 +1331,7 @@ def test_list_rows_public_with_query_param_filter(api_client, premium_data_fixtu "api:database:views:timeline:public_rows", kwargs={"slug": timeline_view.slug} ) get_params = [f"filter__field_{public_field.id}__higher_than=1"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -1499,7 +1499,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 1 @@ -1526,7 +1526,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - 
response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -1565,7 +1565,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_200_OK assert len(response_json["results"]) == 2 @@ -1581,7 +1581,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTER_FIELD_NOT_FOUND" @@ -1597,7 +1597,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_DOES_NOT_EXIST" @@ -1613,7 +1613,7 @@ def test_list_rows_public_with_query_param_advanced_filters( ], } get_params = ["filters=" + json.dumps(advanced_filters)] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_VIEW_FILTER_TYPE_UNSUPPORTED_FIELD" @@ -1624,7 +1624,7 @@ def test_list_rows_public_with_query_param_advanced_filters( json.dumps({"filter_type": 
"OR", "filters": "invalid"}), ]: get_params = [f"filters={filters}"] - response = api_client.get(f'{url}?{"&".join(get_params)}') + response = api_client.get(f"{url}?{'&'.join(get_params)}") response_json = response.json() assert response.status_code == HTTP_400_BAD_REQUEST assert response_json["error"] == "ERROR_FILTERS_PARAM_VALIDATION_ERROR" diff --git a/premium/backend/tests/baserow_premium_tests/conftest.py b/premium/backend/tests/baserow_premium_tests/conftest.py index 0d31ab93f0..069f789161 100644 --- a/premium/backend/tests/baserow_premium_tests/conftest.py +++ b/premium/backend/tests/baserow_premium_tests/conftest.py @@ -5,14 +5,14 @@ # noinspection PyUnresolvedReferences import pytest + +from baserow.core.cache import local_cache +from baserow.test_utils.pytest_conftest import * # noqa: F403, F401 from baserow_premium.license.license_types import PremiumLicenseType from baserow_premium.license.plugin import LicensePlugin from baserow_premium.license.registries import LicenseType, license_type_registry from baserow_premium.plugins import PremiumPlugin -from baserow.core.cache import local_cache -from baserow.test_utils.pytest_conftest import * # noqa: F403, F401 - @pytest.fixture def premium_data_fixture(fake, data_fixture): diff --git a/premium/backend/tests/baserow_premium_tests/dashboard/test_chart_widget_type.py b/premium/backend/tests/baserow_premium_tests/dashboard/test_chart_widget_type.py index 678f0ec8b3..5b0e3d06c1 100644 --- a/premium/backend/tests/baserow_premium_tests/dashboard/test_chart_widget_type.py +++ b/premium/backend/tests/baserow_premium_tests/dashboard/test_chart_widget_type.py @@ -3,15 +3,15 @@ from django.test.utils import override_settings import pytest -from baserow_premium.integrations.local_baserow.models import ( - LocalBaserowGroupedAggregateRows, -) from baserow.contrib.dashboard.data_sources.models import DashboardDataSource from baserow.contrib.dashboard.data_sources.service import DashboardDataSourceService from 
baserow.contrib.dashboard.widgets.service import WidgetService from baserow.contrib.dashboard.widgets.trash_types import WidgetTrashableItemType from baserow.core.trash.handler import TrashHandler +from baserow_premium.integrations.local_baserow.models import ( + LocalBaserowGroupedAggregateRows, +) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/dashboard/test_dashboard_application_types_charts.py b/premium/backend/tests/baserow_premium_tests/dashboard/test_dashboard_application_types_charts.py index 0ce047d79a..67837fb71b 100644 --- a/premium/backend/tests/baserow_premium_tests/dashboard/test_dashboard_application_types_charts.py +++ b/premium/backend/tests/baserow_premium_tests/dashboard/test_dashboard_application_types_charts.py @@ -6,13 +6,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.dashboard.widgets.models import ChartSeriesConfig, ChartWidget -from baserow_premium.integrations.local_baserow.models import ( - LocalBaserowGroupedAggregateRows, - LocalBaserowTableServiceAggregationGroupBy, - LocalBaserowTableServiceAggregationSeries, - LocalBaserowTableServiceAggregationSortBy, -) from baserow.contrib.dashboard.application_types import DashboardApplicationType from baserow.contrib.dashboard.data_sources.models import DashboardDataSource @@ -25,6 +18,13 @@ from baserow.core.registries import ImportExportConfig from baserow.core.utils import ChildProgressBuilder, Progress from baserow.test_utils.helpers import AnyInt +from baserow_premium.dashboard.widgets.models import ChartSeriesConfig, ChartWidget +from baserow_premium.integrations.local_baserow.models import ( + LocalBaserowGroupedAggregateRows, + LocalBaserowTableServiceAggregationGroupBy, + LocalBaserowTableServiceAggregationSeries, + LocalBaserowTableServiceAggregationSortBy, +) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/dashboard/test_pie_chart_widget_type.py 
b/premium/backend/tests/baserow_premium_tests/dashboard/test_pie_chart_widget_type.py index 4f38d33996..8e5824bd2e 100644 --- a/premium/backend/tests/baserow_premium_tests/dashboard/test_pie_chart_widget_type.py +++ b/premium/backend/tests/baserow_premium_tests/dashboard/test_pie_chart_widget_type.py @@ -7,14 +7,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.dashboard.widgets.models import ( - PieChartSeriesConfig, - PieChartWidget, -) -from baserow_premium.integrations.local_baserow.models import ( - LocalBaserowGroupedAggregateRows, - LocalBaserowTableServiceAggregationSeries, -) from baserow.contrib.dashboard.application_types import DashboardApplicationType from baserow.contrib.dashboard.data_sources.models import DashboardDataSource @@ -29,6 +21,14 @@ from baserow.core.registries import ImportExportConfig from baserow.core.trash.handler import TrashHandler from baserow.core.utils import ChildProgressBuilder, Progress +from baserow_premium.dashboard.widgets.models import ( + PieChartSeriesConfig, + PieChartWidget, +) +from baserow_premium.integrations.local_baserow.models import ( + LocalBaserowGroupedAggregateRows, + LocalBaserowTableServiceAggregationSeries, +) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/export/test_premium_export_types.py b/premium/backend/tests/baserow_premium_tests/export/test_premium_export_types.py index ca3b909ac3..26cfe096a6 100644 --- a/premium/backend/tests/baserow_premium_tests/export/test_premium_export_types.py +++ b/premium/backend/tests/baserow_premium_tests/export/test_premium_export_types.py @@ -5,7 +5,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.license.exceptions import FeaturesNotAvailableError from openpyxl import load_workbook from baserow.contrib.database.export.handler import ExportHandler @@ -13,6 +12,7 @@ from baserow.contrib.database.rows.handler import RowHandler from baserow.core.storage import 
get_default_storage from baserow.test_utils.helpers import setup_interesting_test_table +from baserow_premium.license.exceptions import FeaturesNotAvailableError @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_filters.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_filters.py index 20659d2e0f..a2e724fa06 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_filters.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_filters.py @@ -513,9 +513,9 @@ def test_ai_field_choice_output_not_compatible_with_text_only_filters( filter_type = view_filter_type_registry.get("length_is_lower_than") is_compatible = filter_type.field_is_compatible(ai_field) - assert ( - not is_compatible - ), "AI field with choice output should NOT be compatible with length_is_lower_than" + assert not is_compatible, ( + "AI field with choice output should NOT be compatible with length_is_lower_than" + ) @pytest.mark.django_db @@ -549,9 +549,9 @@ def test_ai_field_choice_output_is_compatible_with_select_filters( for filter_type_name in select_filter_types: filter_type = view_filter_type_registry.get(filter_type_name) is_compatible = filter_type.field_is_compatible(ai_field) - assert ( - is_compatible - ), f"AI field with choice output should be compatible with {filter_type_name}" + assert is_compatible, ( + f"AI field with choice output should be compatible with {filter_type_name}" + ) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_handler.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_handler.py index 101e1f3fce..06849f0a5a 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_handler.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_handler.py @@ -1,14 +1,14 @@ from unittest.mock import patch import pytest -from baserow_premium.fields.exceptions import 
AiFieldOutputParserException -from baserow_premium.fields.handler import AIFieldHandler from baserow.core.generative_ai.exceptions import ( GenerativeAITypeDoesNotExist, ModelDoesNotBelongToType, ) from baserow.core.generative_ai.registries import generative_ai_model_type_registry +from baserow_premium.fields.exceptions import AiFieldOutputParserException +from baserow_premium.fields.handler import AIFieldHandler @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_output_types.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_output_types.py index 00b5aab6a4..8f1ff9d9a6 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_output_types.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_output_types.py @@ -1,7 +1,6 @@ import enum import pytest -from baserow_premium.fields.ai_field_output_types import get_strict_enum_output_parser from langchain_core.prompts import PromptTemplate from baserow.core.generative_ai.registries import ( @@ -9,6 +8,7 @@ generative_ai_model_type_registry, ) from baserow.core.jobs.handler import JobHandler +from baserow_premium.fields.ai_field_output_types import get_strict_enum_output_parser def test_strict_enum_output_parser(): diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py index 22af6b0410..2e9b19c40e 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_type.py @@ -1,8 +1,6 @@ from django.shortcuts import reverse import pytest -from baserow_premium.fields.field_types import AIFieldType -from baserow_premium.fields.models import AIField from pytest_unordered import unordered from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND @@ -13,6 +11,8 @@ from 
baserow.contrib.database.table.handler import TableHandler from baserow.core.cache import local_cache from baserow.core.db import specific_iterator +from baserow_premium.fields.field_types import AIFieldType +from baserow_premium.fields.models import AIField @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_visitors.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_visitors.py index ec1cd16166..673284a05c 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_visitors.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_visitors.py @@ -1,7 +1,7 @@ import pytest -from baserow_premium.fields.visitors import replace_field_id_references from baserow.core.formula import BaserowFormulaSyntaxError +from baserow_premium.fields.visitors import replace_field_id_references @pytest.mark.field_ai diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_execution.py b/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_execution.py index 7cdad415e9..0f623e9394 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_execution.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_execution.py @@ -1,13 +1,14 @@ """ Tests for GenerateAIValuesJob execution in all modes. 
""" + from unittest.mock import patch import pytest -from baserow_premium.fields.models import GenerateAIValuesJob from baserow.contrib.database.rows.handler import RowHandler from baserow.core.jobs.handler import JobHandler +from baserow_premium.fields.models import GenerateAIValuesJob @pytest.mark.django_db @@ -289,9 +290,9 @@ def test_job_execution_empty_string_vs_null(patched_rows_updated, premium_data_f rows[2].refresh_from_db() value_after_job = getattr(rows[2], field.db_column) # If only_empty works, this should still be "Has value", not the generated value - assert ( - value_after_job == "Has value" - ), f"Expected 'Has value' but got '{value_after_job}'" + assert value_after_job == "Has value", ( + f"Expected 'Has value' but got '{value_after_job}'" + ) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py b/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py index cab76d9f19..e522879915 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py @@ -1,13 +1,13 @@ """ Tests for GenerateAIValuesJob creation, validation, and job limiting. 
""" + from io import BytesIO from unittest.mock import patch from django.test.utils import override_settings import pytest -from baserow_premium.fields.models import GenerateAIValuesJob from baserow.contrib.database.fields.exceptions import FieldDoesNotExist from baserow.contrib.database.fields.handler import FieldHandler @@ -19,6 +19,7 @@ from baserow.core.jobs.handler import JobHandler from baserow.core.storage import get_default_storage from baserow.core.user_files.handler import UserFileHandler +from baserow_premium.fields.models import GenerateAIValuesJob @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/fixtures.py b/premium/backend/tests/baserow_premium_tests/fixtures.py index 7a90676dd0..5703be275e 100644 --- a/premium/backend/tests/baserow_premium_tests/fixtures.py +++ b/premium/backend/tests/baserow_premium_tests/fixtures.py @@ -1,3 +1,6 @@ +from baserow.contrib.database.fields.models import Field +from baserow.core.prosemirror.schema import schema +from baserow.core.prosemirror.utils import prosemirror_doc_from_plain_text from baserow_premium.fields.models import AIField from baserow_premium.license.models import License, LicenseUser from baserow_premium.row_comments.models import RowComment @@ -10,10 +13,6 @@ TimelineViewFieldOptions, ) -from baserow.contrib.database.fields.models import Field -from baserow.core.prosemirror.schema import schema -from baserow.core.prosemirror.utils import prosemirror_doc_from_plain_text - VALID_ONE_SEAT_LICENSE = ( # id: "1", instance_id: "1" b"eyJ2ZXJzaW9uIjogMSwgImlkIjogIjEiLCAidmFsaWRfZnJvbSI6ICIyMDIxLTA4LTI5VDE5OjUyOjU3" @@ -225,9 +224,9 @@ def create_ai_field(self, user=None, create_field=True, **kwargs): kwargs["ai_generative_ai_model"] = "test_1" if "ai_prompt" not in kwargs: - kwargs[ - "ai_prompt" - ] = "'What is your purpose? Answer with a maximum of 10 words.'" + kwargs["ai_prompt"] = ( + "'What is your purpose? 
Answer with a maximum of 10 words.'" + ) field = AIField.objects.create(**kwargs) diff --git a/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py b/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py index 490ff3ddb4..8cf793c592 100644 --- a/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py +++ b/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py @@ -1,13 +1,13 @@ from io import BytesIO import pytest -from baserow_premium.fields.job_types import AIValueGenerator from baserow.contrib.database.rows.handler import RowHandler from baserow.core.generative_ai.exceptions import GenerativeAIPromptError from baserow.core.storage import get_default_storage from baserow.core.user_files.handler import UserFileHandler from baserow.core.utils import Progress +from baserow_premium.fields.job_types import AIValueGenerator @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/generative_ai/test_managers.py b/premium/backend/tests/baserow_premium_tests/generative_ai/test_managers.py index 572e7f7998..dda1bc8460 100644 --- a/premium/backend/tests/baserow_premium_tests/generative_ai/test_managers.py +++ b/premium/backend/tests/baserow_premium_tests/generative_ai/test_managers.py @@ -5,13 +5,13 @@ from django.conf import settings import pytest -from baserow_premium.generative_ai.managers import AIFileManager from baserow.contrib.database.rows.handler import RowHandler from baserow.core.storage import get_default_storage from baserow.core.user_files.handler import UserFileHandler from baserow.test_utils.fixtures.generative_ai import TestGenerativeAIWithFilesModelType from baserow.test_utils.helpers import AnyStr +from baserow_premium.generative_ai.managers import AIFileManager @pytest.mark.django_db diff --git 
a/premium/backend/tests/baserow_premium_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py b/premium/backend/tests/baserow_premium_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py index de917b0dcd..7ee471b584 100644 --- a/premium/backend/tests/baserow_premium_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py +++ b/premium/backend/tests/baserow_premium_tests/integrations/local_baserow/service_types/test_grouped_aggregate_rows_service_type.py @@ -4,15 +4,6 @@ from django.contrib.contenttypes.models import ContentType import pytest -from baserow_premium.integrations.local_baserow.models import ( - LocalBaserowGroupedAggregateRows, - LocalBaserowTableServiceAggregationGroupBy, - LocalBaserowTableServiceAggregationSeries, - LocalBaserowTableServiceAggregationSortBy, -) -from baserow_premium.integrations.local_baserow.service_types import ( - LocalBaserowGroupedAggregateRowsUserServiceType, -) from pytest_unordered import unordered from rest_framework.exceptions import ValidationError @@ -23,6 +14,15 @@ from baserow.core.services.handler import ServiceHandler from baserow.core.services.registries import service_type_registry from baserow.test_utils.pytest_conftest import FakeDispatchContext +from baserow_premium.integrations.local_baserow.models import ( + LocalBaserowGroupedAggregateRows, + LocalBaserowTableServiceAggregationGroupBy, + LocalBaserowTableServiceAggregationSeries, + LocalBaserowTableServiceAggregationSortBy, +) +from baserow_premium.integrations.local_baserow.service_types import ( + LocalBaserowGroupedAggregateRowsUserServiceType, +) def test_grouped_aggregate_rows_service_get_schema_name(): diff --git a/premium/backend/tests/baserow_premium_tests/license/test_license_handler.py b/premium/backend/tests/baserow_premium_tests/license/test_license_handler.py index d8b7f6f06b..ab0da75d84 100644 --- 
a/premium/backend/tests/baserow_premium_tests/license/test_license_handler.py +++ b/premium/backend/tests/baserow_premium_tests/license/test_license_handler.py @@ -7,6 +7,14 @@ import pytest import responses +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import padding +from freezegun import freeze_time +from rest_framework.status import HTTP_200_OK + +from baserow.core.cache import local_cache +from baserow.core.exceptions import IsNotAdminError from baserow_premium.license.exceptions import ( FeaturesNotAvailableError, InvalidLicenseError, @@ -21,14 +29,6 @@ from baserow_premium.license.features import PREMIUM from baserow_premium.license.handler import LicenseHandler from baserow_premium.license.models import License, LicenseUser -from cryptography.exceptions import InvalidSignature -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import padding -from freezegun import freeze_time -from rest_framework.status import HTTP_200_OK - -from baserow.core.cache import local_cache -from baserow.core.exceptions import IsNotAdminError VALID_ONE_SEAT_LICENSE = ( # id: "1", instance_id: "1" diff --git a/premium/backend/tests/baserow_premium_tests/license/test_license_models.py b/premium/backend/tests/baserow_premium_tests/license/test_license_models.py index a99bea19c7..0b33e06302 100644 --- a/premium/backend/tests/baserow_premium_tests/license/test_license_models.py +++ b/premium/backend/tests/baserow_premium_tests/license/test_license_models.py @@ -3,9 +3,10 @@ from django.test.utils import override_settings import pytest -from baserow_premium.license.models import License from freezegun import freeze_time +from baserow_premium.license.models import License + VALID_ONE_SEAT_LICENSE = ( # id: "1", instance_id: "1" b"eyJ2ZXJzaW9uIjogMSwgImlkIjogIjEiLCAidmFsaWRfZnJvbSI6ICIyMDIxLTA4LTI5VDE5OjUyOjU3" diff --git 
a/premium/backend/tests/baserow_premium_tests/license/test_license_tasks.py b/premium/backend/tests/baserow_premium_tests/license/test_license_tasks.py index 50b97d6dc7..3d3631ecc7 100644 --- a/premium/backend/tests/baserow_premium_tests/license/test_license_tasks.py +++ b/premium/backend/tests/baserow_premium_tests/license/test_license_tasks.py @@ -1,6 +1,7 @@ from unittest.mock import patch import pytest + from baserow_premium.license.handler import LicenseHandler diff --git a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_actions.py b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_actions.py index e3053e26ed..5df3f02ad6 100644 --- a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_actions.py +++ b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_actions.py @@ -1,9 +1,9 @@ from django.test.utils import override_settings import pytest -from baserow_premium.row_comments.actions import CreateRowCommentActionType from baserow.core.action.registries import action_type_registry +from baserow_premium.row_comments.actions import CreateRowCommentActionType @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_handler.py b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_handler.py index 89abed3225..1e488d016c 100644 --- a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_handler.py +++ b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_handler.py @@ -3,6 +3,10 @@ from django.test.utils import override_settings import pytest +from freezegun import freeze_time + +from baserow.core.exceptions import UserNotInWorkspace +from baserow.core.handler import CoreHandler from baserow_premium.license.exceptions import FeaturesNotAvailableError from baserow_premium.row_comments.exceptions import ( InvalidRowCommentException, @@ -14,10 +18,6 @@ 
RowCommentsNotificationModes, ) from baserow_premium.row_comments.models import RowComment, RowCommentsNotificationMode -from freezegun import freeze_time - -from baserow.core.exceptions import UserNotInWorkspace -from baserow.core.handler import CoreHandler @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_notification_types.py b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_notification_types.py index 3be756d3aa..86ffa63481 100644 --- a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_notification_types.py +++ b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_notification_types.py @@ -5,14 +5,6 @@ from django.test import override_settings import pytest -from baserow_premium.row_comments.handler import ( - RowCommentHandler, - RowCommentsNotificationModes, -) -from baserow_premium.row_comments.notification_types import ( - RowCommentMentionNotificationType, - RowCommentNotificationType, -) from freezegun import freeze_time from pytest_unordered import unordered from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT @@ -21,6 +13,14 @@ from baserow.core.notifications.handler import NotificationHandler from baserow.core.notifications.models import NotificationRecipient from baserow.test_utils.helpers import AnyInt +from baserow_premium.row_comments.handler import ( + RowCommentHandler, + RowCommentsNotificationModes, +) +from baserow_premium.row_comments.notification_types import ( + RowCommentMentionNotificationType, + RowCommentNotificationType, +) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_trash_types.py b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_trash_types.py index 0a2a92fa18..167bd28367 100644 --- a/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_trash_types.py +++ 
b/premium/backend/tests/baserow_premium_tests/row_comments/test_row_comments_trash_types.py @@ -1,9 +1,9 @@ from django.test.utils import override_settings import pytest -from baserow_premium.row_comments.models import RowComment from baserow.core.trash.handler import TrashHandler +from baserow_premium.row_comments.models import RowComment @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/test_ical_utils.py b/premium/backend/tests/baserow_premium_tests/test_ical_utils.py index 8d703dd52e..d8a5d5efc3 100644 --- a/premium/backend/tests/baserow_premium_tests/test_ical_utils.py +++ b/premium/backend/tests/baserow_premium_tests/test_ical_utils.py @@ -5,6 +5,8 @@ from django.utils.timezone import override import pytest + +from baserow.contrib.database.views.handler import ViewHandler from baserow_premium.ical_utils import ( build_calendar, description_maker, @@ -13,8 +15,6 @@ ) from baserow_premium.views.models import CalendarView -from baserow.contrib.database.views.handler import ViewHandler - NUM_EVENTS = 20 BASEROW_ICAL_VIEW_MAX_EVENTS = 5 diff --git a/premium/backend/tests/baserow_premium_tests/views/test_calendar_view_type.py b/premium/backend/tests/baserow_premium_tests/views/test_calendar_view_type.py index 0e1083648e..7047ba80c6 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_calendar_view_type.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_calendar_view_type.py @@ -9,14 +9,6 @@ from django.urls import reverse import pytest -from baserow_premium.ical_utils import build_calendar -from baserow_premium.views.exceptions import CalendarViewHasNoDateField -from baserow_premium.views.handler import ( - generate_per_day_intervals, - get_rows_grouped_by_date_field, - to_midnight, -) -from baserow_premium.views.models import CalendarView, CalendarViewFieldOptions from icalendar import Calendar from rest_framework.response import Response from rest_framework.status import HTTP_200_OK @@ -36,6 +28,14 @@ 
assert_undo_redo_actions_are_valid, setup_interesting_test_table, ) +from baserow_premium.ical_utils import build_calendar +from baserow_premium.views.exceptions import CalendarViewHasNoDateField +from baserow_premium.views.handler import ( + generate_per_day_intervals, + get_rows_grouped_by_date_field, + to_midnight, +) +from baserow_premium.views.models import CalendarView, CalendarViewFieldOptions @pytest.mark.django_db @@ -1238,7 +1238,7 @@ def make_values(num, for_model, for_fields): uid_url = urlparse(evt.get("uid")) assert uid_url.netloc assert uid_url.path == ( - f"/database/{table.database_id}/table/{table.id}/{calendar_view.id}/row/{idx+1}" + f"/database/{table.database_id}/table/{table.id}/{calendar_view.id}/row/{idx + 1}" ) assert evt.get("summary") diff --git a/premium/backend/tests/baserow_premium_tests/views/test_kanban_view_type.py b/premium/backend/tests/baserow_premium_tests/views/test_kanban_view_type.py index eb830eadd2..8277bdd99c 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_kanban_view_type.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_kanban_view_type.py @@ -7,13 +7,13 @@ from django.test.utils import CaptureQueriesContext import pytest -from baserow_premium.views.exceptions import KanbanViewFieldDoesNotBelongToSameTable -from baserow_premium.views.models import KanbanViewFieldOptions from baserow.contrib.database.fields.handler import FieldHandler from baserow.contrib.database.views.handler import ViewHandler from baserow.contrib.database.views.registries import view_type_registry from baserow.core.registries import ImportExportConfig +from baserow_premium.views.exceptions import KanbanViewFieldDoesNotBelongToSameTable +from baserow_premium.views.models import KanbanViewFieldOptions @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/views/test_premium_form_view_mode_types.py b/premium/backend/tests/baserow_premium_tests/views/test_premium_form_view_mode_types.py index 
01c33c199b..13546b794e 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_premium_form_view_mode_types.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_premium_form_view_mode_types.py @@ -1,10 +1,10 @@ from django.test.utils import override_settings import pytest -from baserow_premium.license.exceptions import FeaturesNotAvailableError from baserow.contrib.database.views.handler import ViewHandler from baserow.contrib.database.views.models import FormView +from baserow_premium.license.exceptions import FeaturesNotAvailableError @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_types.py b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_types.py index 4df0ac072b..2f76dfb77c 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_types.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_types.py @@ -1,10 +1,10 @@ from django.test.utils import override_settings import pytest -from baserow_premium.license.exceptions import FeaturesNotAvailableError from baserow.contrib.database.views.handler import ViewHandler from baserow.contrib.database.views.models import ViewDecoration +from baserow_premium.license.exceptions import FeaturesNotAvailableError @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_value_provider_types.py b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_value_provider_types.py index d734e13592..5a602f7ef8 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_value_provider_types.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_decorator_value_provider_types.py @@ -2,10 +2,6 @@ from django.test.utils import override_settings import pytest -from baserow_premium.license.exceptions import 
FeaturesNotAvailableError -from baserow_premium.views.decorator_value_provider_types import ( - ConditionalColorValueProviderType, -) from rest_framework.status import HTTP_400_BAD_REQUEST from baserow.contrib.database.fields.handler import FieldHandler @@ -14,6 +10,10 @@ from baserow.contrib.database.views.registries import view_type_registry from baserow.core.registries import ImportExportConfig from baserow.test_utils.helpers import AnyStr +from baserow_premium.license.exceptions import FeaturesNotAvailableError +from baserow_premium.views.decorator_value_provider_types import ( + ConditionalColorValueProviderType, +) @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_handler.py b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_handler.py index 5f41522ee5..c3ebae3adc 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_handler.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_handler.py @@ -1,6 +1,4 @@ import pytest -from baserow_premium.views.handler import get_rows_grouped_by_single_select_field -from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL from baserow.contrib.database.views.exceptions import ViewDoesNotExist, ViewNotInTable from baserow.contrib.database.views.handler import ViewHandler @@ -10,6 +8,8 @@ View, ) from baserow.core.exceptions import PermissionDenied +from baserow_premium.views.handler import get_rows_grouped_by_single_select_field +from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_models.py b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_models.py index 507a4a7afd..ab50dcf9d7 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_models.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_models.py @@ -1,6 +1,7 
@@ from django.db import IntegrityError import pytest + from baserow_premium.views.models import ( CalendarViewFieldOptions, KanbanViewFieldOptions, diff --git a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_notification_types.py b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_notification_types.py index 9fa38e01c4..50ec3732a9 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_premium_view_notification_types.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_premium_view_notification_types.py @@ -4,11 +4,11 @@ from django.test.utils import override_settings import pytest -from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL from pytest_unordered import unordered from rest_framework.status import HTTP_200_OK from baserow.contrib.database.views.handler import ViewHandler +from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL @pytest.mark.django_db(transaction=True) diff --git a/premium/backend/tests/baserow_premium_tests/views/test_premium_ws_view_signals.py b/premium/backend/tests/baserow_premium_tests/views/test_premium_ws_view_signals.py index e6d9b00bfa..d2326336a2 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_premium_ws_view_signals.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_premium_ws_view_signals.py @@ -3,12 +3,12 @@ from django.db import transaction import pytest -from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL from baserow.contrib.database.api.views.serializers import ViewSerializer from baserow.contrib.database.views.handler import ViewHandler from baserow.contrib.database.views.models import OWNERSHIP_TYPE_COLLABORATIVE from baserow.contrib.database.views.registries import view_type_registry +from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL @pytest.mark.django_db(transaction=True) diff --git a/premium/backend/tests/baserow_premium_tests/views/test_timeline_view_type.py 
b/premium/backend/tests/baserow_premium_tests/views/test_timeline_view_type.py index d58adc1443..2e95cda6b9 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_timeline_view_type.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_timeline_view_type.py @@ -4,9 +4,6 @@ from django.core.files.storage import FileSystemStorage import pytest -from baserow_premium.views.exceptions import TimelineViewHasInvalidDateSettings -from baserow_premium.views.handler import get_timeline_view_filtered_queryset -from baserow_premium.views.models import TimelineViewFieldOptions from baserow.contrib.database.action.scopes import ViewActionScopeType from baserow.contrib.database.fields.exceptions import ( @@ -24,6 +21,9 @@ assert_undo_redo_actions_are_valid, setup_interesting_test_table, ) +from baserow_premium.views.exceptions import TimelineViewHasInvalidDateSettings +from baserow_premium.views.handler import get_timeline_view_filtered_queryset +from baserow_premium.views.models import TimelineViewFieldOptions @pytest.mark.django_db diff --git a/premium/backend/tests/baserow_premium_tests/views/test_view_change_ownership_type.py b/premium/backend/tests/baserow_premium_tests/views/test_view_change_ownership_type.py index 848a5710ee..9ae1870f32 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_view_change_ownership_type.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_view_change_ownership_type.py @@ -2,10 +2,10 @@ from django.test import override_settings import pytest -from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL from rest_framework.status import HTTP_200_OK from baserow.contrib.database.views.models import OWNERSHIP_TYPE_COLLABORATIVE +from baserow_premium.views.models import OWNERSHIP_TYPE_PERSONAL @override_settings(PERMISSION_MANAGERS=["basic"]) diff --git a/premium/backend/tests/baserow_premium_tests/views/test_view_ownership_type_permission_manager.py 
b/premium/backend/tests/baserow_premium_tests/views/test_view_ownership_type_permission_manager.py index 8efa087219..86643d5a5c 100644 --- a/premium/backend/tests/baserow_premium_tests/views/test_view_ownership_type_permission_manager.py +++ b/premium/backend/tests/baserow_premium_tests/views/test_view_ownership_type_permission_manager.py @@ -1,5 +1,4 @@ import pytest -from baserow_premium.permission_manager import ViewOwnershipPermissionManagerType from baserow.core.registries import object_scope_type_registry, operation_type_registry from baserow_enterprise.role.operations import ( @@ -9,6 +8,7 @@ from baserow_enterprise.views.operations import ( ListenToAllRestrictedViewEventsOperationType, ) +from baserow_premium.permission_manager import ViewOwnershipPermissionManagerType @pytest.mark.view_ownership diff --git a/premium/backend/tests/baserow_premium_tests/ws/test_ws_row_comments_signals.py b/premium/backend/tests/baserow_premium_tests/ws/test_ws_row_comments_signals.py index 782ad06337..369a6ed76b 100644 --- a/premium/backend/tests/baserow_premium_tests/ws/test_ws_row_comments_signals.py +++ b/premium/backend/tests/baserow_premium_tests/ws/test_ws_row_comments_signals.py @@ -3,15 +3,15 @@ from django.test.utils import override_settings import pytest +from freezegun import freeze_time + +from baserow.core.db import transaction_atomic +from baserow.core.trash.handler import TrashHandler from baserow_premium.row_comments.handler import ( RowCommentHandler, RowCommentsNotificationModes, ) from baserow_premium.row_comments.trash_types import RowCommentTrashableItemType -from freezegun import freeze_time - -from baserow.core.db import transaction_atomic -from baserow.core.trash.handler import TrashHandler @pytest.mark.django_db(transaction=True)