Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions backend/pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -52,3 +52,5 @@ markers =
data_sync: All tests related to data sync functionality
replica: All tests related to db replicas
workspace_search: All tests related to workspace search functionality
enable_all_signals: Disables signal deferral for this test (all signals enabled)
enable_signals: Enables specific signals for this test (accepts dotted callable paths)
3 changes: 2 additions & 1 deletion backend/src/baserow/contrib/automation/workflows/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
from baserow.contrib.automation.workflows.types import UpdatedAutomationWorkflow
from baserow.core.cache import global_cache, local_cache
from baserow.core.exceptions import IdDoesNotExist
from baserow.core.psycopg import is_unique_violation_error
from baserow.core.registries import ImportExportConfig
from baserow.core.services.exceptions import DispatchException
from baserow.core.storage import ExportZipFile, get_default_storage
Expand Down Expand Up @@ -247,7 +248,7 @@ def update_workflow(
try:
workflow.save()
except IntegrityError as e:
if "unique constraint" in e.args[0] and "name" in e.args[0]:
if is_unique_violation_error(e) and "name" in str(e):
raise AutomationWorkflowNameNotUnique(
name=workflow.name, automation_id=workflow.automation_id
) from e
Expand Down
11 changes: 9 additions & 2 deletions backend/src/baserow/contrib/builder/domains/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from baserow.core.db import specific_iterator
from baserow.core.exceptions import IdDoesNotExist
from baserow.core.models import Workspace
from baserow.core.psycopg import is_unique_violation_error
from baserow.core.registries import ImportExportConfig, application_type_registry
from baserow.core.storage import get_default_storage
from baserow.core.trash.handler import TrashHandler
Expand Down Expand Up @@ -131,7 +132,13 @@ def create_domain(
prepared_values["domain_name"] = prepared_values["domain_name"].lower()

domain = model_class(builder=builder, order=last_order, **prepared_values)
domain.save()

try:
domain.save()
except IntegrityError as error:
if is_unique_violation_error(error):
raise DomainNameNotUniqueError(prepared_values["domain_name"])
raise error

return domain

Expand Down Expand Up @@ -171,7 +178,7 @@ def update_domain(self, domain: Domain, **kwargs) -> Domain:
try:
domain.save()
except IntegrityError as error:
if "unique" in str(error) and "domain_name" in prepared_values:
if is_unique_violation_error(error):
raise DomainNameNotUniqueError(prepared_values["domain_name"])
raise error

Expand Down
5 changes: 3 additions & 2 deletions backend/src/baserow/contrib/builder/pages/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
)
from baserow.core.cache import global_cache
from baserow.core.exceptions import IdDoesNotExist
from baserow.core.psycopg import is_unique_violation_error
from baserow.core.storage import ExportZipFile
from baserow.core.user_sources.user_source_user import UserSourceUser
from baserow.core.utils import ChildProgressBuilder, MirrorDict, find_unused_name
Expand Down Expand Up @@ -201,9 +202,9 @@ def update_page(self, page: Page, **kwargs) -> Page:
try:
page.save()
except IntegrityError as e:
if "unique constraint" in e.args[0] and "name" in e.args[0]:
if is_unique_violation_error(e) and "name" in e.args[0]:
raise PageNameNotUnique(name=page.name, builder_id=page.builder_id)
if "unique constraint" in e.args[0] and "path" in e.args[0]:
if is_unique_violation_error(e) and "path" in e.args[0]:
raise PagePathNotUnique(path=page.path, builder_id=page.builder_id)
raise e

Expand Down
61 changes: 60 additions & 1 deletion backend/src/baserow/test_utils/pytest_conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import os
import sys
import threading
from contextlib import contextmanager
from contextlib import ExitStack, contextmanager
from datetime import date, datetime
from decimal import Decimal
from functools import partial
Expand Down Expand Up @@ -48,6 +48,22 @@
SKIP_FLAGS = ["disabled-in-ci", "once-per-day-in-ci"]
COMMAND_LINE_FLAG_PREFIX = "--run-"

# Dotted paths of task ``.delay`` dispatchers that are patched out by default
# during tests (consumed by the ``defer_heavy_signals`` autouse fixture).
# Deferring these presumably heavy side effects — websocket broadcasts,
# search-data maintenance, usage/index updates and notification delivery —
# keeps the suite fast; individual tests opt back in with the
# ``enable_all_signals`` / ``enable_signals`` markers.
DEFAULT_DEFERRED_CALLABLES: List[str] = [
    # Websocket broadcast tasks.
    "baserow.ws.tasks.broadcast_to_channel_group.delay",
    "baserow.ws.tasks.broadcast_to_users.delay",
    "baserow.ws.tasks.broadcast_to_permitted_users.delay",
    "baserow.ws.tasks.broadcast_to_group.delay",
    "baserow.ws.tasks.broadcast_to_groups.delay",
    "baserow.ws.tasks.broadcast_application_created.delay",
    # Search-data maintenance tasks.
    "baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
    "baserow.contrib.database.search.tasks.update_search_data.delay",
    # Table usage, notification delivery and view index upkeep.
    "baserow.contrib.database.table.tasks.update_table_usage.delay",
    "baserow.core.notifications.tasks.send_queued_notifications_to_users.delay",
    "baserow.contrib.database.views.tasks.update_view_index.delay",
]


# Provides a new fake instance for each class. Solve uniqueness problem sometimes.
@pytest.fixture(scope="class", autouse=True)
Expand Down Expand Up @@ -153,6 +169,41 @@ def clear_cache():
yield


@pytest.fixture(autouse=True)
def defer_heavy_signals(request):
    """
    Automatically stub out the heavy task dispatchers listed in
    ``DEFAULT_DEFERRED_CALLABLES`` so tests run faster.

    Tests can opt out via markers:

    - ``@pytest.mark.enable_all_signals``: nothing is deferred.
    - ``@pytest.mark.enable_signals("path.to.callable", ...)``: the listed
      dotted callables stay active (an iterable can also be passed via the
      ``signals=`` keyword).
    """

    if request.node.get_closest_marker("enable_all_signals") is not None:
        yield
        return

    deferred = set(DEFAULT_DEFERRED_CALLABLES)

    marker = request.node.get_closest_marker("enable_signals")
    if marker:
        # Accept both positional dotted paths and a `signals=` keyword.
        enabled = set(marker.args)
        enabled.update(marker.kwargs.get("signals") or ())
        deferred.difference_update(enabled)

    if not deferred:
        yield
        return

    def _noop(*args, **kwargs):
        return None

    with ExitStack() as patches:
        for dotted_path in deferred:
            patches.enter_context(patch(dotted_path, _noop))
        yield


@pytest.fixture
def reset_schema(django_db_blocker):
yield
Expand Down Expand Up @@ -498,6 +549,14 @@ def pytest_configure(config):
f"{flag}: mark test so it only runs when the "
f"{COMMAND_LINE_FLAG_PREFIX}{flag} flag is provided to pytest",
)
config.addinivalue_line(
"markers",
"enable_all_signals: Disables signal deferral for this test",
)
config.addinivalue_line(
"markers",
"enable_signals(callable_paths): Enables specific signals for this test",
)
pytest_configure.already_run = True


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -596,3 +596,26 @@ def test_get_workflow_history_permission_error(api_client, data_fixture):
"detail": "You don't have the required permission to execute this operation.",
"error": "PERMISSION_DENIED",
}


@pytest.mark.django_db
def test_rename_workflow_using_existing_workflow_name(api_client, data_fixture):
    """Renaming a workflow to a name already used in the same automation
    must be rejected with ERROR_AUTOMATION_WORKFLOW_NAME_NOT_UNIQUE."""

    user, token = data_fixture.create_user_and_token()
    automation = data_fixture.create_automation_application(user)
    existing = data_fixture.create_automation_workflow(
        user, automation=automation, name="test1", order=1
    )
    renamed = data_fixture.create_automation_workflow(
        user, automation=automation, name="test2", order=2
    )

    response = api_client.patch(
        reverse(API_URL_WORKFLOW_ITEM, kwargs={"workflow_id": renamed.id}),
        {"name": existing.name},
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )

    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_AUTOMATION_WORKFLOW_NAME_NOT_UNIQUE"
Original file line number Diff line number Diff line change
Expand Up @@ -1183,6 +1183,10 @@ def test_dispatch_data_source_with_adhoc_sortings(api_client, data_fixture):


@pytest.mark.django_db(transaction=True)
@pytest.mark.enable_signals(
"baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
"baserow.contrib.database.search.tasks.update_search_data.delay",
)
def test_dispatch_data_source_with_adhoc_search(api_client, data_fixture):
with transaction.atomic():
user, token = data_fixture.create_user_and_token()
Expand Down
21 changes: 21 additions & 0 deletions backend/tests/baserow/contrib/builder/api/pages/test_page_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -709,3 +709,24 @@ def test_delete_shared_page(api_client, data_fixture):

assert response.status_code == HTTP_400_BAD_REQUEST
assert response.json()["error"] == "ERROR_SHARED_PAGE_READ_ONLY"


@pytest.mark.django_db
def test_rename_page_using_existing_page_name(api_client, data_fixture):
    """Renaming a page to a name already used in the same builder must be
    rejected with ERROR_PAGE_NAME_NOT_UNIQUE."""

    user, token = data_fixture.create_user_and_token()
    builder = data_fixture.create_builder_application(user=user)
    existing = data_fixture.create_builder_page(builder=builder, order=1, name="test1")
    renamed = data_fixture.create_builder_page(builder=builder, order=1, name="test2")

    response = api_client.patch(
        reverse("api:builder:pages:item", kwargs={"page_id": renamed.id}),
        {"name": existing.name},
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )

    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_PAGE_NAME_NOT_UNIQUE"
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from baserow.contrib.builder.domains.domain_types import CustomDomainType
from baserow.contrib.builder.domains.exceptions import (
DomainDoesNotExist,
DomainNameNotUniqueError,
DomainNotInBuilder,
)
from baserow.contrib.builder.domains.handler import DomainHandler
Expand Down Expand Up @@ -68,6 +69,21 @@ def test_create_domain(data_fixture):
assert domain.domain_name == "test.com"


@pytest.mark.django_db
def test_create_domain_with_duplicate_name(data_fixture):
    """Creating a second domain with an already-taken name must raise
    DomainNameNotUniqueError carrying the conflicting name."""

    builder = data_fixture.create_builder_application()
    duplicated_name = "test.com"

    DomainHandler().create_domain(
        CustomDomainType(), builder, domain_name=duplicated_name
    )

    with pytest.raises(DomainNameNotUniqueError) as exc_info:
        DomainHandler().create_domain(
            CustomDomainType(), builder, domain_name=duplicated_name
        )

    assert exc_info.value.domain_name == duplicated_name


@pytest.mark.django_db
def test_delete_domain(data_fixture):
domain = data_fixture.create_builder_custom_domain()
Expand All @@ -88,6 +104,24 @@ def test_update_domain(data_fixture):
assert domain.domain_name == "new.com"


@pytest.mark.django_db
def test_update_domain_with_duplicate_name(data_fixture):
    """Updating a domain to an already-taken name must raise
    DomainNameNotUniqueError carrying the conflicting name."""

    builder = data_fixture.create_builder_application()
    domain_to_update = data_fixture.create_builder_custom_domain(
        domain_name="test.com", builder=builder
    )

    taken_name = "other.com"
    DomainHandler().create_domain(CustomDomainType(), builder, domain_name=taken_name)

    with pytest.raises(DomainNameNotUniqueError) as exc_info:
        DomainHandler().update_domain(domain_to_update, domain_name=taken_name)

    assert exc_info.value.domain_name == taken_name


@pytest.mark.django_db
def test_order_domains(data_fixture):
builder = data_fixture.create_builder_application()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3678,6 +3678,10 @@ def test_get_row_adjacent_view_invalid_requests(api_client, data_fixture):

@pytest.mark.django_db(transaction=True)
@pytest.mark.parametrize("search_mode", ALL_SEARCH_MODES)
@pytest.mark.enable_signals(
"baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
"baserow.contrib.database.search.tasks.update_search_data.delay",
)
def test_get_row_adjacent_search(api_client, data_fixture, search_mode):
user, jwt_token = data_fixture.create_user_and_token(
email="test@test.nl", password="password", first_name="Test1"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1249,6 +1249,10 @@ def test_list_rows_public_with_query_param_advanced_filters(api_client, data_fix

@pytest.mark.django_db(transaction=True)
@pytest.mark.parametrize("search_mode", ALL_SEARCH_MODES)
@pytest.mark.enable_signals(
"baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
"baserow.contrib.database.search.tasks.update_search_data.delay",
)
def test_list_rows_public_only_searches_by_visible_columns(
api_client, data_fixture, search_mode
):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4024,6 +4024,10 @@ def test_list_rows_public_filters_by_visible_and_hidden_columns(

@pytest.mark.django_db(transaction=True)
@pytest.mark.parametrize("search_mode", ALL_SEARCH_MODES)
@pytest.mark.enable_signals(
"baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
"baserow.contrib.database.search.tasks.update_search_data.delay",
)
def test_list_rows_public_only_searches_by_visible_columns(
api_client, data_fixture, search_mode
):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1172,6 +1172,9 @@ def test_view_cant_update_allow_public_export(data_fixture, api_client):


@pytest.mark.django_db(transaction=True)
@pytest.mark.enable_signals(
"baserow.contrib.database.views.tasks.update_view_index.delay"
)
def test_loading_a_sortable_view_will_create_an_index(api_client, data_fixture):
user, token = data_fixture.create_user_and_token()
table = data_fixture.create_database_table(user=user)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,11 @@
assert_undo_redo_actions_are_valid,
)

# Module-wide opt-in: keep these ``.delay`` dispatchers active instead of the
# default test-time deferral — tests in this module patch/assert on the
# broadcast/notification tasks directly (e.g. ``broadcast_to_users``).
# NOTE(review): list assumed minimal for these tests — confirm.
pytestmark = pytest.mark.enable_signals(
    "baserow.core.notifications.tasks.send_queued_notifications_to_users.delay",
    "baserow.ws.tasks.broadcast_to_users.delay",
)


@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.tasks.broadcast_to_users.apply")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1185,6 +1185,10 @@ def test_link_row_field_type_api_row_views(api_client, data_fixture):

@pytest.mark.django_db(transaction=True)
@pytest.mark.field_link_row
@pytest.mark.enable_signals(
"baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
"baserow.contrib.database.search.tasks.update_search_data.delay",
)
def test_import_export_link_row_field(data_fixture):
user = data_fixture.create_user()
imported_workspace = data_fixture.create_workspace(user=user)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -832,6 +832,10 @@ def test_multiple_collaborators_field_type_values_can_be_stringified(data_fixtur

@pytest.mark.django_db(transaction=True)
@pytest.mark.field_multiple_collaborators
@pytest.mark.enable_signals(
"baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
"baserow.contrib.database.search.tasks.update_search_data.delay",
)
def test_multiple_collaborators_field_type_values_can_be_searched(data_fixture):
mario = data_fixture.create_user(first_name="Mario")
luigi = data_fixture.create_user(first_name="Luigi")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1163,6 +1163,7 @@ def test_create_rows_action_row_history_with_undo_redo(
)
@pytest.mark.django_db
@pytest.mark.row_history
@pytest.mark.enable_signals("baserow.ws.tasks.broadcast_to_users.delay")
def test_delete_rows_action_row_history_with_undo_redo(
data_fixture, action_type: "ActionType", input_values: Callable
):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,11 @@
from baserow.contrib.database.table.handler import TableHandler
from baserow.core.user_files.handler import UserFileHandler

# Module-wide opt-in: keep the search-data tasks active instead of the default
# test-time deferral — presumably these tests exercise real search updates.
# NOTE(review): confirm both tasks are actually required by every test here.
pytestmark = pytest.mark.enable_signals(
    "baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
    "baserow.contrib.database.search.tasks.update_search_data.delay",
)


@pytest.mark.django_db(transaction=True)
def test_search_compatibility_between_current_and_postgres(data_fixture, tmpdir):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,11 @@
from baserow.core.trash.handler import TrashHandler
from baserow.core.utils import Progress

# Module-wide opt-in: keep the search-data tasks active instead of the default
# test-time deferral — presumably these tests exercise real search updates.
# NOTE(review): confirm both tasks are actually required by every test here.
pytestmark = pytest.mark.enable_signals(
    "baserow.contrib.database.search.tasks.schedule_update_search_data.delay",
    "baserow.contrib.database.search.tasks.update_search_data.delay",
)


def test_escape_query():
# Spacing is standardized.
Expand Down
Loading
Loading