diff --git a/alembic_migration/versions/27bf1b7197a6_add_coverages.py b/alembic_migration/versions/27bf1b7197a6_add_coverages.py new file mode 100644 index 000000000..d068e1b83 --- /dev/null +++ b/alembic_migration/versions/27bf1b7197a6_add_coverages.py @@ -0,0 +1,39 @@ +"""Add coverages + +Revision ID: 335e0bc4df28 +Revises: 6b40cb9c7c3d +Create Date: 2025-11-18 14:15:26.377504 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '335e0bc4df28' +down_revision = '6b40cb9c7c3d' +branch_labels = None +depends_on = None + +def upgrade(): + coverage_type = sa.Enum('fr-idf', 'fr-ne', 'fr-nw', 'fr-se', 'fr-sw', name='coverage_type', schema='guidebook') + op.create_table('coverages', + sa.Column('coverage_type', coverage_type, nullable=True), + sa.Column('document_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['document_id'], ['guidebook.documents.document_id'], ), + sa.PrimaryKeyConstraint('document_id'), + schema='guidebook' + ) + op.create_table('coverages_archives', + sa.Column('coverage_type', coverage_type, nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['id'], ['guidebook.documents_archives.id'], ), + sa.PrimaryKeyConstraint('id'), + schema='guidebook' + ) + + +def downgrade(): + op.drop_table('coverages_archives', schema='guidebook') + op.drop_table('coverages', schema='guidebook') + sa.Enum('fr-idf', 'fr-ne', 'fr-nw', 'fr-se', 'fr-sw', name='coverage_type', schema='guidebook').drop(op.get_bind()) diff --git a/c2corg_api/__init__.py b/c2corg_api/__init__.py index 2d9e2cde8..67105d915 100644 --- a/c2corg_api/__init__.py +++ b/c2corg_api/__init__.py @@ -101,26 +101,31 @@ def configure_anonymous(settings, config): config.registry.anonymous_user_id = account_id +def delete_waypoint_stopareas(connection, waypoint_id): + # Delete existing stopareas for waypoint + delete_relation_query = text( + """ + DELETE FROM guidebook.waypoints_stopareas + WHERE 
waypoint_id = :waypoint_id + """ + ) + + connection.execute( + delete_relation_query, + { + "waypoint_id": waypoint_id, + }, + ) + + @event.listens_for(DocumentGeometry, "after_insert") @event.listens_for(DocumentGeometry, "after_update") def process_new_waypoint(mapper, connection, geometry): """Processes a new waypoint to find its public transports after inserting it into documents_geometries.""" - log.debug("Entering process_new_waypoint callback") + # Check if document is a waypoint waypoint_id = geometry.document_id - max_distance_waypoint_to_stoparea = int( - os.getenv("MAX_DISTANCE_WAYPOINT_TO_STOPAREA") - ) - walking_speed = float(os.getenv("WALKING_SPEED")) - max_stop_area_for_1_waypoint = int(os.getenv("MAX_STOP_AREA_FOR_1_WAYPOINT")) # noqa: E501 - api_key = os.getenv("NAVITIA_API_KEY") - max_duration = int(max_distance_waypoint_to_stoparea / walking_speed) - - # Augmenter le nombre d'arrêts récupérés pour avoir plus de choix (comme dans le bash) # noqa: E501 - max_stop_area_fetched = max_stop_area_for_1_waypoint * 3 - - # Check if document is a waypoint document_type = connection.execute( text( """ @@ -134,6 +139,18 @@ def process_new_waypoint(mapper, connection, geometry): if document_type != "w": return + log.debug("Entering process_new_waypoint callback") + max_distance_waypoint_to_stoparea = int( + os.getenv("MAX_DISTANCE_WAYPOINT_TO_STOPAREA") + ) + walking_speed = float(os.getenv("WALKING_SPEED")) + max_stop_area_for_1_waypoint = int(os.getenv("MAX_STOP_AREA_FOR_1_WAYPOINT")) # noqa: E501 + api_key = os.getenv("NAVITIA_API_KEY") + max_duration = int(max_distance_waypoint_to_stoparea / walking_speed) + + # Augmenter le nombre d'arrêts récupérés pour avoir plus de choix (comme dans le bash) # noqa: E501 + max_stop_area_fetched = max_stop_area_for_1_waypoint * 3 + waypoint_type = connection.execute( text( """ @@ -182,7 +199,8 @@ def process_new_waypoint(mapper, connection, geometry): places_data = places_response.json() if "places_nearby" not in 
places_data or not places_data["places_nearby"]: - log.warning(f"No Navitia stops found for the waypoint {waypoint_id}") + log.warning(f"No Navitia stops found for the waypoint {waypoint_id}; deleting previously registered stops") # noqa: E501 + delete_waypoint_stopareas(connection, waypoint_id) return # --- NOUVEAU : Filtrage par diversité de transport (comme dans bash) --- @@ -226,23 +244,11 @@ def process_new_waypoint(mapper, connection, geometry): known_transports.update(current_stop_transports) selected_count += 1 - # Delete existing stopareas for waypoint - delete_relation_query = text( - """ - DELETE FROM guidebook.waypoints_stopareas - WHERE waypoint_id = :waypoint_id - """ - ) - - connection.execute( - delete_relation_query, - { - "waypoint_id": waypoint_id, - }, - ) - log.warning(f"Selected {selected_count} stops out of {len(places_data['places_nearby'])} for waypoint {waypoint_id}") # noqa: E501 + log.warning("Deleting previously registered stops") + delete_waypoint_stopareas(connection, waypoint_id) + # Traiter uniquement les arrêts sélectionnés for place in selected_stops: stop_id = place["id"] @@ -363,7 +369,7 @@ def calculate_route_duration(mapper, connection, route): jour du script bash. 
""" route_id = route.document_id - log.warn(f"Calculating duration for route ID: {route_id}") + log.warning(f"Calculating duration for route ID: {route_id}") # Récupération des activités et normalisation des dénivelés activities = route.activities if route.activities is not None else [] @@ -440,7 +446,7 @@ def _calculate_climbing_duration(route, height_diff_up, height_diff_down, route_ return None # Pas de données utilisables pour le calcul dm = dp / v_diff - log.warn(f"Calculated climbing route duration for route {route_id} (activity {activity}, no difficulties_height): {dm:.2f} hours") # noqa: E501 + log.warning(f"Calculated climbing route duration for route {route_id} (activity {activity}, no difficulties_height): {dm:.2f} hours") # noqa: E501 return dm # CAS 2: Le dénivelé des difficultés est renseigné @@ -448,7 +454,7 @@ def _calculate_climbing_duration(route, height_diff_up, height_diff_down, route_ # Vérification de cohérence if dp > 0 and d_diff > dp: - log.warn(f"Route {route_id}: Inconsistent difficulties_height ({d_diff}m) > height_diff_up ({dp}m). Returning NULL.") # noqa: E501 + log.warning(f"Route {route_id}: Inconsistent difficulties_height ({d_diff}m) > height_diff_up ({dp}m). 
Returning NULL.") # noqa: E501 return None # Calcul du temps des difficultés @@ -466,7 +472,7 @@ def _calculate_climbing_duration(route, height_diff_up, height_diff_down, route_ # Calcul final selon le cadrage: max(t_diff, t_app) + 0.5 * min(t_diff, t_app) # noqa: E501 dm = max(t_diff, t_app) + 0.5 * min(t_diff, t_app) - log.warn(f"Calculated climbing route duration for route {route_id} (activity {activity}): {dm:.2f} hours (t_diff={t_diff:.2f}, t_app={t_app:.2f})") # noqa: E501 + log.warning(f"Calculated climbing route duration for route {route_id} (activity {activity}): {dm:.2f} hours (t_diff={t_diff:.2f}, t_app={t_app:.2f})") # noqa: E501 return dm @@ -517,7 +523,7 @@ def _calculate_standard_duration(activity, route, height_diff_up, height_diff_do else: dm = (dv / 2) + dh - log.warn(f"Calculated standard route duration for route {route_id} (activity {activity}): {dm:.2f} hours") # noqa: E501 + log.warning(f"Calculated standard route duration for route {route_id} (activity {activity}): {dm:.2f} hours") # noqa: E501 return dm @@ -531,8 +537,9 @@ def _validate_and_convert_duration(min_duration, route_id): or min_duration < min_duration_hours or min_duration > max_duration_hours ): - log.warn( - f"Route {route_id}: Calculated duration ({min_duration:.2f} hours if not None) is out of bounds (min={min_duration_hours}h, max={max_duration_hours}h) or NULL. Setting duration to NULL." # noqa: E501 + min_duration_str = "None" if min_duration is None else f"{min_duration:.2f}" # noqa: E501 + log.warning( + f"Route {route_id}: Calculated duration (min_duration={min_duration_str}) is out of bounds (min={min_duration_hours}h, max={max_duration_hours}h) or NULL. Setting duration to NULL." 
# noqa: E501 ) return None @@ -551,6 +558,6 @@ def _update_route_duration(connection, route_id, calculated_duration_in_days): ), {"duration": calculated_duration_in_days, "route_id": route_id}, ) - log.warn( + log.warning( f"Route {route_id}: Database updated with calculated_duration = {calculated_duration_in_days} days." # noqa: E501 ) diff --git a/c2corg_api/models/__init__.py b/c2corg_api/models/__init__.py index f3f8d4b5c..e6cb04924 100644 --- a/c2corg_api/models/__init__.py +++ b/c2corg_api/models/__init__.py @@ -24,6 +24,7 @@ class BaseMixin(object): # all models, for which tables should be created, must be listed here: from c2corg_api.models import document # noqa +from c2corg_api.models import coverage # noqa from c2corg_api.models import waypoint # noqa from c2corg_api.models import route # noqa from c2corg_api.models import document_history # noqa @@ -59,6 +60,7 @@ class BaseMixin(object): topo_map.MAP_TYPE: topo_map.TopoMap, area.AREA_TYPE: area.Area, outing.OUTING_TYPE: outing.Outing, + coverage.COVERAGE_TYPE: coverage.Coverage, } document_locale_types = { @@ -72,4 +74,5 @@ class BaseMixin(object): topo_map.MAP_TYPE: document.DocumentLocale, area.AREA_TYPE: document.DocumentLocale, outing.OUTING_TYPE: outing.OutingLocale, + coverage.COVERAGE_TYPE: document.DocumentLocale, } diff --git a/c2corg_api/models/common/attributes.py b/c2corg_api/models/common/attributes.py index ecf4dafca..6f21de152 100644 --- a/c2corg_api/models/common/attributes.py +++ b/c2corg_api/models/common/attributes.py @@ -785,3 +785,11 @@ 'highline', 'waterline' ] + +coverage_types = [ + 'fr-idf', + 'fr-ne', + 'fr-nw', + 'fr-se', + 'fr-sw' +] diff --git a/c2corg_api/models/common/document_types.py b/c2corg_api/models/common/document_types.py index d7fb2a854..3425dc919 100644 --- a/c2corg_api/models/common/document_types.py +++ b/c2corg_api/models/common/document_types.py @@ -9,6 +9,7 @@ WAYPOINT_TYPE = 'w' BOOK_TYPE = 'b' XREPORT_TYPE = 'x' +COVERAGE_TYPE = 'v' ALL = [ AREA_TYPE, 
ARTICLE_TYPE, IMAGE_TYPE, MAP_TYPE, OUTING_TYPE, ROUTE_TYPE, diff --git a/c2corg_api/models/common/fields_coverage.py b/c2corg_api/models/common/fields_coverage.py new file mode 100644 index 000000000..4aaacfee9 --- /dev/null +++ b/c2corg_api/models/common/fields_coverage.py @@ -0,0 +1,21 @@ +DEFAULT_FIELDS = [ + 'coverage_type', + 'geometry.geom_detail' +] + +DEFAULT_REQUIRED = [ + 'coverage_type', + 'geometry', + 'geometry.geom_detail' +] + +LISTING_FIELDS = [ + 'coverage_type', + 'geometry.geom_detail' +] + +fields_coverage = { + 'fields': DEFAULT_FIELDS, + 'required': DEFAULT_REQUIRED, + 'listing': LISTING_FIELDS +} diff --git a/c2corg_api/models/common/sortable_search_attributes.py b/c2corg_api/models/common/sortable_search_attributes.py index 946d0b97b..5b72aa0d7 100644 --- a/c2corg_api/models/common/sortable_search_attributes.py +++ b/c2corg_api/models/common/sortable_search_attributes.py @@ -348,3 +348,39 @@ 'slope_40_45': 3, 'slope_gt_45': 4 } + + +search_attr_by_field = { + 'quality': sortable_quality_types, + 'access_time': sortable_access_times, + 'paragliding_rating': sortable_paragliding_ratings, + 'durations': sortable_route_duration_types, + 'ski_rating': sortable_ski_ratings, + 'ski_exposition': sortable_exposition_ratings, + 'labande_ski_rating': sortable_labande_ski_ratings, + 'labande_global_rating': sortable_global_ratings, + 'global_rating': sortable_global_ratings, + 'engagement_rating': sortable_engagement_ratings, + 'risk_rating': sortable_risk_ratings, + 'equipment_rating': sortable_equipment_ratings, + 'ice_rating': sortable_ice_ratings, + 'mixed_rating': sortable_mixed_ratings, + 'exposition_rock_rating': sortable_exposition_rock_ratings, + 'rock_free_rating': sortable_climbing_ratings, + 'rock_required_rating': sortable_climbing_ratings, + 'aid_rating': sortable_aid_ratings, + 'via_ferrata_rating': sortable_via_ferrata_ratings, + 'hiking_rating': sortable_hiking_ratings, + 'hiking_mtb_exposition': sortable_exposition_ratings, + 
'snowshoe_rating': sortable_snowshoe_ratings, + 'mtb_up_rating': sortable_mtb_up_ratings, + 'mtb_down_rating': sortable_mtb_down_ratings, + 'frequentation': sortable_frequentation_types, + 'condition_rating': sortable_condition_ratings, + 'snow_quality': sortable_snow_quality_ratings, + 'snow_quantity': sortable_snow_quality_ratings, + 'glacier_rating': sortable_glacier_ratings, + 'severity': sortable_severities, + 'avalanche_level': sortable_avalanche_levels, + 'avalanche_slope': sortable_avalanche_slopes +} diff --git a/c2corg_api/models/coverage.py b/c2corg_api/models/coverage.py new file mode 100644 index 000000000..17004c7e3 --- /dev/null +++ b/c2corg_api/models/coverage.py @@ -0,0 +1,96 @@ +from c2corg_api.models import Base, schema +from c2corg_api.models.enums import coverage_types +from c2corg_api.models.document import ( + schema_document_locale, + ArchiveDocument, + Document, + get_geometry_schema_overrides, + schema_attributes) +from c2corg_api.models.schema_utils import get_update_schema, \ + get_create_schema, restrict_schema +from c2corg_api.models.utils import copy_attributes +from c2corg_api.models.common.fields_coverage import fields_coverage +from colanderalchemy import SQLAlchemySchemaNode +from sqlalchemy import ( + Column, + Integer, + ForeignKey +) +from c2corg_api.models.common import document_types + +COVERAGE_TYPE = document_types.COVERAGE_TYPE + + +class _CoverageMixin(object): + coverage_type = Column(coverage_types) + + +attributes = ['coverage_type'] + + +class Coverage(_CoverageMixin, Document): + __tablename__ = 'coverages' + + document_id = Column( + Integer, + ForeignKey(schema + '.documents.document_id'), primary_key=True) + + __mapper_args__ = { + 'polymorphic_identity': COVERAGE_TYPE, + 'inherit_condition': Document.document_id == document_id + } + + def to_archive(self): + coverage = ArchiveCoverage() + super(Coverage, self)._to_archive(coverage) + copy_attributes(self, coverage, attributes) + + return coverage + + def 
update(self, other): + super(Coverage, self).update(other) + copy_attributes(other, self, attributes) + + +schema_coverage_locale = schema_document_locale +schema_coverage_attributes = list(schema_attributes) + + +class ArchiveCoverage(_CoverageMixin, ArchiveDocument): + """ + """ + __tablename__ = 'coverages_archives' + + id = Column( + Integer, + ForeignKey(schema + '.documents_archives.id'), primary_key=True) + + __mapper_args__ = { + 'polymorphic_identity': COVERAGE_TYPE, + 'inherit_condition': ArchiveDocument.id == id + } + + __table_args__ = Base.__table_args__ + + +schema_coverage = SQLAlchemySchemaNode( + Coverage, + # whitelisted attributes + includes=schema_coverage_attributes + attributes, + overrides={ + 'document_id': { + 'missing': None + }, + 'version': { + 'missing': None + }, + 'locales': { + 'children': [schema_coverage_locale] + }, + 'geometry': get_geometry_schema_overrides(['POLYGON']) + }) + +schema_create_coverage = get_create_schema(schema_coverage) +schema_update_coverage = get_update_schema(schema_coverage) +schema_listing_coverage = restrict_schema( + schema_coverage, fields_coverage.get('listing')) diff --git a/c2corg_api/models/enums.py b/c2corg_api/models/enums.py index e0eb5a6ac..b57469310 100644 --- a/c2corg_api/models/enums.py +++ b/c2corg_api/models/enums.py @@ -159,3 +159,5 @@ def enum(name, types): 'snow_quality_ratings', attributes.snow_quality_ratings) snow_quantity_ratings = enum( 'snow_quantity_ratings', attributes.snow_quantity_ratings) +coverage_types = enum( + 'coverage_types', attributes.coverage_types) diff --git a/c2corg_api/models/schema_utils.py b/c2corg_api/models/schema_utils.py index 8f30dcd26..3871df4d6 100644 --- a/c2corg_api/models/schema_utils.py +++ b/c2corg_api/models/schema_utils.py @@ -89,6 +89,8 @@ class SchemaAssociations(MappingSchema): Sequence(), SchemaAssociationDoc(), missing=None) outings = SchemaNode( Sequence(), SchemaAssociationDoc(), missing=None) + coverages = SchemaNode( + Sequence(), 
SchemaAssociationDoc(), missing=None) def get_create_schema(document_schema): diff --git a/c2corg_api/scripts/migration/documents/coverage.py b/c2corg_api/scripts/migration/documents/coverage.py new file mode 100644 index 000000000..aa53aa0ed --- /dev/null +++ b/c2corg_api/scripts/migration/documents/coverage.py @@ -0,0 +1,90 @@ +from c2corg_api.models import enums +from c2corg_api.models.coverage import ArchiveCoverage, Coverage, COVERAGE_TYPE +from c2corg_api.models.document import DocumentLocale, ArchiveDocumentLocale, \ + DOCUMENT_TYPE +from c2corg_api.scripts.migration.documents.document import MigrateDocuments, \ + DEFAULT_QUALITY + + +class MigrateCoverages(MigrateDocuments): + + def get_name(self): + return 'coverages' + + def get_model_document(self, locales): + return DocumentLocale if locales else Coverage + + def get_model_archive_document(self, locales): + return ArchiveDocumentLocale if locales else ArchiveCoverage + + def get_document_geometry(self, document_in, version): + return dict( + document_id=document_in.id, + id=document_in.id, + version=version, + geom_detail=document_in.geom + ) + + def get_count_query(self): + return ( + 'select count(*) ' + 'from app_coverages_archives aa join coverages a on aa.id = a.id ' + 'where a.redirects_to is null;' + ) + + def get_query(self): + return ( + 'select ' + ' aa.id, aa.document_archive_id, aa.is_latest_version, ' + ' aa.is_protected, aa.redirects_to, ' + ' ST_Force2D(ST_SetSRID(aa.geom, 3857)) geom, ' + ' aa.coverage_type ' + 'from app_coverages_archives aa join coverages a on aa.id = a.id ' + 'where a.redirects_to is null ' + 'order by aa.id, aa.document_archive_id;' + ) + + def get_count_query_locales(self): + return ( + 'select count(*) ' + 'from app_coverages_i18n_archives aa ' + 'join coverages a on aa.id = a.id ' + 'where a.redirects_to is null;' + ) + + def get_query_locales(self): + return ( + 'select ' + ' aa.id, aa.document_i18n_archive_id, aa.is_latest_version, ' + ' aa.culture, aa.name, 
aa.description ' + 'from app_coverages_i18n_archives aa ' + 'join coverages a on aa.id = a.id ' + 'where a.redirects_to is null ' + 'order by aa.id, aa.culture, aa.document_i18n_archive_id;' + ) + + def get_document(self, document_in, version): + return dict( + document_id=document_in.id, + type=COVERAGE_TYPE, + version=version, + protected=document_in.is_protected, + redirects_to=document_in.redirects_to, + coverage_type=self.convert_type( + document_in.coverage_type, enums.coverage_types), + quality=DEFAULT_QUALITY + ) + + def get_document_locale(self, document_in, version): + description = self.convert_tags(document_in.description) + description, summary = self.extract_summary(description) + return dict( + document_id=document_in.id, + id=document_in.document_i18n_archive_id, + type=DOCUMENT_TYPE, + version=version, + lang=document_in.culture, + title=document_in.name, + description=description, + summary=summary + ) diff --git a/c2corg_api/scripts/migration/migrate.py b/c2corg_api/scripts/migration/migrate.py index 818a03295..9c8b2bdd3 100644 --- a/c2corg_api/scripts/migration/migrate.py +++ b/c2corg_api/scripts/migration/migrate.py @@ -6,6 +6,7 @@ MigrateAreaAssociations from c2corg_api.scripts.migration.climbing_site_routes import \ CreateClimbingSiteRoutes +from c2corg_api.scripts.migration.documents.coverage import MigrateCoverages from c2corg_api.scripts.migration.documents.xreports import MigrateXreports from c2corg_api.scripts.migration.documents.area import MigrateAreas from c2corg_api.scripts.migration.documents.associations import \ @@ -97,6 +98,7 @@ def main(argv=sys.argv): MigrateBooks(connection_source, session, batch_size).migrate() MigrateVersions(connection_source, session, batch_size).migrate() MigrateAssociations(connection_source, session, batch_size).migrate() + MigrateCoverages(connection_source, session, batch_size).migrate() CreateClimbingSiteRoutes(connection_source, session, batch_size).migrate() SetRouteTitlePrefix(connection_source, 
session, batch_size).migrate() SetDefaultGeometries(connection_source, session, batch_size).migrate() diff --git a/c2corg_api/search/__init__.py b/c2corg_api/search/__init__.py index 8b2186bc6..7b81cf5d1 100644 --- a/c2corg_api/search/__init__.py +++ b/c2corg_api/search/__init__.py @@ -9,6 +9,7 @@ from c2corg_api.models.user_profile import USERPROFILE_TYPE from c2corg_api.models.waypoint import WAYPOINT_TYPE from c2corg_api.models.xreport import XREPORT_TYPE +from c2corg_api.models.coverage import COVERAGE_TYPE from c2corg_api.search.mappings.area_mapping import SearchArea from c2corg_api.search.mappings.article_mapping import SearchArticle from c2corg_api.search.mappings.book_mapping import SearchBook @@ -19,6 +20,7 @@ from c2corg_api.search.mappings.user_mapping import SearchUser from c2corg_api.search.mappings.waypoint_mapping import SearchWaypoint from c2corg_api.search.mappings.xreport_mapping import SearchXreport +from c2corg_api.search.mappings.coverage_mapping import SearchCoverage from elasticsearch import Elasticsearch from elasticsearch_dsl import Search from elasticsearch_dsl.connections import connections @@ -114,5 +116,6 @@ def get_text_query_on_title(search_term, search_lang=None): ROUTE_TYPE: SearchRoute, MAP_TYPE: SearchTopoMap, USERPROFILE_TYPE: SearchUser, - WAYPOINT_TYPE: SearchWaypoint + WAYPOINT_TYPE: SearchWaypoint, + COVERAGE_TYPE: SearchCoverage, } diff --git a/c2corg_api/search/mappings/coverage_mapping.py b/c2corg_api/search/mappings/coverage_mapping.py new file mode 100644 index 000000000..2e3a1af04 --- /dev/null +++ b/c2corg_api/search/mappings/coverage_mapping.py @@ -0,0 +1,29 @@ +from c2corg_api.models.coverage import COVERAGE_TYPE, Coverage +from c2corg_api.search.mapping import SearchDocument, BaseMeta +from c2corg_api.search.mapping_types import QueryableMixin, QEnum + + +class SearchCoverage(SearchDocument): + class Meta(BaseMeta): + doc_type = COVERAGE_TYPE + + coverage_type = QEnum('ctyp', model_field=Coverage.coverage_type) + + 
FIELDS = ['coverage_type'] + + @staticmethod + def to_search_document(document, index): + search_document = SearchDocument.to_search_document( + document, index, include_areas=False) + + if document.redirects_to: + return search_document + + SearchDocument.copy_fields( + search_document, document, SearchCoverage.FIELDS) + + return search_document + + +SearchCoverage.queryable_fields = \ + QueryableMixin.get_queryable_fields(SearchCoverage) diff --git a/c2corg_api/search/utils.py b/c2corg_api/search/utils.py index e06b27a6c..681e80c10 100644 --- a/c2corg_api/search/utils.py +++ b/c2corg_api/search/utils.py @@ -1,4 +1,13 @@ +import logging +from sqlalchemy import and_, or_ import re +from shapely.geometry import Polygon +from geoalchemy2.shape import from_shape +from sqlalchemy import nullslast +from geoalchemy2.functions import ST_Intersects, ST_Transform +from c2corg_api.models.utils import ArrayOfEnum + +log = logging.getLogger(__name__) BBCODE_TAGS = [ 'b', 'i', 'u', 's', 'q', 'c', 'sup', 'ind', 'url', 'email', 'acr(onym)?', @@ -31,3 +40,279 @@ def strip_bbcodes(s): def get_title(title, title_prefix): return title_prefix + ' : ' + title if title_prefix else title + + +def build_sqlalchemy_filters( + search_dict, # elastic search dict + document_model, # the model (waypoint, routes, etc...) + # for multicriteria search (ex : searching a waypoint by area id) + filter_map, + # the Geometry model (where ce access to geometry) + geometry_model, # most likely always DocumentGeometry + # the mapper for range enum + range_enum_map, # most likely always search_attr_by_field + # the column for the title + # (ex: Waypoint -> title, Route -> title and title_prefix) + title_columns=None +): + """ + Build SQLAlchemy filter for documents (Waypoint, Route, etc.) 
+ based on filters that would normally be used by ElasticSearch + + this can then be used to filter directly in a DB query + + Usage Example : + + search = build_query(params, meta_params, WAYPOINT_TYPE) + + search_dict = search.to_dict() + + filter_conditions, sort_expressions, needs_locale_join, langs = + build_sqlalchemy_filters( + search_dict=search_dict, + document_model=Waypoint, + filter_map={"areas": Area,}, + geometry_model=DocumentGeometry, + range_enum_map=search_attr_by_field, + title_columns=[DocumentLocale.title] + ) + + query = DBSession.query(Waypoint) + .filter(filter_conditions) + .order_by(*sort_expressions) + + """ + + filters = search_dict.get("query", {}).get("bool", {}).get("filter", []) + if len(filters) > 0: + must_list = search_dict.get("query", {}).get("bool", {}).get("must", []) # noqa + else: + must_list = [search_dict.get("query", {})] + + filter_conditions = [] + needs_locale_join = False + langs = [] + + # corresponds to the elastic search ?q= which looks for title + # use title_columns to specify in which columns to look for + query_value = None + for item in must_list: + mm = item.get("multi_match") + if mm: + query_value = mm.get("query") + break + + if query_value and title_columns: + needs_locale_join = True + like_clauses = [col.ilike(f"%{query_value}%") for col in title_columns] + if len(like_clauses) == 2: + filter_conditions.append(or_(*like_clauses)) + elif len(like_clauses) == 1: + filter_conditions.append(like_clauses[0]) + + # loop over all elastic search filters + for f in filters: + for filter_key, param in f.items(): + + for param_key, param_value in param.items(): + + # available_locales to get langs + if param_key == "available_locales": + langs = param_value if isinstance( + param_value, list) else [param_value] + + # geometry-based filtering -> bbox + elif param_key == "geom": + col = getattr(geometry_model, "geom") + polygon = Polygon([ + (param_value["left"], param_value["bottom"]), + (param_value["right"], 
param_value["bottom"]), + (param_value["right"], param_value["top"]), + (param_value["left"], param_value["top"]), + (param_value["left"], param_value["bottom"]), + ]) + polygon_wkb = from_shape(polygon, srid=4326) + filter_conditions.append(ST_Intersects( + ST_Transform(col, 4326), polygon_wkb)) + + # special cases of documents associated to other doc + elif param_key in filter_map: + col = getattr(filter_map[param_key], "document_id") + if isinstance(param_value, list): + checks = [col == v for v in param_value] + if checks: + or_expr = checks[0] + for check in checks[1:]: + or_expr = or_expr | check + filter_conditions.append(or_expr) + else: + filter_conditions.append(col == param_value) + + # generic attribute filters on the document model + else: + col = getattr(document_model, param_key) + column = col.property.columns[0] + col_type = column.type + + # for range attributes + if filter_key == "range": + filter_conditions.append( + build_range_expression( + col, + param_value, + range_enum_map.get(param_key), + col_type + ) + ) + + # for terms + elif filter_key == "terms": + values = param_value if isinstance( + param_value, (list, tuple)) else [param_value] + filter_conditions.append( + build_terms_expression(col, values, col_type) + ) + + # for term + elif filter_key == "term": + filter_conditions.append( + build_term_expression(col, param_value, col_type) + ) + + else: + continue + + # combine and conditions + final_filter = combine_conditions(filter_conditions) + + # build sort expressions + sort_expressions = build_sort_expressions( + search_dict.get("sort", []), document_model + ) + + # return each valuable variable to be used later in a sql alchemy query + return final_filter, sort_expressions, needs_locale_join, langs + + +def build_range_expression(col, param_value, enum_map, col_type): + """ + build sql alchemy filter for range expressions + """ + gte = param_value.get("gte") + lte = param_value.get("lte") + + # ENUM RANGE (enum_map: value -> number) 
+ if enum_map: + values = [] + if gte is not None and lte is not None: + if gte == lte: + values = [val for val, num in enum_map.items() if num == gte] + else: + values = [val for val, num in enum_map.items() if num >= + gte and num <= lte] + elif gte is not None: + values = [val for val, num in enum_map.items() if num >= gte] + elif lte is not None: + values = [val for val, num in enum_map.items() if num <= lte] + + # if col type is an array of enum + if isinstance(col_type, ArrayOfEnum): + checks = [col.any(v) for v in values] + else: + checks = [col == v for v in values] + + if not checks: + return False + + # build OR by folding with | + or_expr = checks[0] + for check in checks[1:]: + or_expr = or_expr | check + return or_expr + + # NUMERIC RANGE + clauses = [] + if gte is not None: + clauses.append(col >= gte) + if lte is not None: + clauses.append(col <= lte) + if not clauses: + return False + return and_(*clauses) + + +def build_terms_expression(col, values, col_type): + """ + build sql alchemy filter for terms expressions + """ + # normalize values to list/tuple + values = values if isinstance(values, (list, tuple)) else [values] + if not values: + return True + + if isinstance(col_type, ArrayOfEnum): + checks = [col.any(v) for v in values] + if not checks: + return True + # build OR by folding with | + or_expr = checks[0] + for check in checks[1:]: + or_expr = or_expr | check + return or_expr + + # non-array enum + if len(values) == 1: + return col == values[0] + return col.in_(values) + + +def build_term_expression(col, value, col_type): + """ + build sql alchemy filter for term expressions + """ + if isinstance(col_type, ArrayOfEnum): + return col.any(value) + return col == value + + +def combine_conditions(conditions): + """ + useful functions to combine conditions to later apply them in a .filter + """ + if not conditions: + return True + if len(conditions) == 1: + return conditions[0] + expr = conditions[0] + for c in conditions[1:]: + expr = expr & 
c + return expr + + +def build_sort_expressions(sort_config, document_model): + """ + build sql alchemy sort expressions + """ + sort_expressions = [] + + for sort in sort_config: + if sort == "undefined": + continue + + # DESC + if hasattr(sort, "items"): + for attr, order in sort.items(): + col = ( + getattr(document_model, "document_id") + if attr == "id" else getattr(document_model, attr) + ) + sort_expressions.append( + nullslast(col.desc() if order == "desc" else col.asc()) + ) + + # ASC + else: + col = getattr(document_model, sort) + sort_expressions.append(nullslast(col.asc())) + + return sort_expressions diff --git a/c2corg_api/views/coverage.py b/c2corg_api/views/coverage.py new file mode 100644 index 000000000..407edcd35 --- /dev/null +++ b/c2corg_api/views/coverage.py @@ -0,0 +1,142 @@ + +from c2corg_api.views.validation import validate_associations, \ + validate_pagination, validate_preferred_lang_param +from c2corg_api.views import cors_policy, restricted_json_view +from cornice.resource import resource, view +from cornice.validators import colander_body_validator +from shapely.geometry import Point, shape +from c2corg_api.views.document import DocumentRest, make_validator_create, \ + make_validator_update +from c2corg_api.views.validation import validate_cook_param, validate_id, \ + validate_lang_param +from c2corg_api.views.document_schemas import coverage_documents_config +from c2corg_api.models.utils import wkb_to_shape +import functools +import json +import logging +from c2corg_api.models import DBSession +from c2corg_api.models.common.fields_coverage import fields_coverage +from c2corg_api.models.coverage import COVERAGE_TYPE, Coverage, \ + schema_coverage, schema_create_coverage, schema_update_coverage + + +log = logging.getLogger(__name__) + +validate_coverage_create = make_validator_create( + fields_coverage.get('required')) +validate_coverage_update = make_validator_update( + fields_coverage.get('required')) +validate_associations_create = 
functools.partial( + validate_associations, COVERAGE_TYPE, True) +validate_associations_update = functools.partial( + validate_associations, COVERAGE_TYPE, False) + + +@resource(collection_path='/coverages', path='/coverages/{id}', + cors_policy=cors_policy) +class CoverageRest(DocumentRest): + + def __init__(self, request, context=None): + self.request = request + + @view(validators=[validate_pagination, validate_preferred_lang_param]) + def collection_get(self): + return self._collection_get(COVERAGE_TYPE, coverage_documents_config) + + @view(validators=[validate_id, validate_lang_param, validate_cook_param]) + def get(self): + return self._get( + coverage_documents_config, schema_coverage, include_areas=False) + + @restricted_json_view( + schema=schema_create_coverage, + validators=[ + colander_body_validator, + validate_coverage_create, + validate_associations_create]) + def collection_post(self): + return self._collection_post(schema_coverage, allow_anonymous=False) + + @restricted_json_view( + schema=schema_update_coverage, + validators=[ + colander_body_validator, + validate_id, + validate_coverage_update, + validate_associations_update]) + def put(self): + return self._put(Coverage, schema_coverage) + + +@resource(path='/getcoverage', cors_policy=cors_policy) +class WaypointCoverageRest(DocumentRest): + + def __init__(self, request, context=None): + self.request = request + + @view(validators=[]) + def get(self): + """Returns the coverage from a longitude and a latitude""" + + lon = float(self.request.GET['lon']) + lat = float(self.request.GET['lat']) + + return get_coverage(lon, lat) + + +@resource(path='/getpolygoncoverage', cors_policy=cors_policy) +class PolygonCoverage(DocumentRest): + + def __init__(self, request, context=None): + self.request = request + + @view(validators=[]) + def post(self): + """Returns the coverages from a geom_detail type polygon + (geom_detail has to be EPSG 4326 since isochrone is 4326)""" + geom_detail = json.loads( + 
(json.loads(self.request.body)['geom_detail'])) + polygon = shape(geom_detail) + return get_coverages(polygon) + + +def get_coverage(lon, lat): + """get the coverage that contains a point(lon, lat)""" + pt = Point(lon, lat) + + coverage_found = None + + coverages = DBSession.query(Coverage).all() + + for coverage in coverages: + geom = coverage.geometry.geom_detail + + # convert WKB → Shapely polygon + poly = wkb_to_shape(geom) + + if poly.contains(pt): + coverage_found = coverage + break + + if (coverage_found): + return coverage_found.coverage_type + else: + return None + + +def get_coverages(polygon): + """get all the coverages that intersects a polygon""" + coverage_found = [] + + coverages = DBSession.query(Coverage).all() + + for coverage in coverages: + geom = coverage.geometry.geom_detail + + # convert WKB → Shapely polygon + poly = wkb_to_shape(geom) + + if poly.contains(polygon) or poly.intersects(polygon): + coverage_found.append(coverage.coverage_type) + + return coverage_found diff --git a/c2corg_api/views/document_delete.py b/c2corg_api/views/document_delete.py index 6d557f6df..107b42710 100644 --- a/c2corg_api/views/document_delete.py +++ b/c2corg_api/views/document_delete.py @@ -1,3 +1,4 @@ +from c2corg_api.models.coverage import COVERAGE_TYPE, ArchiveCoverage, Coverage from c2corg_api.security.acl import ACLDefault from c2corg_api.models import DBSession, article, image from c2corg_api.models.area_association import AreaAssociation @@ -352,6 +353,8 @@ def _get_models(document_type): return Article, None, ArchiveArticle, None if document_type == BOOK_TYPE: return Book, None, ArchiveBook, None + if document_type == COVERAGE_TYPE: + return Coverage, None, ArchiveCoverage, None if document_type == XREPORT_TYPE: return Xreport, XreportLocale, ArchiveXreport, ArchiveXreportLocale assert False diff --git a/c2corg_api/views/document_schemas.py b/c2corg_api/views/document_schemas.py index db08b12b9..90a842736 100644 --- 
a/c2corg_api/views/document_schemas.py +++ b/c2corg_api/views/document_schemas.py @@ -2,6 +2,8 @@ from c2corg_api.models.article import ARTICLE_TYPE, Article, \ schema_listing_article from c2corg_api.models.book import BOOK_TYPE, Book, schema_listing_book +from c2corg_api.models.coverage import COVERAGE_TYPE, Coverage, \ + schema_listing_coverage from c2corg_api.models.image import IMAGE_TYPE, Image, schema_listing_image from c2corg_api.models.outing import OUTING_TYPE, Outing, schema_outing from c2corg_api.models.xreport import XREPORT_TYPE, Xreport, \ @@ -27,6 +29,7 @@ from c2corg_api.models.common.fields_image import fields_image from c2corg_api.models.common.fields_topo_map import fields_topo_map from c2corg_api.models.common.fields_user_profile import fields_user_profile +from c2corg_api.models.common.fields_coverage import fields_coverage from functools import lru_cache @@ -253,6 +256,10 @@ def adapt_waypoint_schema_for_type(waypoint_type, field_list_type): WAYPOINT_TYPE, Waypoint, schema_waypoint, clazz_locale=WaypointLocale, fields=fields_waypoint, adapt_schema=waypoint_listing_schema_adaptor) +# coverages +coverage_documents_config = GetDocumentsConfig( + COVERAGE_TYPE, Coverage, schema_listing_coverage, + listing_fields=fields_coverage['listing']) document_configs = { WAYPOINT_TYPE: waypoint_documents_config, @@ -264,5 +271,6 @@ def adapt_waypoint_schema_for_type(waypoint_type, field_list_type): XREPORT_TYPE: xreport_documents_config, MAP_TYPE: topo_map_documents_config, ARTICLE_TYPE: article_documents_config, - USERPROFILE_TYPE: user_profile_documents_config + USERPROFILE_TYPE: user_profile_documents_config, + COVERAGE_TYPE: coverage_documents_config } diff --git a/c2corg_api/views/navitia.py b/c2corg_api/views/navitia.py index aaa44f7bd..692cc7c87 100644 --- a/c2corg_api/views/navitia.py +++ b/c2corg_api/views/navitia.py @@ -1,8 +1,43 @@ +import json +import logging import os import requests +import redis +import uuid +import time +import threading 
+import ast +from c2corg_api.models import DBSession +from c2corg_api.models.area import Area +from c2corg_api.models.utils import wkb_to_shape +from c2corg_api.models.waypoint import Waypoint, schema_waypoint +from c2corg_api.views.coverage import get_coverage +from c2corg_api.views.document import LIMIT_DEFAULT +from c2corg_api.views.waypoint import build_reachable_waypoints_query +from c2corg_api.views.route import build_reachable_route_query_with_waypoints +from shapely.geometry import Point from pyramid.httpexceptions import HTTPBadRequest, HTTPInternalServerError # noqa: E501 +from pyramid.response import Response from cornice.resource import resource, view -from c2corg_api.views import cors_policy +from c2corg_api.views import cors_policy, to_json_dict +from c2corg_api.models.route import schema_route +from c2corg_api.models.area import schema_listing_area +from shapely.geometry import shape +from pyproj import Transformer + + +log = logging.getLogger(__name__) + +# When editing these constants, make sure to edit them in the front too +# (itinevert-service) +MAX_ROUTE_THRESHOLD = 50 +MAX_TRIP_DURATION = 240 +MIN_TRIP_DURATION = 20 + +# redis to store job's value (progress, result, error...) 
+REDIS_HOST = "redis" +REDIS_PORT = 6379 +REDIS_DB = 0 def validate_navitia_params(request, **kwargs): @@ -105,3 +140,787 @@ def get(self): raise HTTPInternalServerError(f'Network error: {str(e)}') except Exception as e: raise HTTPInternalServerError(f'Internal error: {str(e)}') + + +def validate_journey_reachable_params(request, **kwargs): + """Validates the required parameters for the journey reachable doc route""" + required_params = ['from', 'datetime', 'datetime_represents', + 'walking_speed', 'max_walking_duration_to_pt'] + + for param in required_params: + if param not in request.params: + request.errors.add( + 'querystring', + param, + f'Paramètre {param} requis') + + +@resource(path='/navitia/journeyreachableroutes/start', cors_policy=cors_policy) # noqa +class StartNavitiaJourneyReachableRoutesRest: + def __init__(self, request, context=None): + self.request = request + + @view(validators=[validate_journey_reachable_params]) + def get(self): + """ + start job to retrieve journey reachable routes + returns job id + """ + return start_job_background(compute_journey_reachable_routes, self.request) # noqa + + +@resource(path='/navitia/journeyreachablewaypoints/start', cors_policy=cors_policy) # noqa +class StartNavitiaJourneyReachableWaypointsRest: + def __init__(self, request, context=None): + self.request = request + + @view(validators=[validate_journey_reachable_params]) + def get(self): + """ + start job to retrieve journey reachable waypoints + returns job id + """ + return start_job_background(compute_journey_reachable_waypoints, self.request) # noqa + + +@resource(path='/navitia/journeyreachableroutes/result/{job_id}', cors_policy=cors_policy) # noqa +class NavitiaJourneyReachableRoutesResultRest: + def __init__(self, request, context=None): + self.request = request + + @view() + def get(self): + """ + get the result of the job : get journey reachable routes + returns the result + """ + r = redis_client() + job_id = self.request.matchdict.get("job_id") 
+ return read_result_from_redis(r, job_id) + + +@resource(path='/navitia/journeyreachablewaypoints/result/{job_id}', cors_policy=cors_policy) # noqa +class NavitiaJourneyReachableWaypointsResultRest: + def __init__(self, request, context=None): + self.request = request + + @view() + def get(self): + """ + get the result of the job : get journey reachable waypoints + returns the result + """ + r = redis_client() + job_id = self.request.matchdict.get("job_id") + return read_result_from_redis(r, job_id) + +# Progress endpoints + + +@resource(path='/navitia/journeyreachableroutes/progress/{job_id}', cors_policy=cors_policy) # noqa +class NavitiaJourneyReachableRoutesProgressRest: + def __init__(self, request, context=None): + self.request = request + + @view() + def get(self): + """ + monitor progress of job id for journey reachable routes + """ + r = redis_client() + job_id = self.request.matchdict.get("job_id") + return Response(app_iter=progress_stream(r, job_id), content_type="text/event-stream") # noqa + + +@resource(path='/navitia/journeyreachablewaypoints/progress/{job_id}', cors_policy=cors_policy) # noqa +class NavitiaJourneyReachableWaypointsProgressRest: + def __init__(self, request, context=None): + self.request = request + + @view() + def get(self): + """ + monitor progress of job id for journey reachable waypoints + """ + r = redis_client() + job_id = self.request.matchdict.get("job_id") + return Response(app_iter=progress_stream(r, job_id), content_type="text/event-stream") # noqa + + +def compute_journey_reachable_routes(job_id, request): + """ + Get all waypoints matching filters in params, that are reachable + (means there exists a Navitia journey for at least one of + their waypoints of type access). 
+ + NOTE : the number of routes after applying filters, + has to be < MAX_ROUTE_THRESHOLD, + to reduce number of queries towards Navitia journey API + + the result can be found inside redis + """ + r = redis_client() + try: + meta_params = extract_meta_params(request) + journey_params = extract_journey_params(request) + query = build_reachable_route_query_with_waypoints( + request.GET, meta_params) + results = query.all() + + if len(results) > MAX_ROUTE_THRESHOLD: + raise HTTPBadRequest( + "Couldn't proceed with computation : Too much routes found.") + + areas_map = collect_areas_from_results(results, 1) + wp_objects = collect_waypoints_from_results(results) + + total = len(wp_objects) + log.info("Number of NAVITIA journey queries : %d", total) + r.set(f"job:{job_id}:total", total) + + count = found = not_found = 0 + navitia_wp_map = {} + + for wp in wp_objects: + result = is_wp_journey_reachable( + to_json_dict(wp, schema_waypoint), journey_params) + navitia_wp_map[wp.document_id] = result + count += 1 + if result: + found += 1 + else: + not_found += 1 + _store_job_progress(r, job_id, count, found, not_found) + + routes = [] + for route, areas, waypoints in results: + journey_exists = any(navitia_wp_map.get( + wp.get("document_id")) for wp in waypoints) + if not journey_exists: + continue + json_areas = [] + for area in (areas or []): + area_obj = areas_map.get(area.get("document_id")) + if area_obj: + json_areas.append(to_json_dict( + area_obj, schema_listing_area)) + route.areas = json_areas + routes.append(to_json_dict(route, schema_route, True)) + + r.set(f"job:{job_id}:result", json.dumps( + {'documents': routes, 'total': len(routes)})) + r.set(f"job:{job_id}:status", "done") + except Exception as exc: + log.exception(str(exc)) + r.set(f"job:{job_id}:status", "error") + r.set(f"job:{job_id}:error", str(exc)) + + +def compute_journey_reachable_waypoints(job_id, request): + """ + Get all routes matching filters in params, that are reachable + (means there 
exists a Navitia journey for at least one of + their waypoints of type access). + + NOTE : the waypoints have to be filtered by one area (and not more) + to reduce number of request towards Navitia Journey API + + the result can be found inside redis + """ + r = redis_client() + try: + meta_params = extract_meta_params(request) + journey_params = extract_journey_params(request) + + # Ensure areas filter is provided and normalized + areas_param = None + try: + areas_param = request.GET['a'] + if isinstance(areas_param, str): + areas_list = areas_param.split(",") + else: + areas_list = list(areas_param) + except Exception: + areas_list = None + + if areas_list is None: + raise HTTPBadRequest('Missing filter : area is required') + if len(areas_list) > 1: + raise HTTPBadRequest('Only one filtering area is allowed') + + query = build_reachable_waypoints_query(request.GET, meta_params) + results = query.all() + + areas_map = collect_areas_from_results(results, 1) + + total = len(results) + log.info("Number of NAVITIA journey queries : %d", total) + r.set(f"job:{job_id}:total", total) + + count = found = not_found = 0 + waypoints = [] + + for waypoint, areas in results: + count += 1 + r.publish(f"job:{job_id}:events", f"not_found:{not_found}") + reachable = is_wp_journey_reachable(to_json_dict( + waypoint, schema_waypoint), journey_params) + if reachable: + found += 1 + json_areas = [] + for area in (areas or []): + area_obj = areas_map.get(area.get("document_id")) + if area_obj: + json_areas.append(to_json_dict( + area_obj, schema_listing_area)) + waypoint.areas = json_areas + waypoints.append(to_json_dict(waypoint, schema_waypoint, True)) + else: + not_found += 1 + _store_job_progress(r, job_id, count, found, not_found) + + r.set(f"job:{job_id}:result", json.dumps( + {'documents': waypoints, 'total': len(waypoints)})) + r.set(f"job:{job_id}:status", "done") + except Exception as exc: + log.exception("Error computing reachable waypoints") + r.set(f"job:{job_id}:status", 
"error") + r.set(f"job:{job_id}:error", str(exc)) + + +def validate_isochrone_reachable_params(request, **kwargs): + """Validates the required parameters + for the isochrone reachable doc route""" + required_params = ['from', 'datetime', + 'datetime_represents', 'boundary_duration'] + + for param in required_params: + if param not in request.params: + request.errors.add( + 'querystring', + param, + f'Paramètre {param} requis') + + +@resource(path='/navitia/isochronesreachableroutes', cors_policy=cors_policy) +class NavitiaIsochronesReachableRoutesRest: + def __init__(self, request, context=None): + self.request = request + + @view(validators=[validate_isochrone_reachable_params]) + def get(self): + """ + Get all routes matching filters in params, that have at least + one waypoint of type access that is inside the isochron. + The isochron is created by querying navitia api + with specific parameters, see validate_isochrone_reachable_params func + """ + try: + meta_params = extract_meta_params(self.request) + + isochrone_params = extract_isochrone_params(self.request) + + query = build_reachable_route_query_with_waypoints( + self.request.GET, meta_params) + + results = query.all() + + areas_map = collect_areas_from_results(results, 1) + + response = get_navitia_isochrone(isochrone_params) + + routes = [] + geojson = "" + # if isochrone found + if (len(response["isochrones"]) > 0): + geojson = response["isochrones"][0]["geojson"] + isochrone_geom = shape(geojson) + + wp_objects = collect_waypoints_from_results(results) + + navitia_wp_map = {wp.document_id: is_wp_in_isochrone( + to_json_dict(wp, schema_waypoint), isochrone_geom + ) for wp in wp_objects} + + for route, areas, waypoints in results: + # check if a journey exists for route + # (at least one wp has a journey associated) + one_wp_in_isochrone = False + for wp in waypoints: + wp_id = wp.get("document_id") + one_wp_in_isochrone |= navitia_wp_map.get(wp_id) + + if one_wp_in_isochrone: + json_areas = [] + + if 
areas is None: + areas = [] + + for area in areas: + area_obj = areas_map.get(area.get("document_id")) + if area_obj: + json_areas.append(to_json_dict( + area_obj, schema_listing_area)) + + # assign JSON areas to the waypoint + route.areas = json_areas + route_dict = to_json_dict(route, schema_route, True) + routes.append(route_dict) + + return { + 'documents': routes, + 'total': len(routes), + 'isochron_geom': geojson + } + except Exception as e: + return json.dumps(ast.literal_eval(str(e))) + + +@resource( + path='/navitia/isochronesreachablewaypoints', + cors_policy=cors_policy +) +class NavitiaIsochronesReachableWaypointsRest: + def __init__(self, request, context=None): + self.request = request + + @view(validators=[validate_isochrone_reachable_params]) + def get(self): + """ + Get all waypoints matching filters in params, + that are inside the isochron. + The isochron is created by querying navitia api + with specific parameters, see validate_isochrone_reachable_params func + """ + try: + + meta_params = extract_meta_params(self.request) + + isochrone_params = extract_isochrone_params(self.request) + + query = build_reachable_waypoints_query( + self.request.GET, meta_params) + + results = query.all() + + # manage areas for waypoints + areas_map = collect_areas_from_results(results, 1) + + response = get_navitia_isochrone(isochrone_params) + + waypoints = [] + geojson = "" + # if isochrone found + if (len(response["isochrones"]) > 0): + geojson = response["isochrones"][0]["geojson"] + isochrone_geom = shape(geojson) + + for waypoint, areas in results: + # check if wp is in isochrone + if is_wp_in_isochrone( + to_json_dict(waypoint, schema_waypoint), isochrone_geom + ): + json_areas = [] + if areas is None: + areas = [] + + for area in areas: + area_obj = areas_map.get(area.get("document_id")) + if area_obj: + json_areas.append(to_json_dict( + area_obj, schema_listing_area)) + + # assign JSON areas to the waypoint + waypoint.areas = json_areas + wp_dict = 
to_json_dict(waypoint, schema_waypoint, True) + waypoints.append(wp_dict) + + return { + 'documents': waypoints, + 'total': len(waypoints), + "isochron_geom": geojson + } + except Exception as e: + return json.dumps(ast.literal_eval(str(e))) + + +@resource(path='/navitia/areainisochrone', cors_policy=cors_policy) +class AreaInIsochroneRest: + def __init__(self, request, context=None): + self.request = request + + @view(validators=[]) + def post(self): + """ + returns all areas that are inside + or that intersects an isochrone geometry + + make sure the geom_detail in body is epsg:3857 + """ + polygon = shape(json.loads(json.loads( + self.request.body)['geom_detail'])) + + query = ( + DBSession.query(Area).filter(Area.area_type == 'range') + ) + + results = query.all() + + areas = [] + + for area in results: + if (polygon.intersects(wkb_to_shape(area.geometry.geom_detail))): + areas.append(area.document_id) + + return areas + + +def is_wp_journey_reachable(waypoint, journey_params): + """ + Query the navitia Journey api and returns true + if the waypoint is reachable (at least one journey has been found) + NOTE : the journey's departure time has to be + the same day as the datetime's day in journey_params + """ + # enhance journey params with the 'to' parameter, + # from the waypoint geometry. 
+ geom = shape(json.loads(waypoint.get("geometry").get("geom"))) + + src_epsg = 3857 + transformer = Transformer.from_crs( + f"EPSG:{src_epsg}", "EPSG:4326", always_xy=True) + lon, lat = transformer.transform(geom.x, geom.y) + + journey_params['to'] = f"{lon};{lat}" + + destination_coverage = get_coverage(lon, lat) + + try: + # Récupération de la clé API depuis les variables d'environnement + api_key = os.getenv('NAVITIA_API_KEY') + if not api_key: + raise HTTPInternalServerError( + 'Configuration API Navitia manquante') + + response = {} + + if (destination_coverage): + # Appel à l'API Navitia Journey with coverage + response = requests.get( + f'https://api.navitia.io/v1/coverage/{destination_coverage}/journeys', # noqa: E501 + params=journey_params, + headers={'Authorization': api_key}, + timeout=30 + ) + else: + # Appel à l'API Navitia Journey + response = requests.get( + 'https://api.navitia.io/v1/journeys', + params=journey_params, + headers={'Authorization': api_key}, + timeout=30 + ) + + # Vérification du statut de la réponse + if response.status_code == 401: + raise HTTPInternalServerError('Authentication error with Navitia API') # noqa + elif response.status_code == 400: + raise HTTPBadRequest('Invalid parameters for Navitia API') + elif response.status_code == 404: + # no_destination -> public transport not reachable from destination + # no_origin -> public transport not reachable from origin + # these do not count as proper errors, + # more like the wp is just not reachable + if response.json()['error']['id'] != 'no_destination' and \ + response.json()['error']['id'] != 'no_origin': + raise HTTPInternalServerError(response.json()['error']) + return False + elif not response.ok: + raise HTTPInternalServerError(f'Navitia API error: {response.status_code}') # noqa: E501 + else: + # code 200 OK + # make sure the waypoint is reachable if at least one journey's + # departure date time is the same day as the day in journey_params + for journey in 
response.json().get('journeys', []): + journey_day = int(journey['departure_date_time'][6:8]) + param_day = int(journey_params['datetime'][6:8]) + if journey_day == param_day: + return True + + return False + + except requests.exceptions.Timeout: + raise HTTPInternalServerError( + 'Timeout when calling the Navitia API') + except requests.exceptions.RequestException as e: + raise HTTPInternalServerError(f'{str(e)}') + except Exception as e: + raise HTTPInternalServerError(f'{str(e)}') + + +def get_navitia_isochrone(isochrone_params): + """ + Query the navitia Isochrones api, and returns the isochrone object + """ + lon = isochrone_params.get("from").split(";")[0] + lat = isochrone_params.get("from").split(";")[1] + source_coverage = get_coverage(lon, lat) + + try: + # Récupération de la clé API depuis les variables d'environnement + api_key = os.getenv('NAVITIA_API_KEY') + if not api_key: + raise HTTPInternalServerError( + 'Configuration API Navitia manquante') + + response = {} + + if (source_coverage): + # Appel à l'API Navitia Journey with coverage + response = requests.get( + f'https://api.navitia.io/v1/coverage/{source_coverage}/isochrones', # noqa: E501 + params=isochrone_params, + headers={'Authorization': api_key}, + timeout=30 + ) + else: + # can't call isochrones api without coverage + raise HTTPInternalServerError( + 'Coverage not found for source') + + # Vérification du statut de la réponse + if response.status_code == 401: + raise HTTPInternalServerError( + 'Authentication error with Navitia API') + elif response.status_code == 400: + raise HTTPBadRequest('Invalid parameters for Navitia API') + elif response.status_code == 404: + # no_destination -> public transport not reachable from destination + # no_origin -> public transport not reachable from origin + # these do not count as proper errors, + # more like the wp is just not reachable + raise HTTPInternalServerError(response.json()['error']) + elif not response.ok: + raise 
HTTPInternalServerError(f'Navitia API error: {response.status_code}') # noqa: E501 + else: + # Retour des données JSON + return response.json() + + except requests.exceptions.Timeout: + raise HTTPInternalServerError( + 'Timeout when calling the Navitia API') + except requests.exceptions.RequestException as e: + raise HTTPInternalServerError(f'{str(e)}') + except Exception as e: + raise HTTPInternalServerError(f'{str(e)}') + + +def is_wp_in_isochrone(waypoint, isochrone_geom): + """ + Returns true if waypoints is contained in isochrone_geom + """ + # get lon & lat + geom = shape(json.loads(waypoint.get("geometry").get("geom"))) + + src_epsg = 3857 + transformer = Transformer.from_crs( + f"EPSG:{src_epsg}", "EPSG:4326", always_xy=True) + lon, lat = transformer.transform(geom.x, geom.y) + pt = Point(lon, lat) + + return isochrone_geom.contains(pt) + + +def extract_meta_params(request): + """ + Extract meta parameters such as offset, limit and lang + """ + v = request.validated + return { + 'offset': v.get('offset', 0), + 'limit': v.get('limit', LIMIT_DEFAULT), + 'lang': v.get('lang'), + } + + +def extract_journey_params(request): + """ + Extract parameters for journey query + """ + return { + 'from': request.params.get('from'), + 'datetime': request.params.get('datetime'), + 'datetime_represents': request.params.get('datetime_represents'), + 'walking_speed': request.params.get('walking_speed'), + 'max_walking_duration_to_pt': request.params.get('max_walking_duration_to_pt'), # noqa: E501 + 'to': '' + } + + +def extract_isochrone_params(request): + """ + Extract parameters for isochrone query + + NOTE : the boundary duration is bounded by constants + MAX_TRIP_DURATION and MIN_TRIP_DURATION + if the boundary duration goes beyond limits, + it is set to the limit it goes past. 
+ """ + params = { + 'from': request.params.get('from'), + 'datetime': request.params.get('datetime'), + 'boundary_duration[]': request.params.get('boundary_duration'), + 'datetime_represents': request.params.get('datetime_represents') + } + # normalize boundary + bd = params['boundary_duration[]'] + if len(bd.split(",")) == 1: + duration = int(bd) + params['boundary_duration[]'] = max( + min( + duration, + MAX_TRIP_DURATION * 60 + ), + MIN_TRIP_DURATION * 60 + ) + return params + + +def collect_areas_from_results(results, area_index): + """ + Extract all area document_ids from results, load Area objects from DB, + and return {document_id: Area}. + """ + area_ids = set() + + for row in results: + areas = row[area_index] + + if not areas: + continue + + for area in areas: + doc_id = area.get("document_id") + if doc_id: + area_ids.add(doc_id) + + area_objects = DBSession.query(Area).filter( + Area.document_id.in_(area_ids) + ).all() + + return {a.document_id: a for a in area_objects} + + +def collect_waypoints_from_results(results): + """ + Extract all waypoint document_ids from results, + load Waypoint objects from DB, + and return {document_id: Waypoint}. 
+ """ + wp_ids = set() + + for route, areas, waypoints in results: + if not waypoints: + continue + + for wp in waypoints: + doc_id = wp.get("document_id") + if doc_id: + wp_ids.add(doc_id) + + wp_objects = DBSession.query(Waypoint).filter( + Waypoint.document_id.in_(wp_ids) + ).all() + + return {wp for wp in wp_objects} + + +def redis_client(): + """ fast way to get redis client """ + return redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB) + + +def start_job_background(target, request): + """ start a job in the background, + target is the query function to execute in bg + request is the request to pass to the function""" + job_id = str(uuid.uuid4()) + r = redis_client() + r.set(f"job:{job_id}:progress", 0) + r.set(f"job:{job_id}:status", "running") + threading.Thread(target=target, args=( + job_id, request), daemon=True).start() + return {"job_id": job_id} + + +def get_job_status(r, job_id): + """ returns the ongoing job status """ + status = r.get(f"job:{job_id}:status") + if status is None: + return None, {"error": "unknown_job_id"} + status = status.decode() + return status, status + + +def read_result_from_redis(r, job_id): + """ returns the result from redis """ + status = r.get(f"job:{job_id}:status") + if status is None: + return {"error": "unknown_job_id"} + status = status.decode() + if status == "running": + return {"status": "running"} + if status == "error": + error_msg = r.get(f"job:{job_id}:error") + return {"status": "error", "message": error_msg.decode() if error_msg else "unknown error"} # noqa + if status == "done": + data = r.get(f"job:{job_id}:result") + if not data: + return {"status": "error", "message": "missing_result"} + return {"status": "done", "result": json.loads(data)} + return {"error": "unknown_status", "status": status} + + +def progress_stream(r, job_id, poll_interval=0.5): + """ yield the job progress """ + while True: + raw_progress = r.get(f"job:{job_id}:progress") + raw_found = r.get(f"job:{job_id}:found") + 
raw_not_found = r.get(f"job:{job_id}:not_found") + raw_total = r.get(f"job:{job_id}:total") + + progress = int(raw_progress) if raw_progress is not None else 0 + found = int(raw_found) if raw_found is not None else 0 + not_found = int(raw_not_found) if raw_not_found is not None else 0 + total = int(raw_total) if raw_total is not None else 0 + + payload = {"progress": progress, "total": total, + "found": found, "not_found": not_found} + yield (f"data: {json.dumps(payload)}\n\n").encode("utf-8") + + status = r.get(f"job:{job_id}:status") + if status and status.decode() == "done": + yield b"event: done\ndata: done\n\n" + break + elif status and status.decode() == "error": + payload = r.get(f"job:{job_id}:error") + json_payload = json.dumps(ast.literal_eval(payload.decode())) + yield f"event: error\ndata: {json_payload}\n\n".encode("utf-8") + break + + time.sleep(poll_interval) + + +def _store_job_progress(r, job_id, count, found, not_found): + """ + store job progress which is : + progress : the number of queries done + found : the number of successful queries + not_found: the number of unsuccessful queries + """ + r.set(f"job:{job_id}:progress", count) + r.set(f"job:{job_id}:found", found) + r.set(f"job:{job_id}:not_found", not_found) + r.publish(f"job:{job_id}:events", f"progress:{count}") + r.publish(f"job:{job_id}:events", f"found:{found}") + r.publish(f"job:{job_id}:events", f"not_found:{not_found}") diff --git a/c2corg_api/views/route.py b/c2corg_api/views/route.py index 2ab7f9cb3..3add9a03a 100644 --- a/c2corg_api/views/route.py +++ b/c2corg_api/views/route.py @@ -7,6 +7,7 @@ from c2corg_api.models.document import DocumentLocale, DocumentGeometry from c2corg_api.models.outing import Outing from c2corg_api.models.waypoint import WAYPOINT_TYPE, Waypoint +from c2corg_api.search.utils import build_sqlalchemy_filters from c2corg_api.security.acl import ACLDefault from c2corg_api.views.document_associations import get_first_column from c2corg_api.views.document_info 
import DocumentInfoRest @@ -34,6 +35,19 @@ from sqlalchemy.orm import load_only from sqlalchemy.sql.expression import text, or_, column, union +from operator import and_ +from c2corg_api.models.area import Area +from c2corg_api.models.area_association import AreaAssociation +from c2corg_api.search.search_filters import build_query +from c2corg_api.views import to_json_dict +from c2corg_api.views.document import (LIMIT_DEFAULT) +from c2corg_api.models.waypoint_stoparea import (WaypointStoparea) +from c2corg_api.models.area import schema_listing_area +from sqlalchemy import func, literal_column +from c2corg_api.models.common.sortable_search_attributes import \ + search_attr_by_field + + validate_route_create = make_validator_create( fields_route, 'activities', activities) validate_route_update = make_validator_update( @@ -202,6 +216,71 @@ def get(self): update_all_pt_rating(waypoint_extrapolation) +@resource(path='/reachableroutes', cors_policy=cors_policy) +class ReachableRouteRest(DocumentRest): + + def __init__(self, request, context=None): + self.request = request + + @view(validators=[validate_pagination, validate_preferred_lang_param]) + def get(self): + """Returns a list of object {documents: Route[], total: Integer} -> + documents: routes reachable within offset and limit + total: number of documents returned by query without offset and limit""" # noqa: E501 + + validated = self.request.validated + + meta_params = { + 'offset': validated.get('offset', 0), + 'limit': validated.get('limit', LIMIT_DEFAULT), + 'lang': validated.get('lang') + } + + query = build_reachable_route_query(self.request.GET, meta_params) + + count = query.count() + + results = ( + query + .limit(meta_params['limit']) + .offset(meta_params['offset']) + .all() + ) + + areas_id = set() + for route, areas in results: + if areas is None: + continue + for area in areas: + area_id = area.get("document_id") + if area_id is not None: + areas_id.add(area_id) + + areas_objects = 
+    this includes every filter on routes,
+ returns a list of routes reachable + (can be accessible by public transports), filtered with params + """ + search = build_query(params, meta_params, ROUTE_TYPE) + search_dict = search.to_dict() + + filter_conditions, sort_expressions, needs_locale_join, langs = \ + build_sqlalchemy_filters( + search_dict, + document_model=Route, + filter_map={'areas': Area, 'waypoints': Waypoint}, + geometry_model=DocumentGeometry, + range_enum_map=search_attr_by_field, + title_columns=[DocumentLocale.title, RouteLocale.title_prefix] + ) + + # perform query + query = DBSession.query(Route, func.jsonb_agg(func.distinct( + func.jsonb_build_object( + literal_column("'document_id'"), Area.document_id + ))).label("areas")). \ + select_from(Association). \ + join(Route, or_( + Route.document_id == Association.child_document_id, + Route.document_id == Association.parent_document_id + )). \ + join(Waypoint, and_( + or_( + Waypoint.document_id == Association.child_document_id, + Waypoint.document_id == Association.parent_document_id + ), + Waypoint.waypoint_type == 'access' + )). \ + join( + WaypointStoparea, + WaypointStoparea.waypoint_id == Waypoint.document_id + ). \ + join( + DocumentGeometry, + Waypoint.document_id == DocumentGeometry.document_id + ). \ + join( + AreaAssociation, + AreaAssociation.document_id == Route.document_id + ). \ + join(Area, Area.document_id == AreaAssociation.area_id) + + if (needs_locale_join or len(langs) > 0): + query = query.join( + DocumentLocale, + Route.document_id == DocumentLocale.document_id + ) + + if (needs_locale_join): + query = query.join(RouteLocale, RouteLocale.id == DocumentLocale.id) + + if (len(langs) > 0): + query = query.filter(DocumentLocale.lang.in_(langs)) + + query = query. \ + filter(filter_conditions). \ + order_by(*sort_expressions). \ + group_by(Route). \ + distinct() + + return query + + +def build_reachable_route_query_with_waypoints(params, meta_params): + """build the query based on params and meta params. 
+    this includes every filter on routes,
+    as well as offset + limit, sort, bbox...
+    returns a list of routes reachable
+    (accessible by public transports), filtered with params
\ + order_by(*sort_expressions). \ + group_by(Route). \ + distinct() + + return query diff --git a/c2corg_api/views/waypoint.py b/c2corg_api/views/waypoint.py index a1d4ce97a..8ec6fe163 100644 --- a/c2corg_api/views/waypoint.py +++ b/c2corg_api/views/waypoint.py @@ -1,10 +1,12 @@ import functools +import logging from c2corg_api.models import DBSession from c2corg_api.models.association import Association from c2corg_api.models.document import UpdateType from c2corg_api.models.outing import Outing from c2corg_api.models.route import Route, RouteLocale, ROUTE_TYPE +from c2corg_api.search.utils import build_sqlalchemy_filters from c2corg_api.views.document_associations import get_first_column from c2corg_api.views.document_info import DocumentInfoRest from c2corg_api.views.document_listings import get_documents_for_ids @@ -22,7 +24,7 @@ schema_create_waypoint) from c2corg_api.views.document import ( - DocumentRest, make_validator_create, make_validator_update, + LIMIT_MAX, DocumentRest, make_validator_create, make_validator_update, NUM_RECENT_OUTINGS) from c2corg_api.views import cors_policy, restricted_json_view from c2corg_api.views.validation import validate_id, validate_pagination, \ @@ -34,6 +36,22 @@ from sqlalchemy.orm.util import aliased from sqlalchemy.sql.elements import literal_column from sqlalchemy.sql.expression import and_, text, union, column +from operator import or_ +from c2corg_api.models.area import Area +from c2corg_api.models.area_association import AreaAssociation +from c2corg_api.models.document import DocumentGeometry, DocumentLocale +from c2corg_api.search.search_filters import build_query +from c2corg_api.views import to_json_dict +from c2corg_api.views.document import ( + LIMIT_DEFAULT) +from c2corg_api.models.waypoint_stoparea import ( + WaypointStoparea) +from c2corg_api.models.area import schema_listing_area +from sqlalchemy import func +from c2corg_api.models.common.sortable_search_attributes import \ + search_attr_by_field + +log = 
logging.getLogger(__name__) # the number of routes that are included for waypoints NUM_ROUTES = 400 @@ -287,7 +305,7 @@ def set_recent_outings(waypoint, lang): join( with_query_waypoints, with_query_waypoints.c.document_id == t_route_wp.parent_document_id - ). \ + ). \ distinct(). \ count() @@ -369,9 +387,9 @@ def _get_select_children(waypoint): cte('waypoint_grandchildren') return union( - select_waypoint.select(), - select_waypoint_children.select(), - select_waypoint_grandchildren.select()). \ + select_waypoint.select(), + select_waypoint_children.select(), + select_waypoint_grandchildren.select()). \ cte('select_all_waypoints') @@ -393,6 +411,70 @@ def get(self): return self._get_document_info(waypoint_documents_config) +@resource(path='/reachablewaypoints', cors_policy=cors_policy) +class ReachableWaypointRest(DocumentRest): + + def __init__(self, request, context=None): + self.request = request + + @view(validators=[validate_pagination, validate_preferred_lang_param]) + def get(self): + """Returns a list of object {documents: Waypoint[], total: Integer} -> + documents: waypoints reachable within offset and limit + total: number of documents returned by query without offset and limit""" # noqa: E501 + validated = self.request.validated + + meta_params = { + 'offset': validated.get('offset', 0), + 'limit': min(validated.get('limit', LIMIT_DEFAULT), LIMIT_MAX), + 'lang': validated.get('lang') + } + + query = build_reachable_waypoints_query(self.request.GET, meta_params) + + count = query.count() + + results = ( + query + .limit(meta_params['limit']) + .offset(meta_params['offset']) + .all() + ) + + areas_id = set() + for wp, areas in results: + if areas is None: + continue + for area in areas: + area_id = area.get("document_id") + if area_id is not None: + areas_id.add(area_id) + + areas_objects = DBSession.query(Area).filter( + Area.document_id.in_(areas_id)).all() + + areas_map = {area.document_id: area for area in areas_objects} + + waypoints = [] + for wp, 
+    this includes every filter on waypoints,
+# this script is meant to be executed whenever Navitia France coverages are updated.
+# the goal of this script is to save the France coverages in the DB, with their id and geometry.
+# there are as many Navitia requests as there are coverages in France.
+#
+# The script takes in parameter
+# Username 'user123'
+# Password 'password123' (make sure to escape special characters)
+# Base API URL 'http://localhost'
+# API Port '6543'
+# And is meant to be used by moderators, as regular users can't delete documents.
+
+# First, a token is retrieved using the username and password given in parameters.
+# Then, existing coverages are deleted.
+# Navitia requests are made towards the /coverage/{region_id} route to get all coverages.
+# For each coverage found, a POST on Base_API_URL:API_PORT/coverages is made to insert it in the database.
+
+# NOTE: the geometries returned by Navitia for the coverages are in WGS84.
+ exit 1 +fi + +# Function to display usage +usage() { + echo "Usage: $0 " + exit 1 +} + +# Check if exactly 4 arguments are provided +if [ "$#" -ne 4 ]; then + usage +fi + +# Check if jq is installed +if ! command -v jq &> /dev/null; then + echo "jq could not be found. Please install it." + exit 1 +fi + +# Assign arguments to variables +username="$1" +password="$2" +base_api_url="$3" +api_port="$4" + +LOG_FILE="log-navitia-coverage.txt" +NAVITIA_REQUEST_COUNT=0 + +COVERAGE_API_URL="$base_api_url:$api_port/coverages" + +echo "Start time :" > "$LOG_FILE" +echo $(date +"%Y-%m-%d-%H-%M-%S") >> "$LOG_FILE" + +login_body=$(jq -n \ + --arg username "$username" \ + --arg password "$password" \ + '{ + username: $username, + password: $password, + discourse: true, + }') + +# log in to execute script +loginResponse=$(curl -s -X POST "$base_api_url:$api_port/users/login" \ + -H "Content-Type: application/json" \ + -d "$login_body") + +roles=$(echo "$loginResponse" | jq -r '.roles') +JWTToken=$(echo "$loginResponse" | jq -r '.token') + +coverages=$(curl -s -X GET "$COVERAGE_API_URL" \ + -H "Content-Type: application/json") + +numberOfCoverage=$(echo "$coverages" | jq -r '.total') + +if [ "$numberOfCoverage" != "0" ]; then + # check if logged user is a moderator + found=false + for role in "${roles[@]}"; do + if [[ "$role" == "moderator" ]]; then + found=true + break + fi + done + if ! 
+            # build the [lon,lat] subcoordinate string (bash has no '+' string
+            # concatenation operator, so '+=' append is used instead)
"$numberOfCoverage" = "0" ]; then + echo "inserting coverages" + # Build JSON payload + payload=$(jq -n \ + --arg coverage_type "$coverage_type" \ + --arg geom_detail "$geom_detail" \ + '{ + coverage_type: $coverage_type, + geometry: { + geom: null, + geom_detail: $geom_detail + } + }') + + # Send the POST request to create a coverage in DB + responses+=$(curl -X POST -v -H "Content-Type: application/json" -H "Authorization: JWT token=\"$JWTToken\"" -d "$payload" "$COVERAGE_API_URL") + fi +done + +# Log final progress +echo "Completed. Total Navitia API requests: $NAVITIA_REQUEST_COUNT" >> $LOG_FILE + +echo "Stop time :" >> $LOG_FILE +echo $(date +"%Y-%m-%d-%H-%M-%S") >> $LOG_FILE \ No newline at end of file