diff --git a/.github/workflows/update_chart_version.py b/.github/workflows/update_chart_version.py index 89bb38563..92bda4857 100755 --- a/.github/workflows/update_chart_version.py +++ b/.github/workflows/update_chart_version.py @@ -101,7 +101,7 @@ def update_values_yaml(values_path: Path, diracx_version: str) -> None: def main() -> None: - """Main function.""" + """Run the Main.""" parser = argparse.ArgumentParser( description="Update chart versions for DiracX release" ) diff --git a/diracx-client/src/diracx/client/_generated/aio/operations/_operations.py b/diracx-client/src/diracx/client/_generated/aio/operations/_operations.py index 06f7a655f..ff0f2c5b5 100644 --- a/diracx-client/src/diracx/client/_generated/aio/operations/_operations.py +++ b/diracx-client/src/diracx/client/_generated/aio/operations/_operations.py @@ -378,7 +378,8 @@ async def initiate_device_flow( async def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: """Do Device Flow. - This is called as the verification URI for the device flow. + Serve as the verification URI for the device flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to perform a authorization code flow. @@ -435,8 +436,8 @@ async def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: async def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: """Finish Device Flow. - This the url callbacked by IAM/CheckIn after the authorization - flow was granted. + Handle the URL callbacked by IAM/CheckIn after authorization flow. + It gets us the code we need for the authorization flow, and we can map it to the corresponding device flow using the user_code in the cookie/session. @@ -492,7 +493,7 @@ async def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> A async def finished(self, **kwargs: Any) -> Any: """Finished. - This is the final step of the device flow. + Mark the final step of the device flow. 
:return: any :rtype: any @@ -539,7 +540,9 @@ async def finished(self, **kwargs: Any) -> Any: async def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: """Get Refresh Tokens. - Get all refresh tokens for the user. If the user has the ``proxy_management`` property, then + Get all refresh tokens for the user. + + If the user has the ``proxy_management`` property, then the subject is not used to filter the refresh tokens. :return: list of any @@ -587,7 +590,9 @@ async def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: async def revoke_refresh_token_by_jti(self, jti: str, **kwargs: Any) -> str: """Revoke Refresh Token By Jti. - Revoke a refresh token. If the user has the ``proxy_management`` property, then + Revoke a refresh token. + + If the user has the ``proxy_management`` property, then the subject is not used to filter the refresh tokens. :param jti: Required. @@ -697,6 +702,7 @@ async def initiate_authorization_flow( """Initiate Authorization Flow. Initiate the authorization flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to perform a authorization code flow. @@ -1831,6 +1837,7 @@ async def patch_metadata( """Patch Metadata. Update job metadata such as UserPriority, HeartBeatTime, JobType, etc. + The argument are all the attributes/parameters of a job (except the ID). :param body: Required. @@ -1848,6 +1855,7 @@ async def patch_metadata(self, body: IO[bytes], *, content_type: str = "applicat """Patch Metadata. Update job metadata such as UserPriority, HeartBeatTime, JobType, etc. + The argument are all the attributes/parameters of a job (except the ID). :param body: Required. @@ -1865,6 +1873,7 @@ async def patch_metadata(self, body: Union[Dict[str, _models.JobMetaData], IO[by """Patch Metadata. Update job metadata such as UserPriority, HeartBeatTime, JobType, etc. + The argument are all the attributes/parameters of a job (except the ID). :param body: Is either a {str: JobMetaData} type or a IO[bytes] type. Required. 
@@ -1930,8 +1939,9 @@ async def search( ) -> List[Dict[str, Any]]: """Search. - Creates a search query to the job database. This search can be based on - different parameters, such as jobID, status, owner, etc. + Create a search query to the job database. + + This search can be based on different parameters, such as jobID, status, owner, etc. **Possibilities** @@ -1969,8 +1979,9 @@ async def search( ) -> List[Dict[str, Any]]: """Search. - Creates a search query to the job database. This search can be based on - different parameters, such as jobID, status, owner, etc. + Create a search query to the job database. + + This search can be based on different parameters, such as jobID, status, owner, etc. **Possibilities** @@ -2007,8 +2018,9 @@ async def search( ) -> List[Dict[str, Any]]: """Search. - Creates a search query to the job database. This search can be based on - different parameters, such as jobID, status, owner, etc. + Create a search query to the job database. + + This search can be based on different parameters, such as jobID, status, owner, etc. **Possibilities** @@ -2094,8 +2106,9 @@ async def summary( ) -> Any: """Summary. - Group jobs by a specific list of parameters. Returns an array of n-uplets, where each n-uplet - contains the + Group jobs by a specific list of parameters. + + Returns an array of n-uplets, where each n-uplet contains the values of the grouping parameters and the number of jobs that match those values. Body parameters: @@ -2118,8 +2131,9 @@ async def summary( async def summary(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> Any: """Summary. - Group jobs by a specific list of parameters. Returns an array of n-uplets, where each n-uplet - contains the + Group jobs by a specific list of parameters. + + Returns an array of n-uplets, where each n-uplet contains the values of the grouping parameters and the number of jobs that match those values. 
Body parameters: @@ -2142,8 +2156,9 @@ async def summary(self, body: IO[bytes], *, content_type: str = "application/jso async def summary(self, body: Union[_models.SummaryParams, IO[bytes]], **kwargs: Any) -> Any: """Summary. - Group jobs by a specific list of parameters. Returns an array of n-uplets, where each n-uplet - contains the + Group jobs by a specific list of parameters. + + Returns an array of n-uplets, where each n-uplet contains the values of the grouping parameters and the number of jobs that match those values. Body parameters: diff --git a/diracx-client/src/diracx/client/_generated/operations/_operations.py b/diracx-client/src/diracx/client/_generated/operations/_operations.py index e1b1e8c7a..fa20edd1f 100644 --- a/diracx-client/src/diracx/client/_generated/operations/_operations.py +++ b/diracx-client/src/diracx/client/_generated/operations/_operations.py @@ -879,7 +879,8 @@ def initiate_device_flow(self, *, client_id: str, scope: str, **kwargs: Any) -> def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: """Do Device Flow. - This is called as the verification URI for the device flow. + Serve as the verification URI for the device flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to perform a authorization code flow. @@ -936,8 +937,8 @@ def do_device_flow(self, *, user_code: str, **kwargs: Any) -> Any: def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: """Finish Device Flow. - This the url callbacked by IAM/CheckIn after the authorization - flow was granted. + Handle the URL callbacked by IAM/CheckIn after authorization flow. + It gets us the code we need for the authorization flow, and we can map it to the corresponding device flow using the user_code in the cookie/session. @@ -993,7 +994,7 @@ def finish_device_flow(self, *, code: str, state: str, **kwargs: Any) -> Any: def finished(self, **kwargs: Any) -> Any: """Finished. - This is the final step of the device flow. 
+ Mark the final step of the device flow. :return: any :rtype: any @@ -1040,7 +1041,9 @@ def finished(self, **kwargs: Any) -> Any: def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: """Get Refresh Tokens. - Get all refresh tokens for the user. If the user has the ``proxy_management`` property, then + Get all refresh tokens for the user. + + If the user has the ``proxy_management`` property, then the subject is not used to filter the refresh tokens. :return: list of any @@ -1088,7 +1091,9 @@ def get_refresh_tokens(self, **kwargs: Any) -> List[Any]: def revoke_refresh_token_by_jti(self, jti: str, **kwargs: Any) -> str: """Revoke Refresh Token By Jti. - Revoke a refresh token. If the user has the ``proxy_management`` property, then + Revoke a refresh token. + + If the user has the ``proxy_management`` property, then the subject is not used to filter the refresh tokens. :param jti: Required. @@ -1198,6 +1203,7 @@ def initiate_authorization_flow( """Initiate Authorization Flow. Initiate the authorization flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to perform a authorization code flow. @@ -2330,6 +2336,7 @@ def patch_metadata( """Patch Metadata. Update job metadata such as UserPriority, HeartBeatTime, JobType, etc. + The argument are all the attributes/parameters of a job (except the ID). :param body: Required. @@ -2347,6 +2354,7 @@ def patch_metadata(self, body: IO[bytes], *, content_type: str = "application/js """Patch Metadata. Update job metadata such as UserPriority, HeartBeatTime, JobType, etc. + The argument are all the attributes/parameters of a job (except the ID). :param body: Required. @@ -2366,6 +2374,7 @@ def patch_metadata( # pylint: disable=inconsistent-return-statements """Patch Metadata. Update job metadata such as UserPriority, HeartBeatTime, JobType, etc. + The argument are all the attributes/parameters of a job (except the ID). :param body: Is either a {str: JobMetaData} type or a IO[bytes] type. Required. 
@@ -2431,8 +2440,9 @@ def search( ) -> List[Dict[str, Any]]: """Search. - Creates a search query to the job database. This search can be based on - different parameters, such as jobID, status, owner, etc. + Create a search query to the job database. + + This search can be based on different parameters, such as jobID, status, owner, etc. **Possibilities** @@ -2470,8 +2480,9 @@ def search( ) -> List[Dict[str, Any]]: """Search. - Creates a search query to the job database. This search can be based on - different parameters, such as jobID, status, owner, etc. + Create a search query to the job database. + + This search can be based on different parameters, such as jobID, status, owner, etc. **Possibilities** @@ -2508,8 +2519,9 @@ def search( ) -> List[Dict[str, Any]]: """Search. - Creates a search query to the job database. This search can be based on - different parameters, such as jobID, status, owner, etc. + Create a search query to the job database. + + This search can be based on different parameters, such as jobID, status, owner, etc. **Possibilities** @@ -2593,8 +2605,9 @@ def search( def summary(self, body: _models.SummaryParams, *, content_type: str = "application/json", **kwargs: Any) -> Any: """Summary. - Group jobs by a specific list of parameters. Returns an array of n-uplets, where each n-uplet - contains the + Group jobs by a specific list of parameters. + + Returns an array of n-uplets, where each n-uplet contains the values of the grouping parameters and the number of jobs that match those values. Body parameters: @@ -2617,8 +2630,9 @@ def summary(self, body: _models.SummaryParams, *, content_type: str = "applicati def summary(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> Any: """Summary. - Group jobs by a specific list of parameters. Returns an array of n-uplets, where each n-uplet - contains the + Group jobs by a specific list of parameters. 
+ + Returns an array of n-uplets, where each n-uplet contains the values of the grouping parameters and the number of jobs that match those values. Body parameters: @@ -2641,8 +2655,9 @@ def summary(self, body: IO[bytes], *, content_type: str = "application/json", ** def summary(self, body: Union[_models.SummaryParams, IO[bytes]], **kwargs: Any) -> Any: """Summary. - Group jobs by a specific list of parameters. Returns an array of n-uplets, where each n-uplet - contains the + Group jobs by a specific list of parameters. + + Returns an array of n-uplets, where each n-uplet contains the values of the grouping parameters and the number of jobs that match those values. Body parameters: diff --git a/diracx-core/src/diracx/core/config/sources.py b/diracx-core/src/diracx/core/config/sources.py index 1cf9cc622..5fa37085a 100644 --- a/diracx-core/src/diracx/core/config/sources.py +++ b/diracx-core/src/diracx/core/config/sources.py @@ -1,4 +1,4 @@ -"""This module implements the logic of the configuration server side. +"""Module to implement the logic of the configuration server side. This is where all the backend abstraction and the caching logic takes place. """ @@ -44,7 +44,7 @@ def is_running_in_async_context(): def _apply_default_scheme(value: str) -> str: - """Applies the default git+file:// scheme if not present.""" + """Apply the default git+file:// scheme if not present.""" if isinstance(value, str) and "://" not in value: value = f"git+file://{value}" return value @@ -88,14 +88,18 @@ def __init__(self, *, backend_url: ConfigSourceUrl) -> None: @abstractmethod def latest_revision(self) -> tuple[str, datetime]: - """Must return: + """Abstract method. + + Must return: * a unique hash as a string, representing the last version * a datetime object corresponding to when the version dates. """ @abstractmethod def read_raw(self, hexsha: str, modified: datetime) -> Config: - """Return the Config object that corresponds to the + """Abstract method. 
+ + Return the Config object that corresponds to the specific hash The `modified` parameter is just added as a attribute to the config. """ @@ -114,10 +118,7 @@ def create(cls): def create_from_url( cls, *, backend_url: ConfigSourceUrl | Path | str ) -> "ConfigSource": - """Factory method to produce a concrete instance depending on - the backend URL scheme. - - """ + """Produce a concrete instance depending on the backend URL scheme.""" url = TypeAdapter(ConfigSourceUrl).validate_python(str(backend_url)) return cls.__registry[url.scheme](backend_url=url) @@ -234,7 +235,8 @@ def get_git_branch_from_url(self, backend_url: ConfigSourceUrl) -> str: class LocalGitConfigSource(BaseGitConfigSource): - """The configuration is stored on a local git repository + """The configuration is stored on a local git repository. + When running on multiple servers, the filesystem must be shared. """ diff --git a/diracx-core/src/diracx/core/extensions.py b/diracx-core/src/diracx/core/extensions.py index b29d3e048..2343deb8d 100644 --- a/diracx-core/src/diracx/core/extensions.py +++ b/diracx-core/src/diracx/core/extensions.py @@ -63,7 +63,7 @@ def select_from_extension(*, group: str, name: str | None = None) -> list[EntryP def supports_extending( group: str, name: str ) -> Callable[[Callable[P, T]], Callable[P, T]]: - """Decorator to replace a function with an extension implementation. + """Replace a function with an extension implementation. This decorator looks for an entry point in the specified group and name, and if found, replaces the decorated function with the extension's implementation. diff --git a/diracx-core/src/diracx/core/models.py b/diracx-core/src/diracx/core/models.py index c71ef7392..937950c72 100644 --- a/diracx-core/src/diracx/core/models.py +++ b/diracx-core/src/diracx/core/models.py @@ -1,6 +1,6 @@ -"""Models are used to define the data structure of the requests and responses -for the DiracX API. 
They are shared between the client components (cli, api) and -services components (db, logic, routers). +"""Models used to define the data structure of the requests and responses for the DiracX API. + +They are shared between the client components (cli, api) and services components (db, logic, routers). """ from __future__ import annotations diff --git a/diracx-core/src/diracx/core/preferences.py b/diracx-core/src/diracx/core/preferences.py index d2e0cc111..6df6f7104 100644 --- a/diracx-core/src/diracx/core/preferences.py +++ b/diracx-core/src/diracx/core/preferences.py @@ -58,5 +58,5 @@ def validate_log_level(cls, v: str): @lru_cache(maxsize=1) def get_diracx_preferences() -> DiracxPreferences: - """Caches the preferences.""" + """Cache the preferences.""" return DiracxPreferences() diff --git a/diracx-core/src/diracx/core/properties.py b/diracx-core/src/diracx/core/properties.py index c75c014ec..cdb664c76 100644 --- a/diracx-core/src/diracx/core/properties.py +++ b/diracx-core/src/diracx/core/properties.py @@ -1,6 +1,4 @@ -"""Just listing the possible Properties -This module contains list of Properties that can be assigned to users and groups. 
-""" +"""Module containing the list of Properties that can be assigned to users and groups.""" from __future__ import annotations diff --git a/diracx-core/src/diracx/core/settings.py b/diracx-core/src/diracx/core/settings.py index 2568c13bc..7e2992aa2 100644 --- a/diracx-core/src/diracx/core/settings.py +++ b/diracx-core/src/diracx/core/settings.py @@ -107,7 +107,7 @@ def __init__(self, data: str): def _apply_default_scheme(value: str) -> str: - """Applies the default file:// scheme if not present.""" + """Apply the default file:// scheme if not present.""" if isinstance(value, str) and "://" not in value: value = f"file://{value}" return value @@ -125,7 +125,7 @@ def create(cls) -> Self: @contextlib.asynccontextmanager async def lifetime_function(self) -> AsyncIterator[None]: - """A context manager that can be used to run code at startup and shutdown.""" + """Context manager to run code at startup and shutdown.""" yield diff --git a/diracx-core/src/diracx/core/utils.py b/diracx-core/src/diracx/core/utils.py index ea487fcda..87816d4bd 100644 --- a/diracx-core/src/diracx/core/utils.py +++ b/diracx-core/src/diracx/core/utils.py @@ -237,7 +237,7 @@ def get(self, key: str, populate_func: Callable[[], T], blocking: bool = True) - raise NotReadyError(f"Cache key {key} is not ready yet.") def _work(self, key: str, populate_func: Callable[[], Any]) -> None: - """Internal method to execute the populate_func and update caches. + """Execute the populate_func and update caches. This method is intended to be run in a separate thread. 
It calls the populate_func, stores the result in both caches, and cleans up the diff --git a/diracx-db/src/diracx/db/os/utils.py b/diracx-db/src/diracx/db/os/utils.py index ea5d292e6..f65971452 100644 --- a/diracx-db/src/diracx/db/os/utils.py +++ b/diracx-db/src/diracx/db/os/utils.py @@ -30,7 +30,7 @@ class OpenSearchDBUnavailableError(DBUnavailableError, OpenSearchDBError): class BaseOSDB(metaclass=ABCMeta): - """This should be the base class of all the OpenSearch DiracX DBs. + """Base class of all the OpenSearch DiracX DBs. The details covered here should be handled automatically by the service and task machinery of DiracX and this documentation exists for informational @@ -121,26 +121,19 @@ def available_urls(cls) -> dict[str, dict[str, Any]]: @classmethod def session(cls) -> Self: - """This is just a fake method such that the Dependency overwrite has - a hash to use. - """ + """Fake method such that the Dependency overwrite has a hash to use.""" raise NotImplementedError("This should never be called") @property def client(self) -> AsyncOpenSearch: - """Just a getter for _client, making sure we entered - the context manager. - """ + """Just a getter for _client, making sure we entered the context manager.""" if self._client is None: raise RuntimeError(f"{self.__class__} was used before entering") return self._client @contextlib.asynccontextmanager async def client_context(self) -> AsyncIterator[None]: - """Context manage to manage the client lifecycle. - This is called when starting fastapi. - - """ + """Context manager to manage the client lifecycle. This is called when starting fastapi.""" assert self._client is None, "client_context cannot be nested" async with AsyncOpenSearch(**self._connection_kwargs) as self._client: try: @@ -150,6 +143,7 @@ async def client_context(self) -> AsyncIterator[None]: async def ping(self): """Check whether the connection to the DB is still working. 
+ We could enable the ``pre_ping`` in the engine, but this would be ran at every query. """ @@ -159,7 +153,8 @@ async def ping(self): ) async def __aenter__(self): - """This is entered on every request. + """Entered on every request. + At the moment it does nothing, however, we keep it here in case we ever want to use OpenSearch equivalent of a transaction. """ diff --git a/diracx-db/src/diracx/db/sql/auth/db.py b/diracx-db/src/diracx/db/sql/auth/db.py index 5bf384cf6..871bbf854 100644 --- a/diracx-db/src/diracx/db/sql/auth/db.py +++ b/diracx-db/src/diracx/db/sql/auth/db.py @@ -38,7 +38,9 @@ class AuthDB(BaseSQLDB): @classmethod async def post_create(cls, conn: AsyncConnection) -> None: - """Create partitions if it is a MySQL DB and it does not have + """Create partitions. + + If it is a MySQL DB and it does not have it yet and the table does not have any data yet. We do this as a post_create step as sqlalchemy does not support partition so well. @@ -115,7 +117,7 @@ async def device_flow_validate_user_code( return (await self.conn.execute(stmt)).scalar_one() async def get_device_flow(self, device_code: str): - """:raises: NoResultFound""" + """raise: NoResultFound.""" # The with_for_update # prevents that the token is retrieved # multiple time concurrently @@ -137,7 +139,7 @@ async def update_device_flow_status( async def device_flow_insert_id_token( self, user_code: str, id_token: dict[str, str], max_validity: int ) -> None: - """:raises: AuthorizationError if no such code or status not pending""" + """raise: AuthorizationError if no such code or status not pending.""" stmt = update(DeviceFlows) stmt = stmt.where( DeviceFlows.user_code == user_code, @@ -211,7 +213,8 @@ async def insert_authorization_flow( async def authorization_flow_insert_id_token( self, uuid: str, id_token: dict[str, str], max_validity: int ) -> tuple[str, str]: - """Returns code, redirect_uri + """Return code, redirect_uri. + :raises: AuthorizationError if no such uuid or status not pending. 
""" # Hash the code to avoid leaking information @@ -268,8 +271,9 @@ async def insert_refresh_token( subject: str, scope: str, ) -> None: - """Insert a refresh token in the DB as well as user attributes - required to generate access tokens. + """Insert a refresh token in the DB. + + As well as user attributes required to generate access tokens. """ # Insert values into the DB stmt = insert(RefreshTokens).values( diff --git a/diracx-db/src/diracx/db/sql/auth/schema.py b/diracx-db/src/diracx/db/sql/auth/schema.py index 26e5b5bfe..5ae6b0d8e 100644 --- a/diracx-db/src/diracx/db/sql/auth/schema.py +++ b/diracx-db/src/diracx/db/sql/auth/schema.py @@ -23,7 +23,8 @@ class FlowStatus(Enum): - """The normal flow is + """The normal flow. + PENDING -> READY -> DONE Pending is upon insertion Ready/Error is set in response to IdP @@ -71,8 +72,7 @@ class AuthorizationFlows(Base): class RefreshTokenStatus(Enum): - """The normal flow is - CREATED -> REVOKED. + """CREATED -> REVOKED. Note1: There is no EXPIRED status as it can be calculated from a creation time Note2: As part of the refresh token rotation mechanism, the revoked token should be retained @@ -87,8 +87,9 @@ class RefreshTokenStatus(Enum): class RefreshTokens(Base): - """Store attributes bound to a refresh token, as well as specific user attributes - that might be then used to generate access tokens. + """Store attributes bound to a refresh token. + + Also specific user attributes that might be then used to generate access tokens. """ __tablename__ = "RefreshTokens" diff --git a/diracx-db/src/diracx/db/sql/dummy/db.py b/diracx-db/src/diracx/db/sql/dummy/db.py index 5735b43bb..76e8db07b 100644 --- a/diracx-db/src/diracx/db/sql/dummy/db.py +++ b/diracx-db/src/diracx/db/sql/dummy/db.py @@ -10,8 +10,7 @@ class DummyDB(BaseSQLDB): - """This DummyDB is just to illustrate some important aspect of writing - DB classes in DiracX. + """Illustrate some important aspect of writing DB classes in DiracX. 
It is mostly pure SQLAlchemy, with a few convention diff --git a/diracx-db/src/diracx/db/sql/job/db.py b/diracx-db/src/diracx/db/sql/job/db.py index ec31e7e9b..327bb2ff0 100644 --- a/diracx-db/src/diracx/db/sql/job/db.py +++ b/diracx-db/src/diracx/db/sql/job/db.py @@ -72,7 +72,7 @@ async def search( ) async def create_job(self, compressed_original_jdl: str): - """Used to insert a new job with original JDL. Returns inserted job id.""" + """Insert a new job with original JDL. Returns inserted job id.""" result = await self.conn.execute( JobJDLs.__table__.insert().values( JDL="", @@ -115,7 +115,7 @@ async def insert_job_attributes(self, jobs_to_update: dict[int, dict]): ) async def update_job_jdls(self, jdls_to_update: dict[int, str]): - """Used to update the JDL, typically just after inserting the original JDL, or rescheduling, for example.""" + """Update the JDL, typically just after inserting the original JDL, or rescheduling, for example.""" await self.conn.execute( JobJDLs.__table__.update().where( JobJDLs.__table__.c.JobID == bindparam("b_JobID") @@ -200,7 +200,8 @@ async def set_job_commands(self, commands: list[tuple[int, str, str]]) -> None: async def set_properties( self, properties: dict[int, dict[str, Any]], update_timestamp: bool = False ) -> int: - """Update the job parameters + """Update the job parameters. + All the jobs must update the same properties. :param properties: {job_id : {prop1: val1, prop2:val2} diff --git a/diracx-db/src/diracx/db/sql/job_logging/db.py b/diracx-db/src/diracx/db/sql/job_logging/db.py index f225ed95c..2e824f80d 100644 --- a/diracx-db/src/diracx/db/sql/job_logging/db.py +++ b/diracx-db/src/diracx/db/sql/job_logging/db.py @@ -61,8 +61,9 @@ async def insert_records( ) async def get_records(self, job_ids: list[int]) -> dict[int, JobStatusReturn]: - """Returns a Status,MinorStatus,ApplicationStatus,StatusTime,Source tuple - for each record found for job specified by its jobID in historical order. 
+ """Return a Status,MinorStatus,ApplicationStatus,StatusTime,Source tuple. + + For each record found for job specified by its jobID in historical order. """ # We could potentially use a group_by here, but we need to post-process the # results later. @@ -140,7 +141,8 @@ async def delete_records(self, job_ids: list[int]): async def get_wms_time_stamps( self, job_ids: Iterable[int] ) -> dict[int, dict[str, datetime]]: - """Get TimeStamps for job MajorState transitions for multiple jobs at once + """Get TimeStamps for job MajorState transitions for multiple jobs at once. + return a {JobID: {State:timestamp}} dictionary. """ result: defaultdict[int, dict[str, datetime]] = defaultdict(dict) diff --git a/diracx-db/src/diracx/db/sql/job_logging/schema.py b/diracx-db/src/diracx/db/sql/job_logging/schema.py index 2366448f2..df4ba7e8f 100644 --- a/diracx-db/src/diracx/db/sql/job_logging/schema.py +++ b/diracx-db/src/diracx/db/sql/job_logging/schema.py @@ -11,8 +11,9 @@ class MagicEpochDateTime(TypeDecorator): - """A SQLAlchemy type that stores a datetime as a numeric value representing the - seconds elapsed since MAGIC_EPOC_NUMBER. The underlying column is defined as + """A SQLAlchemy type to store a datetime as a numeric value. + + Representing the seconds elapsed since MAGIC_EPOC_NUMBER. The underlying column is defined as Numeric(12,3) which provides a fixed-precision representation. """ @@ -23,6 +24,7 @@ class MagicEpochDateTime(TypeDecorator): def process_bind_param(self, value, dialect): """Convert a Python datetime to a numeric value: (timestamp - MAGIC_EPOC_NUMBER). + The result is rounded to three decimal places. """ if value is None: @@ -39,8 +41,9 @@ def process_bind_param(self, value, dialect): ) def process_result_value(self, value, dialect): - """Convert the numeric database value back into a Python datetime by reversing the - stored difference (adding MAGIC_EPOC_NUMBER). + """Convert the numeric database value back into a Python datetime. 
+ + Reversing the stored difference (adding MAGIC_EPOC_NUMBER). """ if value is None: return None diff --git a/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py b/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py index 0b7425e8e..ac33c25f8 100644 --- a/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py +++ b/diracx-db/src/diracx/db/sql/sandbox_metadata/db.py @@ -121,7 +121,7 @@ async def update_sandbox_last_access_time(self, se_name: str, pfn: str) -> None: ) async def sandbox_is_assigned(self, pfn: str, se_name: str) -> bool | None: - """Checks if a sandbox exists and has been assigned.""" + """Check if a sandbox exists and has been assigned.""" stmt: Executable = select(SandBoxes.Assigned).where( SandBoxes.SEName == se_name, SandBoxes.SEPFN == pfn ) diff --git a/diracx-db/src/diracx/db/sql/utils/base.py b/diracx-db/src/diracx/db/sql/utils/base.py index efb30d642..c66b0a054 100644 --- a/diracx-db/src/diracx/db/sql/utils/base.py +++ b/diracx-db/src/diracx/db/sql/utils/base.py @@ -42,7 +42,7 @@ class SQLDBUnavailableError(DBUnavailableError, SQLDBError): class BaseSQLDB(metaclass=ABCMeta): - """This should be the base class of all the SQL DiracX DBs. + """The base class of all the SQL DiracX DBs. The details covered here should be handled automatically by the service and task machinery of DiracX and this documentation exists for informational @@ -218,7 +218,7 @@ async def __aenter__(self) -> Self: return self async def __aexit__(self, exc_type, exc, tb): - """This is called when exiting a route. + """Exit a route. If there was no exception, the changes in the DB are committed. Otherwise, they are rolled back. 
diff --git a/diracx-db/tests/auth/test_refresh_token.py b/diracx-db/tests/auth/test_refresh_token.py index 52b23650d..28d6dfe9d 100644 --- a/diracx-db/tests/auth/test_refresh_token.py +++ b/diracx-db/tests/auth/test_refresh_token.py @@ -75,8 +75,9 @@ async def test_get(auth_db: AuthDB): async def test_get_user_refresh_tokens(auth_db: AuthDB): - """Insert refresh tokens belonging to different users in the DB and - get the refresh tokens of each user. + """Insert refresh tokens belonging to different users in the DB. + + Get the refresh tokens of each user. """ # Two users sub1 = "subject1" @@ -174,8 +175,9 @@ async def test_revoke_user_refresh_tokens(auth_db: AuthDB): async def test_revoke_and_get_user_refresh_tokens(auth_db: AuthDB): - """Insert refresh tokens belonging to a user, revoke one of them and - make sure that only the active tokens appear. + """Insert refresh tokens belonging to a user. + + Revoke one of them and make sure that only the active tokens appear. """ # User sub = "subject" @@ -222,8 +224,9 @@ async def test_revoke_and_get_user_refresh_tokens(auth_db: AuthDB): async def test_get_refresh_tokens(auth_db: AuthDB): - """Insert refresh tokens belonging to different users in the DB and - get the refresh tokens. + """Insert refresh tokens belonging to different users in the DB. + + Get the refresh tokens. 
""" # Two users sub1 = "subject1" diff --git a/diracx-db/tests/opensearch/test_connection.py b/diracx-db/tests/opensearch/test_connection.py index 1e61760fc..cf281913d 100644 --- a/diracx-db/tests/opensearch/test_connection.py +++ b/diracx-db/tests/opensearch/test_connection.py @@ -7,7 +7,7 @@ async def _ensure_db_unavailable(db: DummyOSDB): - """Helper function which raises an exception if we manage to connect to the DB.""" + """Raise an exception if we manage to connect to the DB.""" async with db.client_context(): async with db: with pytest.raises(OpenSearchDBUnavailableError): diff --git a/diracx-db/tests/opensearch/test_search.py b/diracx-db/tests/opensearch/test_search.py index 93998ac3e..f49da63d8 100644 --- a/diracx-db/tests/opensearch/test_search.py +++ b/diracx-db/tests/opensearch/test_search.py @@ -40,7 +40,7 @@ @contextlib.asynccontextmanager async def resolve_fixtures_hack(request, name): - """Resolves a fixture from `diracx.testing.osdb`. + """Resolve a fixture from `diracx.testing.osdb`. This is a hack to work around pytest-asyncio not supporting the use of request.getfixturevalue() from within an async function. diff --git a/diracx-logic/src/diracx/logic/auth/device_flow.py b/diracx-logic/src/diracx/logic/auth/device_flow.py index 75cbea09a..39be694da 100644 --- a/diracx-logic/src/diracx/logic/auth/device_flow.py +++ b/diracx-logic/src/diracx/logic/auth/device_flow.py @@ -50,7 +50,7 @@ async def do_device_flow( available_properties: set[SecurityProperty], settings: AuthSettings, ) -> str: - """This is called as the verification URI for the device flow.""" + """Verify URI for the device flow.""" # Here we make sure the user_code actually exists scope = await auth_db.device_flow_validate_user_code( user_code, settings.device_flow_expiration_seconds @@ -82,9 +82,7 @@ async def finish_device_flow( config: Config, settings: AuthSettings, ): - """This the url callbacked by IAM/CheckIn after the authorization - flow was granted. 
- """ + """Check that the url callbacked by IAM/CheckIn after the authorization flow was granted.""" decrypted_state = decrypt_state(state, settings.state_key.fernet) assert decrypted_state["grant_type"] == GrantType.device_code diff --git a/diracx-logic/src/diracx/logic/auth/management.py b/diracx-logic/src/diracx/logic/auth/management.py index 5a02bd27f..b86cb1ee7 100644 --- a/diracx-logic/src/diracx/logic/auth/management.py +++ b/diracx-logic/src/diracx/logic/auth/management.py @@ -1,4 +1,4 @@ -"""This module contains the auth management functions.""" +"""Module containing the auth management functions.""" from __future__ import annotations @@ -15,8 +15,9 @@ async def get_refresh_tokens( auth_db: AuthDB, subject: str | None, ) -> list: - """Get all refresh tokens bound to a given subject. If there is no subject, then - all the refresh tokens are retrieved. + """Get all refresh tokens bound to a given subject. + + If there is no subject, then all the refresh tokens are retrieved. """ return await auth_db.get_user_refresh_tokens(subject) diff --git a/diracx-logic/src/diracx/logic/auth/token.py b/diracx-logic/src/diracx/logic/auth/token.py index e1db7eb09..62dba8c7c 100644 --- a/diracx-logic/src/diracx/logic/auth/token.py +++ b/diracx-logic/src/diracx/logic/auth/token.py @@ -282,7 +282,7 @@ async def exchange_token( legacy_exchange: bool = False, include_refresh_token: bool = True, ) -> tuple[AccessTokenPayload, RefreshTokenPayload | None]: - """Method called to exchange the OIDC token for a DIRAC generated access token.""" + """Exchange the OIDC token for a DIRAC generated access token.""" # Extract dirac attributes from the OIDC scope parsed_scope = parse_and_validate_scope(scope, config, available_properties) vo = parsed_scope["vo"] diff --git a/diracx-logic/src/diracx/logic/auth/utils.py b/diracx-logic/src/diracx/logic/auth/utils.py index 184fc9f6a..a240eabce 100644 --- a/diracx-logic/src/diracx/logic/auth/utils.py +++ 
b/diracx-logic/src/diracx/logic/auth/utils.py @@ -209,7 +209,8 @@ async def verify_dirac_refresh_token( refresh_token: str, settings: AuthSettings, ) -> tuple[UUID, float, bool]: - """Verify dirac user token and return a UserInfo class + """Verify the dirac refresh token. + Used for each API endpoint. """ claims = read_token( @@ -235,7 +236,8 @@ def get_allowed_user_properties(config: Config, sub, vo: str) -> set[SecurityPro def parse_and_validate_scope( scope: str, config: Config, available_properties: set[SecurityProperty] ) -> ScopeInfoDict: - """Check: + """Parse and validate the scope. + * At most one VO * At most one group * group belongs to VO diff --git a/diracx-logic/src/diracx/logic/jobs/sandboxes.py b/diracx-logic/src/diracx/logic/jobs/sandboxes.py index 43856b849..1ac68ad24 100644 --- a/diracx-logic/src/diracx/logic/jobs/sandboxes.py +++ b/diracx-logic/src/diracx/logic/jobs/sandboxes.py @@ -229,7 +229,7 @@ async def delete_batch_and_log( objects: list[S3Object], semaphore: asyncio.Semaphore, ) -> None: - """Helper function to delete a batch of objects and log the result.""" + """Delete a batch of objects and log the result.""" async with semaphore: await s3_bulk_delete_with_retry( settings.s3_client, settings.bucket_name, objects diff --git a/diracx-logic/src/diracx/logic/jobs/status.py b/diracx-logic/src/diracx/logic/jobs/status.py index a6cf73c3b..a11ffd7c4 100644 --- a/diracx-logic/src/diracx/logic/jobs/status.py +++ b/diracx-logic/src/diracx/logic/jobs/status.py @@ -107,6 +107,7 @@ async def set_job_statuses( additional_attributes: dict[int, dict[str, str]] = {}, ) -> SetJobStatusReturn: """Set various status fields for job specified by its jobId. + Set only the last status in the JobDB, updating all the status logging information in the JobLoggingDB. The status dict has datetime as a key and status information dictionary as values. 
diff --git a/diracx-logic/src/diracx/logic/jobs/utils.py b/diracx-logic/src/diracx/logic/jobs/utils.py index f067dc122..78d2f0ee0 100644 --- a/diracx-logic/src/diracx/logic/jobs/utils.py +++ b/diracx-logic/src/diracx/logic/jobs/utils.py @@ -56,7 +56,8 @@ async def check_and_prepare_job( job_db: JobDB, config: Config, ): - """Check Consistency of Submitted JDL and set some defaults + """Check Consistency of Submitted JDL and set some defaults. + Prepare subJDL with Job Requirements. """ # Create configuration dict for DIRACCommon function from diracx config diff --git a/diracx-logic/src/diracx/logic/task_queues/priority.py b/diracx-logic/src/diracx/logic/task_queues/priority.py index 522c75697..11500a5be 100644 --- a/diracx-logic/src/diracx/logic/task_queues/priority.py +++ b/diracx-logic/src/diracx/logic/task_queues/priority.py @@ -107,7 +107,7 @@ async def calculate_priority( """ def is_background(tq_priority: float, allow_bg_tqs: bool) -> bool: - """A TQ is background if its priority is below a threshold and background TQs are allowed.""" + """Determine if a TQ is background based on its priority and allowed background TQs.""" return tq_priority <= 0.1 and allow_bg_tqs # Calculate Sum of priorities of non background TQs diff --git a/diracx-logic/tests/jobs/test_status.py b/diracx-logic/tests/jobs/test_status.py index 371a0e8e7..4dd0cc04f 100644 --- a/diracx-logic/tests/jobs/test_status.py +++ b/diracx-logic/tests/jobs/test_status.py @@ -105,7 +105,8 @@ async def valid_job_id(job_db: JobDB) -> int: async def test_patch_metadata_updates_attributes_and_parameters( job_db: JobDB, job_parameters_db: _MockJobParametersDB, valid_job_id: int ): - """Patch metadata mixing: + """Patch metadata mixing. 
+ - Attribute only (UserPriority) - Attribute + parameter (JobType) - Parameter only (CPUNormalizationFactor) diff --git a/diracx-routers/src/diracx/routers/access_policies.py b/diracx-routers/src/diracx/routers/access_policies.py index 6aeb977d9..79f71b4dc 100644 --- a/diracx-routers/src/diracx/routers/access_policies.py +++ b/diracx-routers/src/diracx/routers/access_policies.py @@ -51,12 +51,12 @@ class BaseAccessPolicy(metaclass=ABCMeta): @classmethod def check(cls) -> Self: - """Placeholder which is in the dependency override.""" + """Provide a placeholder in the dependency override.""" raise NotImplementedError("This should never be called") @classmethod def all_used_access_policies(cls) -> dict[str, "BaseAccessPolicy"]: - """Returns the list of classes that are actually called. + """Return the list of classes that are actually called. This should be overridden by the dependency_override. """ @@ -80,7 +80,8 @@ def available_implementations(cls, access_policy_name: str): @staticmethod @abstractmethod async def policy(policy_name: str, user_info: AuthorizedUserInfo, /): - """This is the method to be implemented in child classes. + """Implement the method in child classes. + It should always take an AuthorizedUserInfo parameter, which is passed by check_permissions. The rest is whatever the policy actually needs. There are rules to write it: @@ -95,8 +96,9 @@ async def policy(policy_name: str, user_info: AuthorizedUserInfo, /): def enrich_tokens( access_payload: AccessTokenPayload, refresh_payload: RefreshTokenPayload | None ) -> tuple[dict, dict]: - """This method is called when issuing a token, and can add whatever - content it wants inside the access or refresh payload. + """Add content to access or refresh payload when issuing a token. + + Content can be whatever is desired inside the access or refresh payload. 
:param access_payload: access token payload :param refresh_payload: refresh token payload @@ -111,8 +113,8 @@ def check_permissions( user_info: Annotated[AuthorizedUserInfo, Depends(verify_dirac_access_token)], dev_settings: DevelopmentSettings, ): - """This wrapper just calls the actual implementation, but also makes sure - that the policy has been called. + """Call the actual policy implementation and ensure it has been invoked. + If not, diracx will abruptly crash. It is violent, but necessary to make sure that it gets noticed :-). @@ -123,7 +125,7 @@ def check_permissions( @functools.wraps(policy) async def wrapped_policy(**kwargs): - """This wrapper is just to update the has_been_called flag.""" + """Update the has_been_called flag.""" nonlocal has_been_called has_been_called = True return await policy(policy_name, user_info, **kwargs) @@ -149,8 +151,8 @@ async def wrapped_policy(**kwargs): def open_access(f): - """Decorator to put around the route that are part of a DiracxRouter - that are expected not to do any access policy check. + """Decorate routes that are part of a DiracxRouter and do not require access policy checks. + The presence of a token will still be checked if the router has require_auth to True. This is useful to allow the CI to detect routes which may have forgotten to have an access check. diff --git a/diracx-routers/src/diracx/routers/auth/authorize_code_flow.py b/diracx-routers/src/diracx/routers/auth/authorize_code_flow.py index d88b2c739..5344a9d35 100644 --- a/diracx-routers/src/diracx/routers/auth/authorize_code_flow.py +++ b/diracx-routers/src/diracx/routers/auth/authorize_code_flow.py @@ -49,6 +49,7 @@ async def initiate_authorization_flow( settings: AuthSettings, ) -> responses.RedirectResponse: """Initiate the authorization flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to perform a authorization code flow. 
diff --git a/diracx-routers/src/diracx/routers/auth/device_flow.py b/diracx-routers/src/diracx/routers/auth/device_flow.py index b00a352c0..6d8257b5f 100644 --- a/diracx-routers/src/diracx/routers/auth/device_flow.py +++ b/diracx-routers/src/diracx/routers/auth/device_flow.py @@ -89,7 +89,8 @@ async def do_device_flow( available_properties: AvailableSecurityProperties, settings: AuthSettings, ) -> RedirectResponse: - """This is called as the verification URI for the device flow. + """Serve as the verification URI for the device flow. + It will redirect to the actual OpenID server (IAM, CheckIn) to perform a authorization code flow. @@ -118,8 +119,8 @@ async def finish_device_flow( config: Config, settings: AuthSettings, ) -> RedirectResponse: - """This the url callbacked by IAM/CheckIn after the authorization - flow was granted. + """Handle the URL callbacked by IAM/CheckIn after authorization flow. + It gets us the code we need for the authorization flow, and we can map it to the corresponding device flow using the user_code in the cookie/session. @@ -151,7 +152,7 @@ async def finish_device_flow( @router.get("/device/complete/finished") def finished(response: Response): - """This is the final step of the device flow.""" + """Mark the final step of the device flow.""" response.body = b"