From d3e7ae752bfb9fa1f20a69eadfd8711adc518628 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Wed, 9 Jul 2025 04:49:28 +0300 Subject: [PATCH 01/30] Skipping _db prefix when using /_open/auth (#57) --- arangoasync/connection.py | 12 +++++++++--- tests/test_connection.py | 6 ++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/arangoasync/connection.py b/arangoasync/connection.py index f404248..21fa756 100644 --- a/arangoasync/connection.py +++ b/arangoasync/connection.py @@ -160,11 +160,16 @@ def compress_request(self, request: Request) -> bool: return result - async def process_request(self, request: Request) -> Response: + async def process_request( + self, + request: Request, + skip_db_prefix: bool = False, + ) -> Response: """Process request, potentially trying multiple hosts. Args: request (Request): Request object. + skip_db_prefix (bool): If `True`, do not prepend the database endpoint. Returns: Response: Response object. @@ -173,7 +178,8 @@ async def process_request(self, request: Request) -> Response: ConnectionAbortedError: If it can't connect to host(s) within limit. """ - request.endpoint = f"{self._db_endpoint}{request.endpoint}" + if not skip_db_prefix: + request.endpoint = f"{self._db_endpoint}{request.endpoint}" host_index = self._host_resolver.get_host_index() for tries in range(self._host_resolver.max_tries): try: @@ -376,7 +382,7 @@ async def refresh_token(self) -> None: ) try: - resp = await self.process_request(request) + resp = await self.process_request(request, skip_db_prefix=True) except ClientConnectionAbortedError as e: raise JWTRefreshError(str(e)) from e except ServerConnectionError as e: diff --git a/tests/test_connection.py b/tests/test_connection.py index 568815c..e053e58 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -223,6 +223,12 @@ async def test_JwtConnection_ping_success( status_code = await connection1.ping() assert status_code == 200 + # Refresh the token + await connection3.refresh_token() + status_code = await connection1.ping() + assert status_code == 200 + assert connection3.token != connection1.token + @pytest.mark.asyncio async def test_JwtSuperuserConnection_ping_success( From 52493cb1a500ab340a720286f2b7b7bb547191e1 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 14 Jul 2025 10:15:27 +0300 Subject: [PATCH 02/30] Refactored request to skip db prefix (#58) --- arangoasync/connection.py | 7 +++---- arangoasync/database.py | 4 +++- arangoasync/request.py | 5 +++++ 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/arangoasync/connection.py b/arangoasync/connection.py index 21fa756..5fa6363 100644 --- a/arangoasync/connection.py +++ b/arangoasync/connection.py @@ -163,13 +163,11 @@ def compress_request(self, request: Request) -> bool: async def process_request( self, request: Request, - skip_db_prefix: bool = False, ) -> Response: """Process request, potentially trying multiple hosts. Args: request (Request): Request object. - skip_db_prefix (bool): If `True`, do not prepend the database endpoint. Returns: Response: Response object. @@ -178,7 +176,7 @@ async def process_request( ConnectionAbortedError: If it can't connect to host(s) within limit. 
""" - if not skip_db_prefix: + if request.prefix_needed: request.endpoint = f"{self._db_endpoint}{request.endpoint}" host_index = self._host_resolver.get_host_index() for tries in range(self._host_resolver.max_tries): @@ -379,10 +377,11 @@ async def refresh_token(self) -> None: method=Method.POST, endpoint="/_open/auth", data=auth.encode("utf-8"), + prefix_needed=False, ) try: - resp = await self.process_request(request, skip_db_prefix=True) + resp = await self.process_request(request) except ClientConnectionAbortedError as e: raise JWTRefreshError(str(e)) from e except ServerConnectionError as e: diff --git a/arangoasync/database.py b/arangoasync/database.py index dbcc319..c188290 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -2022,7 +2022,9 @@ async def reload_jwt_secrets(self) -> Result[Json]: References: - `hot-reload-the-jwt-secrets-from-disk `__ """ # noqa: 501 - request = Request(method=Method.POST, endpoint="/_admin/server/jwt") + request = Request( + method=Method.POST, endpoint="/_admin/server/jwt", prefix_needed=False + ) def response_handler(resp: Response) -> Json: if not resp.is_success: diff --git a/arangoasync/request.py b/arangoasync/request.py index 951c9e9..6bd629d 100644 --- a/arangoasync/request.py +++ b/arangoasync/request.py @@ -33,6 +33,7 @@ class Request: params (dict | None): URL parameters. data (bytes | None): Request payload. auth (Auth | None): Authentication. + prefix_needed (bool): Whether the request needs a prefix (e.g., database name). Attributes: method (Method): HTTP method. @@ -41,6 +42,7 @@ class Request: params (dict | None): URL parameters. data (bytes | None): Request payload. auth (Auth | None): Authentication. + prefix_needed (bool): Whether the request needs a prefix (e.g., database name). """ __slots__ = ( @@ -50,6 +52,7 @@ class Request: "params", "data", "auth", + "prefix_needed", ) def __init__( @@ -60,6 +63,7 @@ def __init__( params: Optional[Params] = None, data: Optional[bytes | str] = None, auth: Optional[Auth] = None, + prefix_needed: bool = True, ) -> None: self.method: Method = method self.endpoint: str = endpoint @@ -67,6 +71,7 @@ def __init__( self.params: Params = params or dict() self.data: Optional[bytes | str] = data self.auth: Optional[Auth] = auth + self.prefix_needed = prefix_needed def normalized_headers(self) -> RequestHeaders: """Normalize request headers. 
From e014bf850b91f10c4b09b092dcdeced6c871b0f8 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 28 Jul 2025 19:45:41 +0200 Subject: [PATCH 03/30] Collection API completeness (#59) * Adding /figures support * Adding support for /responsibleShard * Adding support for /shards * Adding support for /revision * Adding support for /checksum * Added support for /key-generators * Skipping part of test in 3.11 * Adding configure method * Adding renaming method * recalculate-the-document-count-of-a-collection * compact-a-collection --- arangoasync/collection.py | 322 +++++++++++++++++++++++++++++++++++++- arangoasync/database.py | 24 +++ arangoasync/exceptions.py | 48 +++++- arangoasync/typings.py | 142 ++++++++++++++++- tests/test_collection.py | 83 +++++++++- tests/test_database.py | 15 +- tests/test_typings.py | 60 +++++++ 7 files changed, 677 insertions(+), 17 deletions(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 810ee06..e3d12ee 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -16,7 +16,16 @@ HTTP_PRECONDITION_FAILED, ) from arangoasync.exceptions import ( + CollectionChecksumError, + CollectionCompactError, + CollectionConfigureError, CollectionPropertiesError, + CollectionRecalculateCountError, + CollectionRenameError, + CollectionResponsibleShardError, + CollectionRevisionError, + CollectionShardsError, + CollectionStatisticsError, CollectionTruncateError, DocumentCountError, DocumentDeleteError, @@ -40,7 +49,9 @@ from arangoasync.result import Result from arangoasync.serialization import Deserializer, Serializer from arangoasync.typings import ( + CollectionInfo, CollectionProperties, + CollectionStatistics, IndexProperties, Json, Jsons, @@ -481,6 +492,26 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) + async def recalculate_count(self) -> None: + """Recalculate the document count. + + Raises: + CollectionRecalculateCountError: If re-calculation fails. + + References: + - `recalculate-the-document-count-of-a-collection `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint=f"/_api/collection/{self.name}/recalculateCount", + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise CollectionRecalculateCountError(resp, request) + + await self._executor.execute(request, response_handler) + async def properties(self) -> Result[CollectionProperties]: """Return the full properties of the current collection. @@ -501,7 +532,129 @@ async def properties(self) -> Result[CollectionProperties]: def response_handler(resp: Response) -> CollectionProperties: if not resp.is_success: raise CollectionPropertiesError(resp, request) - return CollectionProperties(self._executor.deserialize(resp.raw_body)) + return CollectionProperties(self.deserializer.loads(resp.raw_body)) + + return await self._executor.execute(request, response_handler) + + async def configure( + self, + cache_enabled: Optional[bool] = None, + computed_values: Optional[Jsons] = None, + replication_factor: Optional[int | str] = None, + schema: Optional[Json] = None, + wait_for_sync: Optional[bool] = None, + write_concern: Optional[int] = None, + ) -> Result[CollectionProperties]: + """Changes the properties of a collection. + + Only the provided attributes are updated. + + Args: + cache_enabled (bool | None): Whether the in-memory hash cache + for documents should be enabled for this collection. 
+ computed_values (list | None): An optional list of objects, each + representing a computed value. + replication_factor (int | None): In a cluster, this attribute determines + how many copies of each shard are kept on different DB-Servers. + For SatelliteCollections, it needs to be the string "satellite". + schema (dict | None): The configuration of the collection-level schema + validation for documents. + wait_for_sync (bool | None): If set to `True`, the data is synchronized + to disk before returning from a document create, update, replace or + removal operation. + write_concern (int | None): Determines how many copies of each shard are + required to be in sync on the different DB-Servers. + + Returns: + CollectionProperties: Properties. + + Raises: + CollectionConfigureError: If configuration fails. + + References: + - `change-the-properties-of-a-collection `__ + """ # noqa: E501 + data: Json = {} + if cache_enabled is not None: + data["cacheEnabled"] = cache_enabled + if computed_values is not None: + data["computedValues"] = computed_values + if replication_factor is not None: + data["replicationFactor"] = replication_factor + if schema is not None: + data["schema"] = schema + if wait_for_sync is not None: + data["waitForSync"] = wait_for_sync + if write_concern is not None: + data["writeConcern"] = write_concern + request = Request( + method=Method.PUT, + endpoint=f"/_api/collection/{self.name}/properties", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> CollectionProperties: + if not resp.is_success: + raise CollectionConfigureError(resp, request) + return CollectionProperties(self.deserializer.loads(resp.raw_body)) + + return await self._executor.execute(request, response_handler) + + async def rename(self, new_name: str) -> None: + """Rename the collection. + + Renames may not be reflected immediately in async execution, batch + execution or transactions. It is recommended to initialize new API + wrappers after a rename. + + Note: + Renaming collections is not supported in cluster deployments. + + Args: + new_name (str): New collection name. + + Raises: + CollectionRenameError: If rename fails. + + References: + - `rename-a-collection `__ + """ # noqa: E501 + data: Json = {"name": new_name} + request = Request( + method=Method.PUT, + endpoint=f"/_api/collection/{self.name}/rename", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise CollectionRenameError(resp, request) + self._name = new_name + self._id_prefix = f"{new_name}/" + + await self._executor.execute(request, response_handler) + + async def compact(self) -> Result[CollectionInfo]: + """Compact a collection. + + Returns: + CollectionInfo: Collection information. + + Raises: + CollectionCompactError: If compaction fails. + + References: + - `compact-a-collection `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint=f"/_api/collection/{self.name}/compact", + ) + + def response_handler(resp: Response) -> CollectionInfo: + if not resp.is_success: + raise CollectionCompactError(resp, request) + return CollectionInfo(self.deserializer.loads(resp.raw_body)) return await self._executor.execute(request, response_handler) @@ -552,7 +705,10 @@ async def count(self) -> Result[int]: Raises: DocumentCountError: If retrieval fails. 
- """ + + References: + - `get-the-document-count-of-a-collection `__ + """ # noqa: E501 request = Request( method=Method.GET, endpoint=f"/_api/collection/{self.name}/count" ) @@ -565,6 +721,158 @@ def response_handler(resp: Response) -> int: return await self._executor.execute(request, response_handler) + async def statistics(self) -> Result[CollectionStatistics]: + """Get additional statistical information about the collection. + + Returns: + CollectionStatistics: Collection statistics. + + Raises: + CollectionStatisticsError: If retrieval fails. + + References: + - `get-the-collection-statistics `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint=f"/_api/collection/{self.name}/figures", + ) + + def response_handler(resp: Response) -> CollectionStatistics: + if not resp.is_success: + raise CollectionStatisticsError(resp, request) + return CollectionStatistics(self.deserializer.loads(resp.raw_body)) + + return await self._executor.execute(request, response_handler) + + async def responsible_shard(self, document: Json) -> Result[str]: + """Return the ID of the shard responsible for given document. + + If the document does not exist, return the shard that would be + responsible. + + Args: + document (dict): Document body with "_key" field. + + Returns: + str: Shard ID. + + Raises: + CollectionResponsibleShardError: If retrieval fails. + + References: + - `get-the-responsible-shard-for-a-document `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint=f"/_api/collection/{self.name}/responsibleShard", + data=self.serializer.dumps(document), + ) + + def response_handler(resp: Response) -> str: + if resp.is_success: + body = self.deserializer.loads(resp.raw_body) + return cast(str, body["shardId"]) + raise CollectionResponsibleShardError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def shards(self, details: Optional[bool] = None) -> Result[Json]: + """Return collection shards and properties. + + Available only in a cluster setup. + + Args: + details (bool | None): If set to `True`, include responsible + servers for these shards. + + Returns: + dict: Collection shards. + + Raises: + CollectionShardsError: If retrieval fails. + + References: + - `get-the-shard-ids-of-a-collection `__ + """ # noqa: E501 + params: Params = {} + if details is not None: + params["details"] = details + + request = Request( + method=Method.GET, + endpoint=f"/_api/collection/{self.name}/shards", + params=params, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise CollectionShardsError(resp, request) + return cast(Json, self.deserializer.loads(resp.raw_body)["shards"]) + + return await self._executor.execute(request, response_handler) + + async def revision(self) -> Result[str]: + """Return collection revision. + + Returns: + str: Collection revision. + + Raises: + CollectionRevisionError: If retrieval fails. + + References: + - `get-the-collection-revision-id `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint=f"/_api/collection/{self.name}/revision", + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise CollectionRevisionError(resp, request) + return cast(str, self.deserializer.loads(resp.raw_body)["revision"]) + + return await self._executor.execute(request, response_handler) + + async def checksum( + self, with_rev: Optional[bool] = None, with_data: Optional[bool] = None + ) -> Result[str]: + """Calculate collection checksum. 
+ + Args: + with_rev (bool | None): Include document revisions in checksum calculation. + with_data (bool | None): Include document data in checksum calculation. + + Returns: + str: Collection checksum. + + Raises: + CollectionChecksumError: If retrieval fails. + + References: + - `get-the-collection-checksum `__ + """ # noqa: E501 + params: Params = {} + if with_rev is not None: + params["withRevision"] = with_rev + if with_data is not None: + params["withData"] = with_data + + request = Request( + method=Method.GET, + endpoint=f"/_api/collection/{self.name}/checksum", + params=params, + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise CollectionChecksumError(resp, request) + return cast(str, self.deserializer.loads(resp.raw_body)["checksum"]) + + return await self._executor.execute(request, response_handler) + async def has( self, document: str | Json, @@ -1444,9 +1752,9 @@ async def insert( def response_handler(resp: Response) -> bool | Json: if resp.is_success: - if silent is True: + if silent: return True - return self._executor.deserialize(resp.raw_body) + return self.deserializer.loads(resp.raw_body) msg: Optional[str] = None if resp.status_code == HTTP_BAD_PARAMETER: msg = ( @@ -1551,7 +1859,7 @@ def response_handler(resp: Response) -> bool | Json: if resp.is_success: if silent is True: return True - return self._executor.deserialize(resp.raw_body) + return self.deserializer.loads(resp.raw_body) msg: Optional[str] = None if resp.status_code == HTTP_PRECONDITION_FAILED: raise DocumentRevisionError(resp, request) @@ -1641,7 +1949,7 @@ def response_handler(resp: Response) -> bool | Json: if resp.is_success: if silent is True: return True - return self._executor.deserialize(resp.raw_body) + return self.deserializer.loads(resp.raw_body) msg: Optional[str] = None if resp.status_code == HTTP_PRECONDITION_FAILED: raise DocumentRevisionError(resp, request) @@ -1726,7 +2034,7 @@ def response_handler(resp: Response) -> bool | Json: if resp.is_success: if silent is True: return True - return self._executor.deserialize(resp.raw_body) + return self.deserializer.loads(resp.raw_body) msg: Optional[str] = None if resp.status_code == HTTP_PRECONDITION_FAILED: raise DocumentRevisionError(resp, request) diff --git a/arangoasync/database.py b/arangoasync/database.py index c188290..578222f 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -22,6 +22,7 @@ AsyncJobListError, CollectionCreateError, CollectionDeleteError, + CollectionKeyGeneratorsError, CollectionListError, DatabaseCreateError, DatabaseDeleteError, @@ -695,6 +696,29 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) + async def key_generators(self) -> Result[List[str]]: + """Returns the available key generators for collections. + + Returns: + list: List of available key generators. + + Raises: + CollectionKeyGeneratorsError: If retrieval fails. 
+ + References: + - `get-the-available-key-generators `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_api/key-generators") + + def response_handler(resp: Response) -> List[str]: + if not resp.is_success: + raise CollectionKeyGeneratorsError(resp, request) + return cast( + List[str], self.deserializer.loads(resp.raw_body)["keyGenerators"] + ) + + return await self._executor.execute(request, response_handler) + async def has_document( self, document: str | Json, diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index e052fd4..5de6ea4 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -183,10 +183,26 @@ class CollectionCreateError(ArangoServerError): """Failed to create collection.""" +class CollectionChecksumError(ArangoServerError): + """Failed to retrieve collection checksum.""" + + +class CollectionConfigureError(ArangoServerError): + """Failed to configure collection properties.""" + + +class CollectionCompactError(ArangoServerError): + """Failed to compact collection.""" + + class CollectionDeleteError(ArangoServerError): """Failed to delete collection.""" +class CollectionKeyGeneratorsError(ArangoServerError): + """Failed to retrieve key generators.""" + + class CollectionListError(ArangoServerError): """Failed to retrieve collections.""" @@ -195,18 +211,42 @@ class CollectionPropertiesError(ArangoServerError): """Failed to retrieve collection properties.""" -class ClientConnectionAbortedError(ArangoClientError): - """The connection was aborted.""" +class CollectionRecalculateCountError(ArangoServerError): + """Failed to recalculate document count.""" -class ClientConnectionError(ArangoClientError): - """The request was unable to reach the server.""" +class CollectionRenameError(ArangoServerError): + """Failed to rename collection.""" + + +class CollectionResponsibleShardError(ArangoServerError): + """Failed to retrieve responsible shard.""" + + +class CollectionRevisionError(ArangoServerError): + """Failed to retrieve collection revision.""" + + +class CollectionShardsError(ArangoServerError): + """Failed to retrieve collection shards.""" + + +class CollectionStatisticsError(ArangoServerError): + """Failed to retrieve collection statistics.""" class CollectionTruncateError(ArangoServerError): """Failed to truncate collection.""" +class ClientConnectionAbortedError(ArangoClientError): + """The connection was aborted.""" + + +class ClientConnectionError(ArangoClientError): + """The request was unable to reach the server.""" + + class CursorCloseError(ArangoServerError): """Failed to delete the cursor result from server.""" diff --git a/arangoasync/typings.py b/arangoasync/typings.py index 280e27e..d49411d 100644 --- a/arangoasync/typings.py +++ b/arangoasync/typings.py @@ -791,8 +791,6 @@ def compatibility_formatter(data: Json) -> Json: result["deleted"] = data["deleted"] if "syncByRevision" in data: result["sync_by_revision"] = data["syncByRevision"] - if "tempObjectId" in data: - result["temp_object_id"] = data["tempObjectId"] if "usesRevisionsAsDocumentIds" in data: result["rev_as_id"] = data["usesRevisionsAsDocumentIds"] if "isDisjoint" in data: @@ -819,6 +817,146 @@ def format(self, formatter: Optional[Formatter] = None) -> Json: return self.compatibility_formatter(self._data) +class CollectionStatistics(JsonWrapper): + """Statistical information about the collection. + + Example: + .. 
code-block:: json + + { + "figures" : { + "indexes" : { + "count" : 1, + "size" : 1234 + }, + "documentsSize" : 5601, + "cacheInUse" : false, + "cacheSize" : 0, + "cacheUsage" : 0, + "engine" : { + "documents" : 1, + "indexes" : [ + { + "type" : "primary", + "id" : 0, + "count" : 1 + } + ] + } + }, + "writeConcern" : 1, + "waitForSync" : false, + "usesRevisionsAsDocumentIds" : true, + "syncByRevision" : true, + "statusString" : "loaded", + "id" : "69123", + "isSmartChild" : false, + "schema" : null, + "name" : "products", + "type" : 2, + "status" : 3, + "count" : 1, + "cacheEnabled" : false, + "isSystem" : false, + "internalValidatorType" : 0, + "globallyUniqueId" : "hB7C02EE43DCE/69123", + "keyOptions" : { + "allowUserKeys" : true, + "type" : "traditional", + "lastValue" : 69129 + }, + "computedValues" : null, + "objectId" : "69124" + } + + References: + - `get-the-collection-statistics `__ + """ # noqa: E501 + + def __init__(self, data: Json) -> None: + super().__init__(data) + + @property + def figures(self) -> Json: + return cast(Json, self._data.get("figures")) + + @property + def write_concern(self) -> Optional[int]: + return self._data.get("writeConcern") + + @property + def wait_for_sync(self) -> Optional[bool]: + return self._data.get("waitForSync") + + @property + def use_revisions_as_document_ids(self) -> Optional[bool]: + return self._data.get("usesRevisionsAsDocumentIds") + + @property + def sync_by_revision(self) -> Optional[bool]: + return self._data.get("syncByRevision") + + @property + def status_string(self) -> Optional[str]: + return self._data.get("statusString") + + @property + def id(self) -> str: + return self._data["id"] # type: ignore[no-any-return] + + @property + def is_smart_child(self) -> bool: + return self._data["isSmartChild"] # type: ignore[no-any-return] + + @property + def schema(self) -> Optional[Json]: + return self._data.get("schema") + + @property + def name(self) -> str: + return self._data["name"] # type: ignore[no-any-return] + + @property + def type(self) -> CollectionType: + return CollectionType.from_int(self._data["type"]) + + @property + def status(self) -> CollectionStatus: + return CollectionStatus.from_int(self._data["status"]) + + @property + def count(self) -> int: + return self._data["count"] # type: ignore[no-any-return] + + @property + def cache_enabled(self) -> Optional[bool]: + return self._data.get("cacheEnabled") + + @property + def is_system(self) -> bool: + return self._data["isSystem"] # type: ignore[no-any-return] + + @property + def internal_validator_type(self) -> Optional[int]: + return self._data.get("internalValidatorType") + + @property + def globally_unique_id(self) -> str: + return self._data["globallyUniqueId"] # type: ignore[no-any-return] + + @property + def key_options(self) -> KeyOptions: + return KeyOptions(self._data["keyOptions"]) + + @property + def computed_values(self) -> Optional[Json]: + return self._data.get("computedValues") + + @property + def object_id(self) -> str: + return self._data["objectId"] # type: ignore[no-any-return] + + class IndexProperties(JsonWrapper): """Properties of an index. 
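A short sketch of how the new figures, revision, and checksum endpoints surface through the collection API (illustrative only; it assumes an already-initialized `StandardCollection` passed in as `col`):

.. code-block:: python

    from arangoasync.collection import StandardCollection
    from arangoasync.typings import CollectionStatistics


    async def show_collection_stats(col: StandardCollection) -> None:
        # CollectionStatistics wraps the raw /figures response.
        stats: CollectionStatistics = await col.statistics()
        print(stats.name, stats.count, stats.figures["indexes"]["count"])

        # Revision and checksum are returned as plain strings.
        revision = await col.revision()
        checksum = await col.checksum(with_rev=True, with_data=True)
        print(revision, checksum)
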
diff --git a/tests/test_collection.py b/tests/test_collection.py index d9214dd..fb8d7ba 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -4,7 +4,16 @@ from arangoasync.errno import DATA_SOURCE_NOT_FOUND, INDEX_NOT_FOUND from arangoasync.exceptions import ( + CollectionChecksumError, + CollectionCompactError, + CollectionConfigureError, CollectionPropertiesError, + CollectionRecalculateCountError, + CollectionRenameError, + CollectionResponsibleShardError, + CollectionRevisionError, + CollectionShardsError, + CollectionStatisticsError, CollectionTruncateError, DocumentCountError, IndexCreateError, @@ -13,6 +22,7 @@ IndexListError, IndexLoadError, ) +from tests.helpers import generate_col_name def test_collection_attributes(db, doc_col): @@ -22,7 +32,9 @@ def test_collection_attributes(db, doc_col): @pytest.mark.asyncio -async def test_collection_misc_methods(doc_col, bad_col): +async def test_collection_misc_methods(doc_col, bad_col, docs, cluster): + doc = await doc_col.insert(docs[0]) + # Properties properties = await doc_col.properties() assert properties.name == doc_col.name @@ -31,6 +43,75 @@ async def test_collection_misc_methods(doc_col, bad_col): with pytest.raises(CollectionPropertiesError): await bad_col.properties() + # Configure + wfs = not properties.wait_for_sync + new_properties = await doc_col.configure(wait_for_sync=wfs) + assert new_properties.wait_for_sync == wfs + with pytest.raises(CollectionConfigureError): + await bad_col.configure(wait_for_sync=wfs) + + # Statistics + statistics = await doc_col.statistics() + assert statistics.name == doc_col.name + assert "figures" in statistics + with pytest.raises(CollectionStatisticsError): + await bad_col.statistics() + + # Shards + if cluster: + shard = await doc_col.responsible_shard(doc) + assert isinstance(shard, str) + with pytest.raises(CollectionResponsibleShardError): + await bad_col.responsible_shard(doc) + shards = await doc_col.shards(details=True) + assert isinstance(shards, dict) + with pytest.raises(CollectionShardsError): + await bad_col.shards() + + # Revision + revision = await doc_col.revision() + assert isinstance(revision, str) + with pytest.raises(CollectionRevisionError): + await bad_col.revision() + + # Checksum + checksum = await doc_col.checksum(with_rev=True, with_data=True) + assert isinstance(checksum, str) + with pytest.raises(CollectionChecksumError): + await bad_col.checksum() + + # Recalculate count + with pytest.raises(CollectionRecalculateCountError): + await bad_col.recalculate_count() + await doc_col.recalculate_count() + + # Compact + with pytest.raises(CollectionCompactError): + await bad_col.compact() + res = await doc_col.compact() + assert res.name == doc_col.name + + +@pytest.mark.asyncio +async def test_collection_rename(cluster, db, bad_col, docs): + if cluster: + pytest.skip("Renaming collections is not supported in cluster deployments.") + + with pytest.raises(CollectionRenameError): + await bad_col.rename("new_name") + + col_name = generate_col_name() + new_name = generate_col_name() + try: + await db.create_collection(col_name) + col = db.collection(col_name) + await col.rename(new_name) + assert col.name == new_name + doc = await col.insert(docs[0]) + assert col.get_col_name(doc) == new_name + finally: + db.delete_collection(new_name, ignore_missing=True) + @pytest.mark.asyncio async def test_collection_index(doc_col, bad_col, cluster): diff --git a/tests/test_database.py b/tests/test_database.py index eb7daa3..7058ac1 100644 --- a/tests/test_database.py +++ 
b/tests/test_database.py @@ -1,11 +1,13 @@ import asyncio import pytest +from packaging import version from arangoasync.collection import StandardCollection from arangoasync.exceptions import ( CollectionCreateError, CollectionDeleteError, + CollectionKeyGeneratorsError, CollectionListError, DatabaseCreateError, DatabaseDeleteError, @@ -21,7 +23,7 @@ @pytest.mark.asyncio -async def test_database_misc_methods(sys_db, db, bad_db, cluster): +async def test_database_misc_methods(sys_db, db, bad_db, cluster, db_version): # Status status = await sys_db.status() assert status["server"] == "arango" @@ -50,11 +52,18 @@ async def test_database_misc_methods(sys_db, db, bad_db, cluster): await bad_db.reload_jwt_secrets() # Version - version = await sys_db.version() - assert version["version"].startswith("3.") + v = await sys_db.version() + assert v["version"].startswith("3.") with pytest.raises(ServerVersionError): await bad_db.version() + # key generators + if db_version >= version.parse("3.12.0"): + key_generators = await db.key_generators() + assert isinstance(key_generators, list) + with pytest.raises(CollectionKeyGeneratorsError): + await bad_db.key_generators() + @pytest.mark.asyncio async def test_create_drop_database( diff --git a/tests/test_typings.py b/tests/test_typings.py index fd04fa1..3b4e5e2 100644 --- a/tests/test_typings.py +++ b/tests/test_typings.py @@ -2,6 +2,7 @@ from arangoasync.typings import ( CollectionInfo, + CollectionStatistics, CollectionStatus, CollectionType, EdgeDefinitionOptions, @@ -386,3 +387,62 @@ def test_EdgeDefinitionOptions(): ) assert options.satellites == ["col1", "col2"] + + +def test_CollectionStatistics(): + data = { + "figures": { + "indexes": {"count": 1, "size": 1234}, + "documentsSize": 5601, + "cacheInUse": False, + "cacheSize": 0, + "cacheUsage": 0, + }, + "writeConcern": 1, + "waitForSync": False, + "usesRevisionsAsDocumentIds": True, + "syncByRevision": True, + "statusString": "loaded", + "id": "69123", + "isSmartChild": False, + "schema": None, + "name": "products", + "type": 2, + "status": 3, + "count": 1, + "cacheEnabled": False, + "isSystem": False, + "internalValidatorType": 0, + "globallyUniqueId": "hB7C02EE43DCE/69123", + "keyOptions": { + "allowUserKeys": True, + "type": "traditional", + "lastValue": 69129, + }, + "computedValues": None, + "objectId": "69124", + } + + stats = CollectionStatistics(data) + + assert stats.figures == data["figures"] + assert stats.write_concern == 1 + assert stats.wait_for_sync is False + assert stats.use_revisions_as_document_ids is True + assert stats.sync_by_revision is True + assert stats.status_string == "loaded" + assert stats.id == "69123" + assert stats.is_smart_child is False + assert stats.schema is None + assert stats.name == "products" + assert stats.type == CollectionType.DOCUMENT + assert stats.status == CollectionStatus.LOADED + assert stats.count == 1 + assert stats.cache_enabled is False + assert stats.is_system is False + assert stats.internal_validator_type == 0 + assert stats.globally_unique_id == "hB7C02EE43DCE/69123" + assert isinstance(stats.key_options, KeyOptions) + assert stats.key_options["type"] == "traditional" + assert stats.computed_values is None + assert stats.object_id == "69124" From 325c4e08268d22d91cf92793d0266f0edd9e1734 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 4 Aug 2025 13:53:36 +0800 Subject: [PATCH 04/30] Hot Backup API (#60) * Hot Backup API * Hot Backup docs * Hot Backup only tested in cluster * Hot Backup only tested for enterprise * Minimize backup 
tests --- arangoasync/backup.py | 295 ++++++++++++++++++++++++++++++++++++++ arangoasync/database.py | 10 ++ arangoasync/exceptions.py | 24 ++++ docs/backup.rst | 78 ++++++++++ docs/index.rst | 1 + docs/specs.rst | 3 + tests/test_backup.py | 57 ++++++++ 7 files changed, 468 insertions(+) create mode 100644 arangoasync/backup.py create mode 100644 docs/backup.rst create mode 100644 tests/test_backup.py diff --git a/arangoasync/backup.py b/arangoasync/backup.py new file mode 100644 index 0000000..75a26a6 --- /dev/null +++ b/arangoasync/backup.py @@ -0,0 +1,295 @@ +__all__ = ["Backup"] + +from numbers import Number +from typing import Optional, cast + +from arangoasync.exceptions import ( + BackupCreateError, + BackupDeleteError, + BackupDownloadError, + BackupGetError, + BackupRestoreError, + BackupUploadError, +) +from arangoasync.executor import ApiExecutor +from arangoasync.request import Method, Request +from arangoasync.response import Response +from arangoasync.result import Result +from arangoasync.serialization import Deserializer, Serializer +from arangoasync.typings import Json, Jsons + + +class Backup: + """Backup API wrapper.""" + + def __init__(self, executor: ApiExecutor) -> None: + self._executor = executor + + @property + def serializer(self) -> Serializer[Json]: + """Return the serializer.""" + return self._executor.serializer + + @property + def deserializer(self) -> Deserializer[Json, Jsons]: + """Return the deserializer.""" + return self._executor.deserializer + + async def get(self, backup_id: Optional[str] = None) -> Result[Json]: + """Return backup details. + + Args: + backup_id (str | None): If set, the returned list is restricted to the + backup with the given id. + + Returns: + dict: Backup details. + + Raises: + BackupGetError: If the operation fails. + + References: + - `list-backups `__ + """ # noqa: E501 + data: Json = {} + if backup_id is not None: + data["id"] = backup_id + + request = Request( + method=Method.POST, + endpoint="/_admin/backup/list", + data=self.serializer.dumps(data) if data else None, + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise BackupGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) + + async def create( + self, + label: Optional[str] = None, + allow_inconsistent: Optional[bool] = None, + force: Optional[bool] = None, + timeout: Optional[Number] = None, + ) -> Result[Json]: + """Create a backup when the global write lock can be obtained. + + Args: + label (str | None): Label for this backup. If not specified, a UUID is used. + allow_inconsistent (bool | None): Allow inconsistent backup when the global + transaction lock cannot be acquired before timeout. + force (bool | None): Forcefully abort all running transactions to ensure a + consistent backup when the global transaction lock cannot be + acquired before timeout. Default (and highly recommended) value + is `False`. + timeout (float | None): The time in seconds that the operation tries to + get a consistent snapshot. + + Returns: + dict: Backup information. + + Raises: + BackupCreateError: If the backup creation fails. 
+ + References: + - `create-backup `__ + """ # noqa: E501 + data: Json = {} + if label is not None: + data["label"] = label + if allow_inconsistent is not None: + data["allowInconsistent"] = allow_inconsistent + if force is not None: + data["force"] = force + if timeout is not None: + data["timeout"] = timeout + + request = Request( + method=Method.POST, + endpoint="/_admin/backup/create", + data=self.serializer.dumps(data), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise BackupCreateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) + + async def restore(self, backup_id: str) -> Result[Json]: + """Restore a local backup. + + Args: + backup_id (str): Backup ID. + + Returns: + dict: Result of the restore operation. + + Raises: + BackupRestoreError: If the restore operation fails. + + References: + - `restore-backup `__ + """ # noqa: E501 + data: Json = {"id": backup_id} + request = Request( + method=Method.POST, + endpoint="/_admin/backup/restore", + data=self.serializer.dumps(data), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise BackupRestoreError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) + + async def delete(self, backup_id: str) -> None: + """Delete a backup. + + Args: + backup_id (str): Backup ID. + + Raises: + BackupDeleteError: If the delete operation fails. + + References: + - `delete-backup `__ + """ # noqa: E501 + data: Json = {"id": backup_id} + request = Request( + method=Method.POST, + endpoint="/_admin/backup/delete", + data=self.serializer.dumps(data), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise BackupDeleteError(resp, request) + + await self._executor.execute(request, response_handler) + + async def upload( + self, + backup_id: Optional[str] = None, + repository: Optional[str] = None, + abort: Optional[bool] = None, + config: Optional[Json] = None, + upload_id: Optional[str] = None, + ) -> Result[Json]: + """Manage backup uploads. + + Args: + backup_id (str | None): Backup ID used for scheduling an upload. Mutually + exclusive with parameter **upload_id**. + repository (str | None): Remote repository URL(e.g. "local://tmp/backups"). + abort (str | None): If set to `True`, running upload is aborted. Used with + parameter **upload_id**. + config (dict | None): Remote repository configuration. Required for scheduling + an upload and mutually exclusive with parameter **upload_id**. + upload_id (str | None): Upload ID. Mutually exclusive with parameters + **backup_id**, **repository**, and **config**. + + Returns: + dict: Upload details. + + Raises: + BackupUploadError: If upload operation fails. 
+ + References: + - `upload-a-backup-to-a-remote-repository `__ + """ # noqa: E501 + data: Json = {} + if upload_id is not None: + data["uploadId"] = upload_id + if backup_id is not None: + data["id"] = backup_id + if repository is not None: + data["remoteRepository"] = repository + if abort is not None: + data["abort"] = abort + if config is not None: + data["config"] = config + + request = Request( + method=Method.POST, + endpoint="/_admin/backup/upload", + data=self.serializer.dumps(data), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise BackupUploadError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) + + async def download( + self, + backup_id: Optional[str] = None, + repository: Optional[str] = None, + abort: Optional[bool] = None, + config: Optional[Json] = None, + download_id: Optional[str] = None, + ) -> Result[Json]: + """Manage backup downloads. + + Args: + backup_id (str | None): Backup ID used for scheduling a download. Mutually + exclusive with parameter **download_id**. + repository (str | None): Remote repository URL (e.g. "local://tmp/backups"). + abort (bool | None): If set to `True`, running download is aborted. + config (dict | None): Remote repository configuration. Required for scheduling + a download and mutually exclusive with parameter **download_id**. + download_id (str | None): Download ID. Mutually exclusive with parameters + **backup_id**, **repository**, and **config**. + + Returns: + dict: Download details. + + Raises: + BackupDownloadError: If the download operation fails. + + References: + - `download-a-backup-from-a-remote-repository `__ + """ # noqa: E501 + data: Json = {} + if download_id is not None: + data["downloadId"] = download_id + if backup_id is not None: + data["id"] = backup_id + if repository is not None: + data["remoteRepository"] = repository + if abort is not None: + data["abort"] = abort + if config is not None: + data["config"] = config + + request = Request( + method=Method.POST, + endpoint="/_admin/backup/download", + data=self.serializer.dumps(data), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise BackupDownloadError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) diff --git a/arangoasync/database.py b/arangoasync/database.py index 578222f..b048b4f 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -10,6 +10,7 @@ from warnings import warn from arangoasync.aql import AQL +from arangoasync.backup import Backup from arangoasync.collection import Collection, StandardCollection from arangoasync.connection import Connection from arangoasync.errno import HTTP_FORBIDDEN, HTTP_NOT_FOUND @@ -172,6 +173,15 @@ def aql(self) -> AQL: """ return AQL(self._executor) + @property + def backup(self) -> Backup: + """Return Backup API wrapper. + + Returns: + arangoasync.backup.Backup: Backup API wrapper. + """ + return Backup(self._executor) + async def properties(self) -> Result[DatabaseProperties]: """Return database properties. 
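A brief sketch of the new `db.backup` accessor together with the error types added below (assumes `db` wraps a superuser connection on a deployment that supports hot backups):

.. code-block:: python

    from arangoasync.database import StandardDatabase
    from arangoasync.exceptions import BackupCreateError


    async def nightly_backup(db: StandardDatabase) -> str | None:
        try:
            result = await db.backup.create(label="nightly", timeout=120)
        except BackupCreateError as err:
            # Creation can fail, e.g. when a consistent snapshot cannot be
            # obtained within the given timeout.
            print(f"backup failed: {err}")
            return None
        return result["id"]
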
diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 5de6ea4..41644de 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -179,6 +179,30 @@ class AuthHeaderError(ArangoClientError): """The authentication header could not be determined.""" +class BackupCreateError(ArangoServerError): + """Failed to create a backup.""" + + +class BackupDeleteError(ArangoServerError): + """Failed to delete a backup.""" + + +class BackupDownloadError(ArangoServerError): + """Failed to download a backup from remote repository.""" + + +class BackupGetError(ArangoServerError): + """Failed to retrieve backup details.""" + + +class BackupRestoreError(ArangoServerError): + """Failed to restore from backup.""" + + +class BackupUploadError(ArangoServerError): + """Failed to upload a backup to remote repository.""" + + class CollectionCreateError(ArangoServerError): """Failed to create collection.""" diff --git a/docs/backup.rst b/docs/backup.rst new file mode 100644 index 0000000..de36041 --- /dev/null +++ b/docs/backup.rst @@ -0,0 +1,78 @@ +Backups +------- + +Hot Backups are near instantaneous consistent snapshots of an entire ArangoDB deployment. +This includes all databases, collections, indexes, Views, graphs, and users at any given time. +For more information, refer to `ArangoDB Manual`_. + +.. _ArangoDB Manual: https://docs.arangodb.com + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import JwtToken + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + token = JwtToken.generate_token(LOGIN_SECRET) + + # Connect to "_system" database as root user. + db = await client.db( + "_system", auth_method="superuser", token=token, verify=True + ) + + # Get the backup API wrapper. + backup = db.backup + + # Create a backup. + result = await backup.create( + label="foo", + allow_inconsistent=True, + force=False, + timeout=1000 + ) + backup_id = result["id"] + + # Retrieve details on all backups + backups = await backup.get() + + # Retrieve details on a specific backup. + details = await backup.get(backup_id=backup_id) + + # Upload a backup to a remote repository. + result = await backup.upload( + backup_id=backup_id, + repository="local://tmp/backups", + config={"local": {"type": "local"}} + ) + upload_id = result["uploadId"] + + # Get status of an upload. + status = await backup.upload(upload_id=upload_id) + + # Abort an upload. + await backup.upload(upload_id=upload_id, abort=True) + + # Download a backup from a remote repository. + result = await backup.download( + backup_id=backup_id, + repository="local://tmp/backups", + config={"local": {"type": "local"}} + ) + download_id = result["downloadId"] + + # Get status of an download. + status = await backup.download(download_id=download_id) + + # Abort an download. + await backup.download(download_id=download_id, abort=True) + + # Restore from a backup. + await backup.restore(backup_id) + + # Delete a backup. + await backup.delete(backup_id) + +See :class:`arangoasync.backup.Backup` for API specification. diff --git a/docs/index.rst b/docs/index.rst index 375303c..1b361fd 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -72,6 +72,7 @@ Contents certificates compression serialization + backup errors errno logging diff --git a/docs/specs.rst b/docs/specs.rst index 9983716..a2b982f 100644 --- a/docs/specs.rst +++ b/docs/specs.rst @@ -28,6 +28,9 @@ python-arango-async. .. automodule:: arangoasync.cursor :members: +.. 
automodule:: arangoasync.backup + :members: + .. automodule:: arangoasync.compression :members: diff --git a/tests/test_backup.py b/tests/test_backup.py new file mode 100644 index 0000000..d2fb07e --- /dev/null +++ b/tests/test_backup.py @@ -0,0 +1,57 @@ +import pytest +from packaging import version + +from arangoasync.client import ArangoClient +from arangoasync.exceptions import ( + BackupCreateError, + BackupDeleteError, + BackupDownloadError, + BackupGetError, + BackupRestoreError, + BackupUploadError, +) + + +@pytest.mark.asyncio +async def test_backup(url, sys_db_name, bad_db, token, enterprise, cluster, db_version): + if not enterprise: + pytest.skip("Backup API is only available in ArangoDB Enterprise Edition") + if not cluster: + pytest.skip("For simplicity, the backup API is only tested in cluster setups") + if db_version < version.parse("3.12.0"): + pytest.skip( + "For simplicity, the backup API is only tested in the latest versions" + ) + + with pytest.raises(BackupCreateError): + await bad_db.backup.create() + with pytest.raises(BackupGetError): + await bad_db.backup.get() + with pytest.raises(BackupRestoreError): + await bad_db.backup.restore("foobar") + with pytest.raises(BackupDeleteError): + await bad_db.backup.delete("foobar") + with pytest.raises(BackupUploadError): + await bad_db.backup.upload() + with pytest.raises(BackupDownloadError): + await bad_db.backup.download() + + async with ArangoClient(hosts=url) as client: + db = await client.db( + sys_db_name, auth_method="superuser", token=token, verify=True + ) + backup = db.backup + result = await backup.create() + backup_id = result["id"] + result = await backup.get() + assert "list" in result + result = await backup.restore(backup_id) + assert "previous" in result + config = {"local": {"type": "local"}} + result = await backup.upload(backup_id, repository="local://tmp", config=config) + assert "uploadId" in result + result = await backup.download( + backup_id, repository="local://tmp", config=config + ) + assert "downloadId" in result + await backup.delete(backup_id) From 88338776349da67c91751ae8ac0b9037288ce17c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Tue, 5 Aug 2025 13:21:21 +0800 Subject: [PATCH 05/30] Adding /_api/import (#61) --- arangoasync/collection.py | 102 ++++++++++++++++++++++++++++++++++++++ docs/document.rst | 33 ++++++++++++ tests/test_collection.py | 18 +++++++ 3 files changed, 153 insertions(+) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index e3d12ee..52a9d9e 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -1578,6 +1578,108 @@ def response_handler( return await self._executor.execute(request, response_handler) + async def import_bulk( + self, + documents: bytes | str, + doc_type: Optional[str] = None, + complete: Optional[bool] = True, + details: Optional[bool] = True, + from_prefix: Optional[str] = None, + to_prefix: Optional[str] = None, + overwrite: Optional[bool] = None, + overwrite_collection_prefix: Optional[bool] = None, + on_duplicate: Optional[str] = None, + wait_for_sync: Optional[bool] = None, + ignore_missing: Optional[bool] = None, + ) -> Result[Json]: + """Load JSON data in bulk into ArangoDB. + + Args: + documents (bytes | str): String representation of the JSON data to import. + doc_type (str | None): Determines how the body of the request is interpreted. + Possible values: "", "documents", "array", "auto". + complete (bool | None): If set to `True`, the whole import fails if any error occurs. 
+ Otherwise, the import continues even if some documents are invalid and cannot + be imported, skipping the problematic documents. + details (bool | None): If set to `True`, the result includes a `details` + attribute with information about documents that could not be imported. + from_prefix (str | None): String prefix prepended to the value of "_from" + field in each edge document inserted. For example, prefix "foo" + prepended to "_from": "bar" will result in "_from": "foo/bar". + Applies only to edge collections. + to_prefix (str | None): String prefix prepended to the value of "_to" + field in each edge document inserted. For example, prefix "foo" + prepended to "_to": "bar" will result in "_to": "foo/bar". + Applies only to edge collections. + overwrite (bool | None): If set to `True`, all existing documents are removed + prior to the import. Indexes are still preserved. + overwrite_collection_prefix (bool | None): Force the `fromPrefix` and + `toPrefix`, possibly replacing existing collection name prefixes. + on_duplicate (str | None): Action to take on unique key constraint violations + (for documents with "_key" fields). Allowed values are "error" (do + not import the new documents and count them as errors), "update" + (update the existing documents while preserving any fields missing + in the new ones), "replace" (replace the existing documents with + new ones), and "ignore" (do not import the new documents and count + them as ignored, as opposed to counting them as errors). Options + "update" and "replace" may fail on secondary unique key constraint + violations. + wait_for_sync (bool | None): Block until operation is synchronized to disk. + ignore_missing (bool | None): When importing JSON arrays of tabular data + (type parameter is omitted), the first line of the request body defines + the attribute keys and the subsequent lines the attribute values for each + document. Subsequent lines with a different number of elements than the + first line are not imported by default. You can enable this option to + import them anyway. For the missing elements, the document attributes + are omitted. Excess elements are ignored. + + Returns: + dict: Result of the import operation. + + Raises: + DocumentInsertError: If import fails. + + References: + - `import-json-data-as-documents `__ + """ # noqa: E501 + params: Params = dict() + params["collection"] = self.name + if doc_type is not None: + params["type"] = doc_type + if complete is not None: + params["complete"] = complete + if details is not None: + params["details"] = details + if from_prefix is not None: + params["fromPrefix"] = from_prefix + if to_prefix is not None: + params["toPrefix"] = to_prefix + if overwrite is not None: + params["overwrite"] = overwrite + if overwrite_collection_prefix is not None: + params["overwriteCollectionPrefix"] = overwrite_collection_prefix + if on_duplicate is not None: + params["onDuplicate"] = on_duplicate + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if ignore_missing is not None: + params["ignoreMissing"] = ignore_missing + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise DocumentInsertError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + request = Request( + method=Method.POST, + endpoint="/_api/import", + data=documents, + params=params, + ) + + return await self._executor.execute(request, response_handler) + class StandardCollection(Collection[T, U, V]): """Standard collection API wrapper. 
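As a complement to the documentation example below, a sketch of the error-handling knobs of `import_bulk` (a JSON Lines payload is assumed; `students` stands in for any existing `StandardCollection`):

.. code-block:: python

    from arangoasync.collection import StandardCollection


    async def upsert_students(students: StandardCollection) -> None:
        payload = (
            '{"_key": "john", "name": "John Smith", "age": 35}\n'
            '{"_key": "katie", "name": "Katie Foster", "age": 28}'
        )
        result = await students.import_bulk(
            payload,
            doc_type="documents",    # one JSON object per line
            on_duplicate="update",   # update documents whose _key already exists
            complete=False,          # keep importing when individual documents fail
            details=True,            # report per-document problems in the result
        )
        print(result["created"], result.get("updated"), result.get("details"))
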
diff --git a/docs/document.rst b/docs/document.rst index c0764e8..47619db 100644 --- a/docs/document.rst +++ b/docs/document.rst @@ -150,6 +150,39 @@ Standard documents are managed via collection API wrapper: # Delete one or more matching documents. await students.delete_match({"first": "Emma"}) +Importing documents in bulk is faster when using specialized methods. Suppose +our data is in a file containing JSON Lines (JSONL) format. Each line is expected +to be one JSON object. Example of a "students.jsonl" file: + +.. code-block:: json + + {"_key":"john","name":"John Smith","age":35} + {"_key":"katie","name":"Katie Foster","age":28} + +To import this file into the "students" collection, we can use the `import_bulk` API: + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + import aiofiles + + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for "students" collection. + students = db.collection("students") + + # Read the JSONL file asynchronously. + async with aiofiles.open('students.jsonl', mode='r') as f: + documents = await f.read() + + # Import documents in bulk. + result = await students.import_bulk(documents, doc_type="documents") + You can manage documents via database API wrappers also, but only simple operations (i.e. get, insert, update, replace, delete) are supported and you must provide document IDs instead of keys: diff --git a/tests/test_collection.py b/tests/test_collection.py index fb8d7ba..2dc4c42 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -16,6 +16,7 @@ CollectionStatisticsError, CollectionTruncateError, DocumentCountError, + DocumentInsertError, IndexCreateError, IndexDeleteError, IndexGetError, @@ -263,3 +264,20 @@ async def test_collection_truncate_count(docs, doc_col, bad_col): await doc_col.truncate(wait_for_sync=True, compact=True) cnt = await doc_col.count() assert cnt == 0 + + +@pytest.mark.asyncio +async def test_collection_import_bulk(doc_col, bad_col, docs): + documents = "\n".join(doc_col.serializer.dumps(doc) for doc in docs) + + # Test errors + with pytest.raises(DocumentInsertError): + await bad_col.import_bulk(documents, doc_type="documents") + + # Insert documents in bulk + result = await doc_col.import_bulk(documents, doc_type="documents") + + # Verify the documents were inserted + count = await doc_col.count() + assert count == len(docs) + assert result["created"] == count From 8155b952b43b61c0e62bd4dc6bc6cc193f2dd557 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 8 Aug 2025 13:29:30 +0800 Subject: [PATCH 06/30] Tasks API (#62) * Adding support for /_api/tasks * Adding docs for /_api/tasks --- arangoasync/database.py | 146 ++++++++++++++++++++++++++++++++++++++ arangoasync/exceptions.py | 16 +++++ docs/index.rst | 1 + docs/task.rst | 51 +++++++++++++ tests/conftest.py | 13 ++++ tests/helpers.py | 18 +++++ tests/test_task.py | 79 +++++++++++++++++++++ 7 files changed, 324 insertions(+) create mode 100644 docs/task.rst create mode 100644 tests/test_task.py diff --git a/arangoasync/database.py b/arangoasync/database.py index b048b4f..f2b03ee 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -40,6 +40,10 @@ PermissionUpdateError, ServerStatusError, ServerVersionError, + TaskCreateError, + TaskDeleteError, + TaskGetError, + TaskListError, TransactionAbortError, 
TransactionCommitError, TransactionExecuteError, @@ -2193,6 +2197,148 @@ def response_handler(resp: Response) -> Json: return await self._executor.execute(request, response_handler) + async def tasks(self) -> Result[Jsons]: + """Fetches all existing tasks from the server. + + Returns: + list: List of currently active server tasks. + + Raises: + TaskListError: If the list cannot be retrieved. + + References: + - `list-all-tasks `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_api/tasks") + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise TaskListError(resp, request) + result: Jsons = self.deserializer.loads_many(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def task(self, task_id: str) -> Result[Json]: + """Return the details of an active server task. + + Args: + task_id (str) -> Server task ID. + + Returns: + dict: Details of the server task. + + Raises: + TaskGetError: If the task details cannot be retrieved. + + References: + - `get-a-task `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint=f"/_api/tasks/{task_id}") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise TaskGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def create_task( + self, + command: str, + task_id: Optional[str] = None, + name: Optional[str] = None, + offset: Optional[int] = None, + params: Optional[Json] = None, + period: Optional[int] = None, + ) -> Result[Json]: + """Create a new task. + + Args: + command (str): The JavaScript code to be executed. + task_id (str | None): Optional task ID. If not provided, the server will + generate a unique ID. + name (str | None): The name of the task. + offset (int | None): The offset in seconds after which the task should + start executing. + params (dict | None): Parameters to be passed to the command. + period (int | None): The number of seconds between the executions. + + Returns: + dict: Details of the created task. + + Raises: + TaskCreateError: If the task cannot be created. + + References: + - `create-a-task `__ + - `create-a-task-with-id `__ + """ # noqa: E501 + data: Json = {"command": command} + if name is not None: + data["name"] = name + if offset is not None: + data["offset"] = offset + if params is not None: + data["params"] = params + if period is not None: + data["period"] = period + + if task_id is None: + request = Request( + method=Method.POST, + endpoint="/_api/tasks", + data=self.serializer.dumps(data), + ) + else: + request = Request( + method=Method.PUT, + endpoint=f"/_api/tasks/{task_id}", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise TaskCreateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def delete_task( + self, + task_id: str, + ignore_missing: bool = False, + ) -> Result[bool]: + """Delete a server task. + + Args: + task_id (str): Task ID. + ignore_missing (bool): If `True`, do not raise an exception if the + task does not exist. + + Returns: + bool: `True` if the task was deleted successfully, `False` if the + task was not found and **ignore_missing** was set to `True`. + + Raises: + TaskDeleteError: If the operation fails. 
+ + References: + - `delete-a-task `__ + """ # noqa: E501 + request = Request(method=Method.DELETE, endpoint=f"/_api/tasks/{task_id}") + + def response_handler(resp: Response) -> bool: + if resp.is_success: + return True + if resp.status_code == HTTP_NOT_FOUND and ignore_missing: + return False + raise TaskDeleteError(resp, request) + + return await self._executor.execute(request, response_handler) + class StandardDatabase(Database): """Standard database API wrapper. diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 41644de..5ca333a 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -451,6 +451,22 @@ class SortValidationError(ArangoClientError): """Invalid sort parameters.""" +class TaskCreateError(ArangoServerError): + """Failed to create server task.""" + + +class TaskDeleteError(ArangoServerError): + """Failed to delete server task.""" + + +class TaskGetError(ArangoServerError): + """Failed to retrieve server task details.""" + + +class TaskListError(ArangoServerError): + """Failed to retrieve server tasks.""" + + class TransactionAbortError(ArangoServerError): """Failed to abort transaction.""" diff --git a/docs/index.rst b/docs/index.rst index 1b361fd..41eaeee 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -73,6 +73,7 @@ Contents compression serialization backup + task errors errno logging diff --git a/docs/task.rst b/docs/task.rst new file mode 100644 index 0000000..2490507 --- /dev/null +++ b/docs/task.rst @@ -0,0 +1,51 @@ +Tasks +----- + +ArangoDB can schedule user-defined Javascript snippets as one-time or periodic +(re-scheduled after each execution) tasks. Tasks are executed in the context of +the database they are defined in. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Create a new task which simply prints parameters. + await db.create_task( + name="test_task", + command=""" + var task = function(params){ + var db = require('@arangodb'); + db.print(params); + } + task(params); + """, + params={"foo": "bar"}, + offset=300, + period=10, + task_id="001" + ) + + # List all active tasks + tasks = await db.tasks() + + # Retrieve details of a task by ID. + details = await db.task("001") + + # Delete an existing task by ID. + await db.delete_task('001', ignore_missing=True) + + +.. note:: + When deleting a database, any tasks that were initialized under its context + remain active. It is therefore advisable to delete any running tasks before + deleting the database. diff --git a/tests/conftest.py b/tests/conftest.py index 98d75de..66e5a9d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -256,6 +256,19 @@ async def teardown(): verify=False, ) + # Remove all tasks + test_tasks = [ + task + for task in await sys_db.tasks() + if task["name"].startswith("test_task") + ] + await asyncio.gather( + *( + sys_db.delete_task(task["id"], ignore_missing=True) + for task in test_tasks + ) + ) + # Remove all test users. tst_users = [ user["user"] diff --git a/tests/helpers.py b/tests/helpers.py index f2f63f7..dfaae4d 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -62,3 +62,21 @@ def generate_analyzer_name(): str: Random analyzer name. 
""" return f"test_analyzer_{uuid4().hex}" + + +def generate_task_name(): + """Generate and return a random task name. + + Returns: + str: Random task name. + """ + return f"test_task_{uuid4().hex}" + + +def generate_task_id(): + """Generate and return a random task ID. + + Returns: + str: Random task ID + """ + return f"test_task_id_{uuid4().hex}" diff --git a/tests/test_task.py b/tests/test_task.py new file mode 100644 index 0000000..4e1aee6 --- /dev/null +++ b/tests/test_task.py @@ -0,0 +1,79 @@ +import pytest + +from arangoasync.exceptions import ( + TaskCreateError, + TaskDeleteError, + TaskGetError, + TaskListError, +) +from tests.helpers import generate_task_id, generate_task_name + + +@pytest.mark.asyncio +async def test_task_management(sys_db, bad_db): + # This test intentionally uses the system database because cleaning up tasks is + # easier there. + + test_command = 'require("@arangodb").print(params);' + + # Test errors + with pytest.raises(TaskCreateError): + await bad_db.create_task(command=test_command) + with pytest.raises(TaskGetError): + await bad_db.task("non_existent_task_id") + with pytest.raises(TaskListError): + await bad_db.tasks() + with pytest.raises(TaskDeleteError): + await bad_db.delete_task("non_existent_task_id") + + # Create a task with a random ID + task_name = generate_task_name() + new_task = await sys_db.create_task( + name=task_name, + command=test_command, + params={"foo": 1, "bar": 2}, + offset=1, + ) + assert new_task["name"] == task_name + task_id = new_task["id"] + assert await sys_db.task(task_id) == new_task + + # Delete task + assert await sys_db.delete_task(task_id) is True + + # Create a task with a specific ID + task_name = generate_task_name() + task_id = generate_task_id() + new_task = await sys_db.create_task( + name=task_name, + command=test_command, + params={"foo": 1, "bar": 2}, + offset=1, + period=10, + task_id=task_id, + ) + assert new_task["name"] == task_name + assert new_task["id"] == task_id + + # Try to create a duplicate task + with pytest.raises(TaskCreateError): + await sys_db.create_task( + name=task_name, + command=test_command, + params={"foo": 1, "bar": 2}, + task_id=task_id, + ) + + # Test get missing task + with pytest.raises(TaskGetError): + await sys_db.task(generate_task_id()) + + # Test list tasks + tasks = await sys_db.tasks() + assert len(tasks) == 1 + + # Delete tasks + assert await sys_db.delete_task(task_id) is True + assert await sys_db.delete_task(task_id, ignore_missing=True) is False + with pytest.raises(TaskDeleteError): + await sys_db.delete_task(task_id) From a171df7b449cea79a2a2ce3d41ad0052261e0d6c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 8 Aug 2025 14:39:03 +0800 Subject: [PATCH 07/30] Adding security API (#63) --- arangoasync/database.py | 78 +++++++++++++++++++++++++++++++++++++++ arangoasync/exceptions.py | 12 ++++++ docs/certificates.rst | 22 +++++++++++ docs/migration.rst | 2 +- tests/test_client.py | 14 +++++++ 5 files changed, 127 insertions(+), 1 deletion(-) diff --git a/arangoasync/database.py b/arangoasync/database.py index f2b03ee..b338b56 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -38,7 +38,10 @@ PermissionListError, PermissionResetError, PermissionUpdateError, + ServerEncryptionError, ServerStatusError, + ServerTLSError, + ServerTLSReloadError, ServerVersionError, TaskCreateError, TaskDeleteError, @@ -2072,6 +2075,81 @@ def response_handler(resp: Response) -> Json: return await self._executor.execute(request, response_handler) + async def 
tls(self) -> Result[Json]: + """Return TLS data (keyfile, clientCA). + + This API requires authentication. + + Returns: + dict: dict containing the following components: + - keyfile: Information about the key file. + - clientCA: Information about the Certificate Authority (CA) for client certificate verification. + + Raises: + ServerTLSError: If the operation fails. + + References: + - `get-the-tls-data `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/server/tls") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerTLSError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body)["result"] + return result + + return await self._executor.execute(request, response_handler) + + async def reload_tls(self) -> Result[Json]: + """Reload TLS data (keyfile, clientCA). + + This is a protected API and can only be executed with superuser rights. + + Returns: + dict: New TLS data. + + Raises: + ServerTLSReloadError: If the operation fails. + + References: + - `reload-the-tls-data `__ + """ # noqa: E501 + request = Request(method=Method.POST, endpoint="/_admin/server/tls") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerTLSReloadError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body)["result"] + return result + + return await self._executor.execute(request, response_handler) + + async def encryption(self) -> Result[Json]: + """Rotate the user-supplied keys for encryption. + + This is a protected API and can only be executed with superuser rights. + This API is not available on Coordinator nodes. + + Returns: + dict: Encryption keys. + + Raises: + ServerEncryptionError: If the operation fails. + + References: + - `rotate-the-encryption-keys `__ + """ # noqa: E501 + request = Request(method=Method.POST, endpoint="/_admin/server/encryption") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerEncryptionError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body)["result"] + return result + + return await self._executor.execute(request, response_handler) + async def list_transactions(self) -> Result[Jsons]: """List all currently running stream transactions. diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 5ca333a..5e2844a 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -435,6 +435,10 @@ class SerializationError(ArangoClientError): """Failed to serialize the request.""" +class ServerEncryptionError(ArangoServerError): + """Failed to reload user-defined encryption keys.""" + + class ServerConnectionError(ArangoServerError): """Failed to connect to ArangoDB server.""" @@ -443,6 +447,14 @@ class ServerStatusError(ArangoServerError): """Failed to retrieve server status.""" +class ServerTLSError(ArangoServerError): + """Failed to retrieve TLS data.""" + + +class ServerTLSReloadError(ArangoServerError): + """Failed to reload TLS.""" + + class ServerVersionError(ArangoServerError): """Failed to retrieve server version.""" diff --git a/docs/certificates.rst b/docs/certificates.rst index c0665fa..ee49e13 100644 --- a/docs/certificates.rst +++ b/docs/certificates.rst @@ -108,3 +108,25 @@ Use a client certificate chain If you want to have fine-grained control over the HTTP connection, you should define your HTTP client as described in the :ref:`HTTP` section. + +Security features +================= + +See the `ArangoDB Manual`_ for more information on security features. 
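Besides the TLS endpoints shown in the example below, the wrapper also exposes ``encryption()``, which rotates the user-supplied encryption keys via ``POST /_admin/server/encryption``. It requires superuser rights and is not available on Coordinator nodes, so a defensive sketch (reusing the superuser ``db`` handle from the example below) looks like this:

.. code-block:: python

    from arangoasync.exceptions import ServerEncryptionError

    try:
        keys = await db.encryption()
    except ServerEncryptionError:
        # Raised, for example, when the request hits a Coordinator.
        pass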
+ +**Example:** + +.. code-block:: python + + async with ArangoClient(hosts=url) as client: + db = await client.db( + sys_db_name, auth_method="superuser", token=token, verify=True + ) + + # Get TLS data + tls = await db.tls() + + # Reload TLS data + tls = await db.reload_tls() + +.. _ArangoDB Manual: https://docs.arangodb.com/stable/develop/http-api/security/ diff --git a/docs/migration.rst b/docs/migration.rst index f26e7d6..7c2427e 100644 --- a/docs/migration.rst +++ b/docs/migration.rst @@ -2,7 +2,7 @@ Coming from python-arango ------------------------- Generally, migrating from `python-arango`_ should be a smooth transition. For the most part, the API is similar, -but there are a few things to note._ +but there are a few things to note. Helpers ======= diff --git a/tests/test_client.py b/tests/test_client.py index 6210412..cb488a7 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -3,6 +3,7 @@ from arangoasync.auth import JwtToken from arangoasync.client import ArangoClient from arangoasync.compression import DefaultCompressionManager +from arangoasync.exceptions import ServerEncryptionError from arangoasync.http import DefaultHTTPClient from arangoasync.resolver import DefaultHostResolver, RoundRobinHostResolver from arangoasync.version import __version__ @@ -131,6 +132,19 @@ async def test_client_jwt_superuser_auth( await db.jwt_secrets() await db.reload_jwt_secrets() + # Get TLS data + tls = await db.tls() + assert isinstance(tls, dict) + + # Reload TLS data + tls = await db.reload_tls() + assert isinstance(tls, dict) + + # Rotate + with pytest.raises(ServerEncryptionError): + # Not allowed on coordinators + await db.encryption() + # token missing async with ArangoClient(hosts=url) as client: with pytest.raises(ValueError): From 7586d09f7232f6b0fbda3f7c513047abbcc4d074 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 10 Aug 2025 11:16:27 +0800 Subject: [PATCH 08/30] Cluster API (#64) * Adding cluster API * Adding rebalance operations * Adding cluster documentation --- arangoasync/cluster.py | 451 ++++++++++++++++++++++++++++++++++++++ arangoasync/database.py | 10 + arangoasync/exceptions.py | 28 +++ docs/cluster.rst | 53 +++++ docs/index.rst | 1 + docs/specs.rst | 3 + tests/test_cluster.py | 101 +++++++++ 7 files changed, 647 insertions(+) create mode 100644 arangoasync/cluster.py create mode 100644 docs/cluster.rst create mode 100644 tests/test_cluster.py diff --git a/arangoasync/cluster.py b/arangoasync/cluster.py new file mode 100644 index 0000000..ce33b92 --- /dev/null +++ b/arangoasync/cluster.py @@ -0,0 +1,451 @@ +__all__ = ["Cluster"] + +from typing import List, Optional, cast + +from arangoasync.exceptions import ( + ClusterEndpointsError, + ClusterHealthError, + ClusterMaintenanceModeError, + ClusterRebalanceError, + ClusterServerIDError, + ClusterServerRoleError, + ClusterStatisticsError, +) +from arangoasync.executor import ApiExecutor +from arangoasync.request import Method, Request +from arangoasync.response import Response +from arangoasync.result import Result +from arangoasync.serialization import Deserializer, Serializer +from arangoasync.typings import Json, Jsons, Params + + +class Cluster: + """Cluster-specific endpoints.""" + + def __init__(self, executor: ApiExecutor) -> None: + self._executor = executor + + @property + def serializer(self) -> Serializer[Json]: + """Return the serializer.""" + return self._executor.serializer + + @property + def deserializer(self) -> Deserializer[Json, Jsons]: + """Return the deserializer.""" + return 
self._executor.deserializer + + async def health(self) -> Result[Json]: + """Queries the health of the cluster. + + Returns: + dict: Health status of the cluster. + + Raises: + ClusterHealthError: If retrieval fails. + + References: + - `get-the-cluster-health `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_admin/cluster/health", + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ClusterHealthError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return Response.format_body(result) + + return await self._executor.execute(request, response_handler) + + async def statistics(self, db_server: str) -> Result[Json]: + """Queries the statistics of the given DB-Server. + + Args: + db_server (str): The ID of the DB-Server. + + Returns: + dict: Statistics of the DB-Server. + + Raises: + ClusterStatisticsError: If retrieval fails. + + References: + - `get-the-statistics-of-a-db-server `__ + """ # noqa: E501 + params: Params = {"DBserver": db_server} + + request = Request( + method=Method.GET, + endpoint="/_admin/cluster/statistics", + prefix_needed=False, + params=params, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ClusterStatisticsError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return Response.format_body(result) + + return await self._executor.execute(request, response_handler) + + async def endpoints(self) -> Result[List[str]]: + """Fetch all coordinator endpoints. + + Returns: + list: List of coordinator endpoints. + + Raises: + ClusterEndpointsError: If retrieval fails. + + References: + - `list-all-coordinator-endpoints `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/cluster/endpoints", + prefix_needed=False, + ) + + def response_handler(resp: Response) -> List[str]: + if not resp.is_success: + raise ClusterEndpointsError(resp, request) + body: Json = self.deserializer.loads(resp.raw_body) + return [item["endpoint"] for item in body["endpoints"]] + + return await self._executor.execute(request, response_handler) + + async def server_id(self) -> Result[str]: + """Get the ID of the current server. + + Returns: + str: Server ID. + + Raises: + ClusterServerIDError: If retrieval fails. + + References: + - `get-the-server-id `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_admin/server/id", + prefix_needed=False, + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise ClusterServerIDError(resp, request) + return str(self.deserializer.loads(resp.raw_body)["id"]) + + return await self._executor.execute(request, response_handler) + + async def server_role(self) -> Result[str]: + """Get the role of the current server + + Returns: + str: Server role. Possible values: "SINGLE", "COORDINATOR", "PRIMARY", "SECONDARY", "AGENT", "UNDEFINED". + + Raises: + ClusterServerRoleError: If retrieval fails. 
+ + References: + - `get-the-server-role `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_admin/server/role", + prefix_needed=False, + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise ClusterServerRoleError(resp, request) + return str(self.deserializer.loads(resp.raw_body)["role"]) + + return await self._executor.execute(request, response_handler) + + async def toggle_maintenance_mode(self, mode: str) -> Result[Json]: + """Enable or disable the cluster supervision (agency) maintenance mode. + + Args: + mode (str): Maintenance mode. Allowed values are "on" or "off". + + Returns: + dict: Result of the operation. + + Raises: + ClusterMaintenanceModeError: If the toggle operation fails. + + References: + - `toggle-cluster-maintenance-mode `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint="/_admin/cluster/maintenance", + prefix_needed=False, + data=f'"{mode}"', + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ClusterMaintenanceModeError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return Response.format_body(result) + + return await self._executor.execute(request, response_handler) + + async def server_maintenance_mode(self, server_id: str) -> Result[Json]: + """Check whether the specified DB-Server is in maintenance mode and until when. + + Args: + server_id (str): Server ID. + + Returns: + dict: Maintenance status for the given server. + + Raises: + ClusterMaintenanceModeError: If retrieval fails. + + References: + - `get-the-maintenance-status-of-a-db-server `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint=f"/_admin/cluster/maintenance/{server_id}", + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ClusterMaintenanceModeError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return Response.format_body(result) + + return await self._executor.execute(request, response_handler) + + async def toggle_server_maintenance_mode( + self, server_id: str, mode: str, timeout: Optional[int] = None + ) -> None: + """Enable or disable the maintenance mode for the given server. + + Args: + server_id (str): Server ID. + mode (str): Maintenance mode. Allowed values are "normal" and "maintenance". + timeout (int | None): After how many seconds the maintenance mode shall automatically end. + + Raises: + ClusterMaintenanceModeError: If the operation fails. + + References: + - `set-the-maintenance-status-of-a-db-server `__ + """ # noqa: E501 + data: Json = {"mode": mode} + if timeout is not None: + data["timeout"] = timeout + + request = Request( + method=Method.PUT, + endpoint=f"/_admin/cluster/maintenance/{server_id}", + prefix_needed=False, + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise ClusterMaintenanceModeError(resp, request) + + await self._executor.execute(request, response_handler) + + async def calculate_imbalance(self) -> Result[Json]: + """Computes the current cluster imbalance and returns the result. + + Returns: + dict: Cluster imbalance information. + + Raises: + ClusterRebalanceError: If retrieval fails. 
+ + References: + - `get-the-current-cluster-imbalance `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/cluster/rebalance") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ClusterRebalanceError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return Response.format_body(result) + + return await self._executor.execute(request, response_handler) + + async def calculate_rebalance_plan( + self, + databases_excluded: Optional[List[str]] = None, + exclude_system_collections: Optional[bool] = None, + leader_changes: Optional[bool] = None, + maximum_number_of_moves: Optional[int] = None, + move_followers: Optional[bool] = None, + move_leaders: Optional[bool] = None, + pi_factor: Optional[float] = None, + version: int = 1, + ) -> Result[Json]: + """Compute a set of move shard operations to improve balance. + + Args: + databases_excluded (list | None): List of database names to be excluded from + the analysis. + exclude_system_collections (bool | None): Ignore system collections in the + rebalance plan. + leader_changes (bool | None): Allow leader changes without moving data. + maximum_number_of_moves (int | None): Maximum number of moves to be computed. + move_followers (bool | None): Allow moving shard followers. + move_leaders (bool | None): Allow moving shard leaders. + pi_factor (float | None): A weighting factor that should remain untouched. + version (int): Must be set to 1. + + Returns: + dict: Cluster rebalance plan. + + Raises: + ClusterRebalanceError: If retrieval fails. + + References: + - `compute-a-set-of-move-shard-operations-to-improve-balance `__ + """ # noqa: E501 + data: Json = dict(version=version) + if databases_excluded is not None: + data["databasesExcluded"] = databases_excluded + if exclude_system_collections is not None: + data["excludeSystemCollections"] = exclude_system_collections + if leader_changes is not None: + data["leaderChanges"] = leader_changes + if maximum_number_of_moves is not None: + data["maximumNumberOfMoves"] = maximum_number_of_moves + if move_followers is not None: + data["moveFollowers"] = move_followers + if move_leaders is not None: + data["moveLeaders"] = move_leaders + if pi_factor is not None: + data["piFactor"] = pi_factor + + request = Request( + method=Method.POST, + endpoint="/_admin/cluster/rebalance", + prefix_needed=False, + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ClusterRebalanceError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) + + async def rebalance( + self, + databases_excluded: Optional[List[str]] = None, + exclude_system_collections: Optional[bool] = None, + leader_changes: Optional[bool] = None, + maximum_number_of_moves: Optional[int] = None, + move_followers: Optional[bool] = None, + move_leaders: Optional[bool] = None, + pi_factor: Optional[float] = None, + version: int = 1, + ) -> Result[Json]: + """Compute and execute a set of move shard operations to improve balance. + + Args: + databases_excluded (list | None): List of database names to be excluded from + the analysis. + exclude_system_collections (bool | None): Ignore system collections in the + rebalance plan. + leader_changes (bool | None): Allow leader changes without moving data. + maximum_number_of_moves (int | None): Maximum number of moves to be computed. 
+ move_followers (bool | None): Allow moving shard followers. + move_leaders (bool | None): Allow moving shard leaders. + pi_factor (float | None): A weighting factor that should remain untouched. + version (int): Must be set to 1. + + Returns: + dict: Cluster rebalance plan. + + Raises: + ClusterRebalanceError: If retrieval fails. + + References: + - `compute-and-execute-a-set-of-move-shard-operations-to-improve-balance `__ + """ # noqa: E501 + data: Json = dict(version=version) + if databases_excluded is not None: + data["databasesExcluded"] = databases_excluded + if exclude_system_collections is not None: + data["excludeSystemCollections"] = exclude_system_collections + if leader_changes is not None: + data["leaderChanges"] = leader_changes + if maximum_number_of_moves is not None: + data["maximumNumberOfMoves"] = maximum_number_of_moves + if move_followers is not None: + data["moveFollowers"] = move_followers + if move_leaders is not None: + data["moveLeaders"] = move_leaders + if pi_factor is not None: + data["piFactor"] = pi_factor + + request = Request( + method=Method.PUT, + endpoint="/_admin/cluster/rebalance", + prefix_needed=False, + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ClusterRebalanceError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) + + async def execute_rebalance_plan( + self, + moves: List[Json], + version: int = 1, + ) -> Result[int]: + """Execute a set of move shard operations. + + Args: + moves (list): List of move shard operations to be executed. + version (int): Must be set to 1. + + Returns: + int: Indicates whether the methods have been accepted and scheduled for execution. + + Raises: + ClusterRebalanceError: If the execution fails. + + References: + - `execute-a-set-of-move-shard-operations `__ + """ # noqa: E501 + data: Json = dict(version=version, moves=moves) + + request = Request( + method=Method.POST, + endpoint="/_admin/cluster/rebalance/execute", + data=self.serializer.dumps(data), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> int: + if not resp.is_success: + raise ClusterRebalanceError(resp, request) + result: int = self.deserializer.loads(resp.raw_body)["code"] + return result + + return await self._executor.execute(request, response_handler) diff --git a/arangoasync/database.py b/arangoasync/database.py index b338b56..d0ddbbb 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -11,6 +11,7 @@ from arangoasync.aql import AQL from arangoasync.backup import Backup +from arangoasync.cluster import Cluster from arangoasync.collection import Collection, StandardCollection from arangoasync.connection import Connection from arangoasync.errno import HTTP_FORBIDDEN, HTTP_NOT_FOUND @@ -189,6 +190,15 @@ def backup(self) -> Backup: """ return Backup(self._executor) + @property + def cluster(self) -> Cluster: + """Return Cluster API wrapper. + + Returns: + arangoasync.cluster.Cluster: Cluster API wrapper. + """ + return Cluster(self._executor) + async def properties(self) -> Result[DatabaseProperties]: """Return database properties. 
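A workflow the docs/cluster.rst example further below only hints at is computing a rebalance plan first and then scheduling its move operations explicitly. A minimal sketch, assuming a connected ``db`` handle and that the returned plan carries its shard moves under a ``moves`` key as in the HTTP API:

.. code-block:: python

    cluster = db.cluster

    # Dry run: compute move-shard operations without executing them.
    plan = await cluster.calculate_rebalance_plan(
        exclude_system_collections=True,
        maximum_number_of_moves=100,
    )

    # Schedule the proposed moves; the call returns an HTTP-style code
    # indicating whether they were accepted for execution.
    moves = plan.get("moves", [])
    if moves:
        code = await cluster.execute_rebalance_plan(moves=moves)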
diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 5e2844a..bfd30d7 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -271,6 +271,34 @@ class ClientConnectionError(ArangoClientError): """The request was unable to reach the server.""" +class ClusterEndpointsError(ArangoServerError): + """Failed to retrieve coordinator endpoints.""" + + +class ClusterHealthError(ArangoServerError): + """Failed to retrieve cluster health.""" + + +class ClusterMaintenanceModeError(ArangoServerError): + """Failed to enable/disable cluster supervision maintenance mode.""" + + +class ClusterRebalanceError(ArangoServerError): + """Failed to execute cluster rebalancing operation.""" + + +class ClusterServerRoleError(ArangoServerError): + """Failed to retrieve server role in a cluster.""" + + +class ClusterServerIDError(ArangoServerError): + """Failed to retrieve server ID.""" + + +class ClusterStatisticsError(ArangoServerError): + """Failed to retrieve DB-Server statistics.""" + + class CursorCloseError(ArangoServerError): """Failed to delete the cursor result from server.""" diff --git a/docs/cluster.rst b/docs/cluster.rst new file mode 100644 index 0000000..c5e58aa --- /dev/null +++ b/docs/cluster.rst @@ -0,0 +1,53 @@ +Clusters +-------- + +The cluster-specific API lets you get information about individual +cluster nodes and the cluster as a whole, as well as monitor and +administrate cluster deployments. For more information on the design +and architecture, refer to `ArangoDB Manual`_. + +.. _ArangoDB Manual: https://docs.arangodb.com + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "_system" database as root user. + db = await client.db("_system", auth=auth) + cluster = db.cluster + + # Cluster health + health = await cluster.health() + + # DB-Server statistics + db_server = "PRMR-2716c9d0-4b22-4c66-ba3d-f9cd3143e52b" + stats = await cluster.statistics(db_server) + + # Cluster endpoints + endpoints = await cluster.endpoints() + + # Cluster server ID and role + server_id = await cluster.server_id() + server_role = await cluster.server_role() + + # Maintenance mode + await cluster.toggle_maintenance_mode("on") + await cluster.toggle_maintenance_mode("off") + await cluster.toggle_server_maintenance_mode( + db_server, "maintenance", timeout=30 + ) + status = await cluster.server_maintenance_mode(db_server) + await cluster.toggle_server_maintenance_mode(db_server, "normal") + + # Rebalance + result = await cluster.calculate_imbalance() + result = await cluster.calculate_rebalance_plan() + result = await cluster.execute_rebalance_plan(moves=[]) + result = await cluster.rebalance() + +See :class:`arangoasync.cluster.Cluster` for API specification. diff --git a/docs/index.rst b/docs/index.rst index 41eaeee..65eefd3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -46,6 +46,7 @@ Contents transaction view analyzer + cluster **API Executions** diff --git a/docs/specs.rst b/docs/specs.rst index a2b982f..763af9c 100644 --- a/docs/specs.rst +++ b/docs/specs.rst @@ -31,6 +31,9 @@ python-arango-async. .. automodule:: arangoasync.backup :members: +.. automodule:: arangoasync.cluster + :members: + .. 
automodule:: arangoasync.compression :members: diff --git a/tests/test_cluster.py b/tests/test_cluster.py new file mode 100644 index 0000000..d5b0b75 --- /dev/null +++ b/tests/test_cluster.py @@ -0,0 +1,101 @@ +import pytest +from packaging import version + +from arangoasync.client import ArangoClient +from arangoasync.exceptions import ( + ClusterEndpointsError, + ClusterHealthError, + ClusterMaintenanceModeError, + ClusterRebalanceError, + ClusterServerIDError, + ClusterServerRoleError, + ClusterStatisticsError, +) + + +@pytest.mark.asyncio +async def test_cluster( + url, sys_db_name, bad_db, token, enterprise, cluster, db_version +): + if not cluster: + pytest.skip("Cluster API is only tested in cluster setups") + if not enterprise or db_version < version.parse("3.12.0"): + pytest.skip( + "For simplicity, the cluster API is only tested in the latest versions" + ) + + # Test errors + with pytest.raises(ClusterHealthError): + await bad_db.cluster.health() + with pytest.raises(ClusterStatisticsError): + await bad_db.cluster.statistics("foo") + with pytest.raises(ClusterEndpointsError): + await bad_db.cluster.endpoints() + with pytest.raises(ClusterServerIDError): + await bad_db.cluster.server_id() + with pytest.raises(ClusterServerRoleError): + await bad_db.cluster.server_role() + with pytest.raises(ClusterMaintenanceModeError): + await bad_db.cluster.toggle_maintenance_mode("on") + with pytest.raises(ClusterMaintenanceModeError): + await bad_db.cluster.toggle_server_maintenance_mode("PRMR0001", "normal") + with pytest.raises(ClusterMaintenanceModeError): + await bad_db.cluster.server_maintenance_mode("PRMR0001") + with pytest.raises(ClusterRebalanceError): + await bad_db.cluster.calculate_imbalance() + with pytest.raises(ClusterRebalanceError): + await bad_db.cluster.rebalance() + with pytest.raises(ClusterRebalanceError): + await bad_db.cluster.calculate_rebalance_plan() + with pytest.raises(ClusterRebalanceError): + await bad_db.cluster.execute_rebalance_plan(moves=[]) + + async with ArangoClient(hosts=url) as client: + db = await client.db( + sys_db_name, auth_method="superuser", token=token, verify=True + ) + cluster = db.cluster + + # Cluster health + health = await cluster.health() + assert "Health" in health + + # DB-Server statistics + db_server = None + for server in health["Health"]: + if server.startswith("PRMR"): + db_server = server + break + assert db_server is not None, f"No DB server found in {health}" + stats = await cluster.statistics(db_server) + assert "enabled" in stats + + # Cluster endpoints + endpoints = await cluster.endpoints() + assert len(endpoints) > 0 + + # Cluster server ID and role + server_id = await cluster.server_id() + assert isinstance(server_id, str) + server_role = await cluster.server_role() + assert isinstance(server_role, str) + + # Maintenance mode + await cluster.toggle_maintenance_mode("on") + await cluster.toggle_maintenance_mode("off") + await cluster.toggle_server_maintenance_mode( + db_server, "maintenance", timeout=30 + ) + status = await cluster.server_maintenance_mode(db_server) + assert isinstance(status, dict) + await cluster.toggle_server_maintenance_mode(db_server, "normal") + + # Rebalance + result = await cluster.calculate_imbalance() + assert isinstance(result, dict) + result = await cluster.calculate_rebalance_plan() + assert isinstance(result, dict) + result = await cluster.execute_rebalance_plan(moves=[]) + assert result == 200 + result = await cluster.rebalance() + assert isinstance(result, dict) From 
114b45f1d6df144f515ea64c45f0fada3b4fac8b Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Tue, 12 Aug 2025 13:57:08 +0800 Subject: [PATCH 09/30] Foxx API (#65) * Adding foxx API * Finishing foxx API * Foxx documentation --- arangoasync/database.py | 10 + arangoasync/exceptions.py | 84 ++++ arangoasync/foxx.py | 829 ++++++++++++++++++++++++++++++++++++++ arangoasync/request.py | 10 +- docs/document.rst | 2 +- docs/foxx.rst | 147 +++++++ docs/index.rst | 1 + pyproject.toml | 1 + tests/helpers.py | 9 + tests/static/service.zip | Bin 0 -> 2963 bytes tests/test_foxx.py | 245 +++++++++++ 11 files changed, 1332 insertions(+), 6 deletions(-) create mode 100644 arangoasync/foxx.py create mode 100644 docs/foxx.rst create mode 100644 tests/static/service.zip create mode 100644 tests/test_foxx.py diff --git a/arangoasync/database.py b/arangoasync/database.py index d0ddbbb..be057c4 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -74,6 +74,7 @@ DefaultApiExecutor, TransactionApiExecutor, ) +from arangoasync.foxx import Foxx from arangoasync.graph import Graph from arangoasync.request import Method, Request from arangoasync.response import Response @@ -199,6 +200,15 @@ def cluster(self) -> Cluster: """ return Cluster(self._executor) + @property + def foxx(self) -> Foxx: + """Return Foxx API wrapper. + + Returns: + arangoasync.foxx.Foxx: Foxx API wrapper. + """ + return Foxx(self._executor) + async def properties(self) -> Result[DatabaseProperties]: """Return database properties. diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index bfd30d7..99340dd 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -395,6 +395,90 @@ class EdgeListError(ArangoServerError): """Failed to retrieve edges coming in and out of a vertex.""" +class FoxxConfigGetError(ArangoServerError): + """Failed to retrieve Foxx service configuration.""" + + +class FoxxConfigReplaceError(ArangoServerError): + """Failed to replace Foxx service configuration.""" + + +class FoxxConfigUpdateError(ArangoServerError): + """Failed to update Foxx service configuration.""" + + +class FoxxCommitError(ArangoServerError): + """Failed to commit local Foxx service state.""" + + +class FoxxDependencyGetError(ArangoServerError): + """Failed to retrieve Foxx service dependencies.""" + + +class FoxxDependencyReplaceError(ArangoServerError): + """Failed to replace Foxx service dependencies.""" + + +class FoxxDependencyUpdateError(ArangoServerError): + """Failed to update Foxx service dependencies.""" + + +class FoxxScriptListError(ArangoServerError): + """Failed to retrieve Foxx service scripts.""" + + +class FoxxDevModeEnableError(ArangoServerError): + """Failed to enable development mode for Foxx service.""" + + +class FoxxDevModeDisableError(ArangoServerError): + """Failed to disable development mode for Foxx service.""" + + +class FoxxDownloadError(ArangoServerError): + """Failed to download Foxx service bundle.""" + + +class FoxxReadmeGetError(ArangoServerError): + """Failed to retrieve Foxx service readme.""" + + +class FoxxScriptRunError(ArangoServerError): + """Failed to run Foxx service script.""" + + +class FoxxServiceCreateError(ArangoServerError): + """Failed to create Foxx service.""" + + +class FoxxServiceDeleteError(ArangoServerError): + """Failed to delete Foxx services.""" + + +class FoxxServiceGetError(ArangoServerError): + """Failed to retrieve Foxx service metadata.""" + + +class FoxxServiceListError(ArangoServerError): + """Failed to retrieve Foxx services.""" + + +class 
FoxxServiceReplaceError(ArangoServerError): + """Failed to replace Foxx service.""" + + +class FoxxServiceUpdateError(ArangoServerError): + """Failed to update Foxx service.""" + + +class FoxxSwaggerGetError(ArangoServerError): + """Failed to retrieve Foxx service swagger.""" + + +class FoxxTestRunError(ArangoServerError): + """Failed to run Foxx service tests.""" + + class GraphCreateError(ArangoServerError): """Failed to create the graph.""" diff --git a/arangoasync/foxx.py b/arangoasync/foxx.py new file mode 100644 index 0000000..b74d933 --- /dev/null +++ b/arangoasync/foxx.py @@ -0,0 +1,829 @@ +__all__ = ["Foxx"] + +from typing import Any, Optional + +from arangoasync.exceptions import ( + FoxxCommitError, + FoxxConfigGetError, + FoxxConfigReplaceError, + FoxxConfigUpdateError, + FoxxDependencyGetError, + FoxxDependencyReplaceError, + FoxxDependencyUpdateError, + FoxxDevModeDisableError, + FoxxDevModeEnableError, + FoxxDownloadError, + FoxxReadmeGetError, + FoxxScriptListError, + FoxxScriptRunError, + FoxxServiceCreateError, + FoxxServiceDeleteError, + FoxxServiceGetError, + FoxxServiceListError, + FoxxServiceReplaceError, + FoxxServiceUpdateError, + FoxxSwaggerGetError, + FoxxTestRunError, +) +from arangoasync.executor import ApiExecutor +from arangoasync.request import Method, Request +from arangoasync.response import Response +from arangoasync.result import Result +from arangoasync.serialization import Deserializer, Serializer +from arangoasync.typings import Json, Jsons, Params, RequestHeaders + + +class Foxx: + """Foxx API wrapper.""" + + def __init__(self, executor: ApiExecutor) -> None: + self._executor = executor + + def __repr__(self) -> str: + return f"" + + @property + def serializer(self) -> Serializer[Json]: + """Return the serializer.""" + return self._executor.serializer + + @property + def deserializer(self) -> Deserializer[Json, Jsons]: + """Return the deserializer.""" + return self._executor.deserializer + + async def services(self, exclude_system: Optional[bool] = False) -> Result[Jsons]: + """List installed services. + + Args: + exclude_system (bool | None): Exclude system services. + + Returns: + list: List of installed services. + + Raises: + FoxxServiceListError: If retrieval fails. + + References: + - `list-the-installed-services `__ + """ # noqa: E501 + params: Params = {} + if exclude_system is not None: + params["excludeSystem"] = exclude_system + + request = Request( + method=Method.GET, + endpoint="/_api/foxx", + params=params, + ) + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise FoxxServiceListError(resp, request) + result: Jsons = self.deserializer.loads_many(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def service(self, mount: str) -> Result[Json]: + """Return service metadata. + + Args: + mount (str): Service mount path (e.g "/_admin/aardvark"). + + Returns: + dict: Service metadata. + + Raises: + FoxxServiceGetError: If retrieval fails. 
+ + References: + - `get-the-service-description `__ + """ # noqa: E501 + params: Params = {"mount": mount} + request = Request( + method=Method.GET, + endpoint="/_api/foxx/service", + params=params, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxServiceGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def create_service( + self, + mount: str, + service: Any, + headers: Optional[RequestHeaders] = None, + development: Optional[bool] = None, + setup: Optional[bool] = None, + legacy: Optional[bool] = None, + ) -> Result[Json]: + """Installs the given new service at the given mount path. + + Args: + mount (str): Mount path the service should be installed at. + service (Any): Service payload. Can be a JSON string, a file-like object, or a + multipart form. + headers (dict | None): Request headers. + development (bool | None): Whether to install the service in development mode. + setup (bool | None): Whether to run the service setup script. + legacy (bool | None): Whether to install in legacy mode. + + Returns: + dict: Service metadata. + + Raises: + FoxxServiceCreateError: If installation fails. + + References: + - `install-a-new-service-mode `__ + """ # noqa: E501 + params: Params = dict() + params["mount"] = mount + if development is not None: + params["development"] = development + if setup is not None: + params["setup"] = setup + if legacy is not None: + params["legacy"] = legacy + + if isinstance(service, dict): + data = self.serializer.dumps(service) + else: + data = service + + request = Request( + method=Method.POST, + endpoint="/_api/foxx", + params=params, + data=data, + headers=headers, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxServiceCreateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def delete_service( + self, + mount: str, + teardown: Optional[bool] = None, + ) -> None: + """Removes the service at the given mount path from the database and file system. + + Args: + mount (str): Mount path of the service to uninstall. + teardown (bool | None): Whether to run the teardown script. + + Raises: + FoxxServiceDeleteError: If operations fails. + + References: + - `uninstall-a-service `__ + """ # noqa: E501 + params: Params = dict() + params["mount"] = mount + if teardown is not None: + params["teardown"] = teardown + + request = Request( + method=Method.DELETE, + endpoint="/_api/foxx/service", + params=params, + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise FoxxServiceDeleteError(resp, request) + + await self._executor.execute(request, response_handler) + + async def replace_service( + self, + mount: str, + service: Any, + headers: Optional[RequestHeaders] = None, + teardown: Optional[bool] = None, + setup: Optional[bool] = None, + legacy: Optional[bool] = None, + force: Optional[bool] = None, + ) -> Result[Json]: + """Replace an existing Foxx service at the given mount path. + + Args: + mount (str): Mount path of the service to replace. + service (Any): Service payload (JSON string, file-like object, or multipart form). + headers (dict | None): Optional request headers. + teardown (bool | None): Whether to run the teardown script. + setup (bool | None): Whether to run the setup script. 
+ legacy (bool | None): Whether to install in legacy mode. + force (bool | None): Set to `True` to force service install even if no service is installed under given mount. + + Returns: + dict: Service metadata. + + Raises: + FoxxServiceReplaceError: If replacement fails. + + References: + - `replace-a-service `__ + """ # noqa: E501 + params: Params = dict() + params["mount"] = mount + if teardown is not None: + params["teardown"] = teardown + if setup is not None: + params["setup"] = setup + if legacy is not None: + params["legacy"] = legacy + if force is not None: + params["force"] = force + + if isinstance(service, dict): + data = self.serializer.dumps(service) + else: + data = service + + request = Request( + method=Method.PUT, + endpoint="/_api/foxx/service", + params=params, + data=data, + headers=headers, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxServiceReplaceError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def update_service( + self, + mount: str, + service: Any, + headers: Optional[RequestHeaders] = None, + teardown: Optional[bool] = None, + setup: Optional[bool] = None, + legacy: Optional[bool] = None, + force: Optional[bool] = None, + ) -> Result[Json]: + """Upgrade a Foxx service at the given mount path. + + Args: + mount (str): Mount path of the service to upgrade. + service (Any): Service payload (JSON string, file-like object, or multipart form). + headers (dict | None): Optional request headers. + teardown (bool | None): Whether to run the teardown script. + setup (bool | None): Whether to run the setup script. + legacy (bool | None): Whether to upgrade in legacy mode. + force (bool | None): Set to `True` to force service install even if no service is installed under given mount. + + Returns: + dict: Service metadata. + + Raises: + FoxxServiceUpdateError: If upgrade fails. + + References: + - `upgrade-a-service `__ + """ # noqa: E501 + params: Params = dict() + params["mount"] = mount + if teardown is not None: + params["teardown"] = teardown + if setup is not None: + params["setup"] = setup + if legacy is not None: + params["legacy"] = legacy + if force is not None: + params["force"] = force + + if isinstance(service, dict): + data = self.serializer.dumps(service) + else: + data = service + + request = Request( + method=Method.PATCH, + endpoint="/_api/foxx/service", + params=params, + data=data, + headers=headers, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxServiceUpdateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def config(self, mount: str) -> Result[Json]: + """Return service configuration. + + Args: + mount (str): Service mount path. + + Returns: + dict: Service configuration. + + Raises: + FoxxConfigGetError: If retrieval fails. 
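``create_service`` (and its replace/update counterparts) serializes a ``dict`` payload but passes any other ``service`` value through untouched, so a bundle can be uploaded straight from disk. A minimal sketch, assuming ``db.foxx`` is available, that the bundle sits at ``./service.zip``, and that the server accepts a raw zip body under a ``content-type: application/zip`` header (the mount path is made up):

.. code-block:: python

    import aiofiles

    foxx = db.foxx

    # Read the bundle as raw bytes; non-dict payloads are sent as-is.
    async with aiofiles.open("service.zip", mode="rb") as f:
        bundle = await f.read()

    service = await foxx.create_service(
        mount="/zip_mount",
        service=bundle,
        headers={"content-type": "application/zip"},
        setup=True,
    )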
+ + References: + - `get-the-configuration-options `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/foxx/configuration", + params={"mount": mount}, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxConfigGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def update_config(self, mount: str, options: Json) -> Result[Json]: + """Update service configuration. + + Args: + mount (str): Service mount path. + options (dict): Configuration values. Omitted options are ignored. + + Returns: + dict: Updated configuration values. + + Raises: + FoxxConfigUpdateError: If update fails. + + References: + - `update-the-configuration-options `__ + """ # noqa: E501 + request = Request( + method=Method.PATCH, + endpoint="/_api/foxx/configuration", + params={"mount": mount}, + data=self.serializer.dumps(options), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxConfigUpdateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def replace_config(self, mount: str, options: Json) -> Result[Json]: + """Replace service configuration. + + Args: + mount (str): Service mount path. + options (dict): Configuration values. Omitted options are reset to their + default values or marked as un-configured. + + Returns: + dict: Replaced configuration values. + + Raises: + FoxxConfigReplaceError: If replace fails. + + References: + - `replace-the-configuration-options `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint="/_api/foxx/configuration", + params={"mount": mount}, + data=self.serializer.dumps(options), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxConfigReplaceError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def dependencies(self, mount: str) -> Result[Json]: + """Return service dependencies. + + Args: + mount (str): Service mount path. + + Returns: + dict: Service dependencies settings. + + Raises: + FoxxDependencyGetError: If retrieval fails. + + References: + - `get-the-dependency-options `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/foxx/dependencies", + params={"mount": mount}, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxDependencyGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def update_dependencies(self, mount: str, options: Json) -> Result[Json]: + """Update service dependencies. + + Args: + mount (str): Service mount path. + options (dict): Dependencies settings. Omitted ones are ignored. + + Returns: + dict: Updated dependency settings. + + Raises: + FoxxDependencyUpdateError: If update fails. 
+ + References: + - `update-the-dependency-options `__ + """ # noqa: E501 + request = Request( + method=Method.PATCH, + endpoint="/_api/foxx/dependencies", + params={"mount": mount}, + data=self.serializer.dumps(options), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxDependencyUpdateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def replace_dependencies(self, mount: str, options: Json) -> Result[Json]: + """Replace service dependencies. + + Args: + mount (str): Service mount path. + options (dict): Dependencies settings. Omitted ones are disabled. + + Returns: + dict: Replaced dependency settings. + + Raises: + FoxxDependencyReplaceError: If replace fails. + + References: + - `replace-the-dependency-options `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint="/_api/foxx/dependencies", + params={"mount": mount}, + data=self.serializer.dumps(options), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxDependencyReplaceError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def scripts(self, mount: str) -> Result[Json]: + """List service scripts. + + Args: + mount (str): Service mount path. + + Returns: + dict: Service scripts. + + Raises: + FoxxScriptListError: If retrieval fails. + + References: + - `list-the-service-scripts `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/foxx/scripts", + params={"mount": mount}, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxScriptListError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def run_script( + self, mount: str, name: str, arg: Optional[Json] = None + ) -> Result[Any]: + """Run a service script. + + Args: + mount (str): Service mount path. + name (str): Script name. + arg (dict | None): Arbitrary value passed into the script as first argument. + + Returns: + Any: Returns the exports of the script, if any. + + Raises: + FoxxScriptRunError: If script fails. + + References: + - `run-a-service-script `__ + """ # noqa: E501 + request = Request( + method=Method.POST, + endpoint=f"/_api/foxx/scripts/{name}", + params={"mount": mount}, + data=self.serializer.dumps(arg) if arg is not None else None, + ) + + def response_handler(resp: Response) -> Any: + if not resp.is_success: + raise FoxxScriptRunError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def run_tests( + self, + mount: str, + reporter: Optional[str] = None, + idiomatic: Optional[bool] = None, + filter: Optional[str] = None, + output_format: Optional[str] = None, + ) -> Result[str]: + """Run service tests. + + Args: + mount (str): Service mount path. + reporter (str | None): Test reporter. Allowed values are "default" (simple + list of test cases), "suite" (object of test cases nested in + suites), "stream" (raw stream of test results), "xunit" (XUnit or + JUnit compatible structure), or "tap" (raw TAP compatible stream). + idiomatic (bool | None): Use matching format for the reporter, regardless of + the value of parameter **output_format**. 
+ filter (str | None): Only run tests whose full name (test suite and + test case) matches the given string. + output_format (str | None): Used to further control format. Allowed values + are "x-ldjson", "xml" and "text". When using "stream" reporter, + setting this to "x-ldjson" returns newline-delimited JSON stream. + When using "tap" reporter, setting this to "text" returns plain + text TAP report. When using "xunit" reporter, settings this to + "xml" returns an XML instead of JSONML. + + Returns: + str: Reporter output (e.g. raw JSON string, XML, plain text). + + Raises: + FoxxTestRunError: If test fails. + + References: + - `run-the-service-tests `__ + """ # noqa: E501 + params: Params = dict() + params["mount"] = mount + if reporter is not None: + params["reporter"] = reporter + if idiomatic is not None: + params["idiomatic"] = idiomatic + if filter is not None: + params["filter"] = filter + + headers: RequestHeaders = {} + if output_format == "x-ldjson": + headers["accept"] = "application/x-ldjson" + elif output_format == "xml": + headers["accept"] = "application/xml" + elif output_format == "text": + headers["accept"] = "text/plain" + + request = Request( + method=Method.POST, + endpoint="/_api/foxx/tests", + params=params, + headers=headers, + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise FoxxTestRunError(resp, request) + return resp.raw_body.decode("utf-8") + + return await self._executor.execute(request, response_handler) + + async def enable_development(self, mount: str) -> Result[Json]: + """Puts the service into development mode. + + While the service is running in development mode, it is reloaded from + the file system, and its setup script (if any) is re-executed every + time the service handles a request. + + In a cluster with multiple coordinators, changes to the filesystem on + one coordinator is not reflected across other coordinators. + + Args: + mount (str): Service mount path. + + Returns: + dict: Service metadata. + + Raises: + FoxxDevModeEnableError: If the operation fails. + + References: + - `enable-the-development-mode `__ + """ # noqa: E501 + request = Request( + method=Method.POST, + endpoint="/_api/foxx/development", + params={"mount": mount}, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxDevModeEnableError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def disable_development(self, mount: str) -> Result[Json]: + """Puts the service into production mode. + + In a cluster with multiple coordinators, the services on all other + coordinators are replaced with the version on the calling coordinator. + + Args: + mount (str): Service mount path. + + Returns: + dict: Service metadata. + + Raises: + FoxxDevModeDisableError: If the operation fails. + + References: + - `disable-the-development-mode `__ + """ # noqa: E501 + request = Request( + method=Method.DELETE, + endpoint="/_api/foxx/development", + params={"mount": mount}, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxDevModeDisableError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def readme(self, mount: str) -> Result[str]: + """Return the service readme. + + Args: + mount (str): Service mount path. + + Returns: + str: Service readme content. 
+ + Raises: + FoxxReadmeGetError: If retrieval fails. + + References: + - `get-the-service-readme `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/foxx/readme", + params={"mount": mount}, + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise FoxxReadmeGetError(resp, request) + return resp.raw_body.decode("utf-8") + + return await self._executor.execute(request, response_handler) + + async def swagger(self, mount: str) -> Result[Json]: + """Return the Swagger API description for the given service. + + Args: + mount (str): Service mount path. + + Returns: + dict: Swagger API description. + + Raises: + FoxxSwaggerGetError: If retrieval fails. + + References: + - `get-the-swagger-description `__ + """ # noqa: E501 + request = Request( + method=Method.GET, endpoint="/_api/foxx/swagger", params={"mount": mount} + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise FoxxSwaggerGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def download(self, mount: str) -> Result[bytes]: + """Downloads a zip bundle of the service directory. + + When development mode is enabled, this always creates a new bundle. + Otherwise, the bundle will represent the version of a service that is + installed on that ArangoDB instance. + + Args: + mount (str): Service mount path. + + Returns: + bytes: Service bundle zip in raw bytes form. + + Raises: + FoxxDownloadError: If download fails. + + References: + - `download-a-service-bundle `__ + """ # noqa: E501 + request = Request( + method=Method.POST, endpoint="/_api/foxx/download", params={"mount": mount} + ) + + def response_handler(resp: Response) -> bytes: + if not resp.is_success: + raise FoxxDownloadError(resp, request) + return resp.raw_body + + return await self._executor.execute(request, response_handler) + + async def commit(self, replace: Optional[bool] = None) -> None: + """Commit local service state of the coordinator to the database. + + This can be used to resolve service conflicts between coordinators + that cannot be fixed automatically due to missing data. + + Args: + replace (bool | None): If set to `True`, any existing service files in the database + will be overwritten. + + Raises: + FoxxCommitError: If commit fails. + + References: + - `commit-the-local-service-state `__ + """ # noqa: E501 + params: Params = {} + if replace is not None: + params["replace"] = replace + + request = Request( + method=Method.POST, endpoint="/_api/foxx/commit", params=params + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise FoxxCommitError(resp, request) + + await self._executor.execute(request, response_handler) diff --git a/arangoasync/request.py b/arangoasync/request.py index 6bd629d..9c43508 100644 --- a/arangoasync/request.py +++ b/arangoasync/request.py @@ -4,7 +4,7 @@ ] from enum import Enum, auto -from typing import Optional +from typing import Any, Optional from arangoasync.auth import Auth from arangoasync.typings import Params, RequestHeaders @@ -31,7 +31,7 @@ class Request: endpoint (str): API endpoint. headers (dict | None): Request headers. params (dict | None): URL parameters. - data (bytes | None): Request payload. + data (Any): Request payload. auth (Auth | None): Authentication. prefix_needed (bool): Whether the request needs a prefix (e.g., database name). 
@@ -40,7 +40,7 @@ class Request: endpoint (str): API endpoint. headers (dict | None): Request headers. params (dict | None): URL parameters. - data (bytes | None): Request payload. + data (Any): Request payload. auth (Auth | None): Authentication. prefix_needed (bool): Whether the request needs a prefix (e.g., database name). """ @@ -61,7 +61,7 @@ def __init__( endpoint: str, headers: Optional[RequestHeaders] = None, params: Optional[Params] = None, - data: Optional[bytes | str] = None, + data: Optional[Any] = None, auth: Optional[Auth] = None, prefix_needed: bool = True, ) -> None: @@ -69,7 +69,7 @@ def __init__( self.endpoint: str = endpoint self.headers: RequestHeaders = headers or dict() self.params: Params = params or dict() - self.data: Optional[bytes | str] = data + self.data: Optional[Any] = data self.auth: Optional[Auth] = auth self.prefix_needed = prefix_needed diff --git a/docs/document.rst b/docs/document.rst index 47619db..da6434b 100644 --- a/docs/document.rst +++ b/docs/document.rst @@ -177,7 +177,7 @@ To import this file into the "students" collection, we can use the `import_bulk` students = db.collection("students") # Read the JSONL file asynchronously. - async with aiofiles.open('students.jsonl', mode='r') as f: + async with aiofiles.open("students.jsonl", mode="r") as f: documents = await f.read() # Import documents in bulk. diff --git a/docs/foxx.rst b/docs/foxx.rst new file mode 100644 index 0000000..818c80e --- /dev/null +++ b/docs/foxx.rst @@ -0,0 +1,147 @@ +Foxx +---- + +**Foxx** is a microservice framework which lets you define custom HTTP endpoints +that extend ArangoDB's REST API. For more information, refer to `ArangoDB Manual`_. + +.. _ArangoDB Manual: https://docs.arangodb.com + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the Foxx API wrapper. + foxx = db.foxx + + # Define the test mount point. + service_mount = "/test_mount" + + # List services. + await foxx.services() + + # Create a service using a source file. + # In this case, the server must have access to the URL. + service = { + "source": "/tests/static/service.zip", + "configuration": {}, + "dependencies": {}, + } + await foxx.create_service( + mount=service_mount, + service=service, + development=True, + setup=True, + legacy=True + ) + + # Update (upgrade) a service. + await db.foxx.update_service( + mount=service_mount, + service=service, + teardown=True, + setup=True, + legacy=False + ) + + # Replace (overwrite) a service. + await db.foxx.replace_service( + mount=service_mount, + service=service, + teardown=True, + setup=True, + legacy=True, + force=False + ) + + # Get service details. + await foxx.service(service_mount) + + # Manage service configuration. + await foxx.config(service_mount) + await foxx.update_config(service_mount, options={}) + await foxx.replace_config(service_mount, options={}) + + # Manage service dependencies. + await foxx.dependencies(service_mount) + await foxx.update_dependencies(service_mount, options={}) + await foxx.replace_dependencies(service_mount, options={}) + + # Toggle development mode for a service. + await foxx.enable_development(service_mount) + await foxx.disable_development(service_mount) + + # Other miscellaneous functions. 
+ await foxx.readme(service_mount) + await foxx.swagger(service_mount) + await foxx.download(service_mount) + await foxx.commit() + await foxx.scripts(service_mount) + await foxx.run_script(service_mount, "setup", {}) + await foxx.run_tests(service_mount, reporter="xunit", output_format="xml") + + # Delete a service. + await foxx.delete_service(service_mount) + +There are other ways to create, update, and replace services, such as +providing a file directly instead of a source URL. This is useful when you +want to deploy a service from a local file system without needing the +server to access the file directly. When using this method, you must provide +the appropriate content type in the headers, such as `application/zip` for ZIP files or +`multipart/form-data` for multipart uploads. The following example demonstrates how to do this: + +.. code-block:: python + + import aiofiles + import aiohttp + import json + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the Foxx API wrapper. + foxx = db.foxx + + # Define the test mount points. + mount_point = "/test_mount" + + # Create the service using multipart/form-data. + service = aiohttp.FormData() + service.add_field( + "source", + open("./tests/static/service.zip", "rb"), + filename="service.zip", + content_type="application/zip", + ) + service.add_field("configuration", json.dumps({})) + service.add_field("dependencies", json.dumps({})) + service_info = await db.foxx.create_service( + mount=mount_point, service=service, headers={"content-type": "multipart/form-data"} + ) + + # Replace the service using raw data. + async with aiofiles.open("./tests/static/service.zip", mode="rb") as f: + service = await f.read() + service_info = await db.foxx.replace_service( + mount=mount_point, service=service, headers={"content-type": "application/zip"} + ) + + # Delete the service. + await db.foxx.delete_service(mount_point) + +See :class:`arangoasync.foxx.Foxx` for API specification. diff --git a/docs/index.rst b/docs/index.rst index 65eefd3..78afe62 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -43,6 +43,7 @@ Contents .. toctree:: :maxdepth: 1 + foxx transaction view analyzer diff --git a/pyproject.toml b/pyproject.toml index c5c890f..ef00aea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,6 +49,7 @@ version = { attr = "arangoasync.version.__version__" } [project.optional-dependencies] dev = [ + "aiofiles>=24.1.0", "black>=24.2", "flake8>=7.0", "isort>=5.10", diff --git a/tests/helpers.py b/tests/helpers.py index dfaae4d..0e6e8a8 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -80,3 +80,12 @@ def generate_task_id(): str: Random task ID """ return f"test_task_id_{uuid4().hex}" + + +def generate_service_mount(): + """Generate and return a random service name. + + Returns: + str: Random service name. 
+ """ + return f"/test_{uuid4().hex}" diff --git a/tests/static/service.zip b/tests/static/service.zip new file mode 100644 index 0000000000000000000000000000000000000000..00bf513ebf1066886e93020d36d46697ae55a134 GIT binary patch literal 2963 zcmZ`*c{r47A0FEnLyRR!SwkkrR+$9w;l`}#f4?>0a&F!F#vAP4|qkY!PtnG$eP z2fzZrNJ~u|lQn_BdH|RKxZ5~;$hdf4laYhfsa}QpjS-cMQ5N`OB$Kc~kS3C3*6AQyh5dm^L?K)=_stU@T|>CLc&k zIx9`0$KJb6Pk{tTIuPi{pNTx29dOth~CMvlR%Z!!zx$kU_?9TI%%%eDDte$SEm<|3sIuQv_K(*zz2-Wh6JKgOZw zcjQZDhCEwcS}x|yHjxAu-X5!ST1H(hQgxoU(DP08$1b=$R6VTQ7wO%CX`qcHpdI!f z?ReVb+`KIae{~eDudLrH$2PGcRhySwFmd}R_b)|75R;%ZRGgT~$;imJ`@R$%ue)?9 ztvlH7wkYk+=G&agY2ik^eA7_AtQbc(w``y8z6z_kggt#s)(J<^VsYzu&(pSD1>15U ze>~swpiDZVa#d3;Ig7JZ*f2Lr3>u{4@ zV#I=7Kv(^}=f4@^vqhPWI3W7)LKVH4@;!0OObmVn{*8a2sZLNn< zq#{^zdGs>8w>_G94oEWtIm+J))}@9YF>kOO7Q=H#!ukv&2-M2)ml!uaeE=c;5o0KI z(sLXE>zLd(`%Fp%>5qOL5pIkf8Kt);E_4UM!Ku9c6&U2E&Y4ofxnQaQDJ$;Nt$cX` z2hx6GYqhh6qd(fr$$Bhb3^Cy&n^KLq3L&=e>|GOs2I~Lz8^uozw=edf;B z!N{om=&Nn+w*?+aV_w%5D|(7AqhjmEs-LACh8Jc`GQ zq@xOO(MBt2e?BbE=yJgeHjIuLbZNHkPHaVJv}ic&2)F`#@C@E6kx)%F!S*ekTbXEc z?Yy95lc`?dUB4bm?m4}eeGK}xR2R{wied1VueJvZZ6V~-*xMWQ7iN(P4H-S{8f?o2 zu()RfT?Fh{Gg+tUIxFXUg_1dX9OH+a^bq!e; zv%gy_Bz>@{TGSzTQ}Fwzq6L*B@sMq-%n|c$(PL7NJkL zN$Z+~?&~E*ECKSrvv4&c!u$I-IZ!NHjWjqkX7vORzJxl)t9(N(@X0&wTB2!4!+a ztWRg0`N-~wO0V`GvmMUXzLlaDI;Az*8qf%w^@U3(%FApW%nU}JzTabUgVS@>lhZQ% zdIKT4k@itS&6*r5#f03M-V@+eM0`x_2MVQD^woO%psRRRhWxBAdC~3>+p7gB@)Dk* zuAOBi?z7{|?XcnVbC}bG6pso|4%;9%RRQyc?|Z1SFo++ib=LY_ZM`(QUA4vf2`(D< z#a$F*G6K!}L{YeiG~8v9-847P^@@v^rE>{2o#T3($j2;Qo~}#1LJT0vjFj-*uoWPZ zo9n*bKrJgYd(W7n7` z6F0zF}8?P|zLbdv<#Qq z=uNef(9(lM<(xBi3umzuCG!(uYiX96Hz=h1*Amw)lahyyNu*J1uWU61D9lxLYRJ-C zYos}y!a8!fhe|~UMC51sh4L=Ld78dO1*A11>=y6-ILjk0 zrYLuXootZO6VXg1c3t$NCO7QN)cc8E#*6#fmnfJrMOPoVUc%@3dvfP~*Q`)`QaIxI zWo}_(gyR!k-6flam>p&3#S+289XRH9i$nmG*8u=_3FyZ Date: Sat, 16 Aug 2025 19:05:42 +0800 Subject: [PATCH 10/30] Administration API (#66) * Getting started on administration API * Adding more Administration methods * Finishing up Administration API * Adding admin to to toctree --- arangoasync/database.py | 424 ++++++++++++++++++++++++++++++++++++++ arangoasync/exceptions.py | 68 +++++- docs/admin.rst | 47 +++++ docs/index.rst | 1 + tests/test_database.py | 95 ++++++++- 5 files changed, 632 insertions(+), 3 deletions(-) create mode 100644 docs/admin.rst diff --git a/arangoasync/database.py b/arangoasync/database.py index be057c4..449b789 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -6,6 +6,7 @@ ] +from datetime import datetime from typing import Any, List, Optional, Sequence, TypeVar, cast from warnings import warn @@ -26,10 +27,12 @@ CollectionDeleteError, CollectionKeyGeneratorsError, CollectionListError, + DatabaseCompactError, DatabaseCreateError, DatabaseDeleteError, DatabaseListError, DatabasePropertiesError, + DatabaseSupportInfoError, GraphCreateError, GraphDeleteError, GraphListError, @@ -39,8 +42,22 @@ PermissionListError, PermissionResetError, PermissionUpdateError, + ServerAvailableOptionsGetError, + ServerCheckAvailabilityError, + ServerCurrentOptionsGetError, + ServerEchoError, ServerEncryptionError, + ServerEngineError, + ServerExecuteError, + ServerLicenseGetError, + ServerLicenseSetError, + ServerModeError, + ServerModeSetError, + ServerReloadRoutingError, + ServerShutdownError, + ServerShutdownProgressError, ServerStatusError, + ServerTimeError, ServerTLSError, ServerTLSReloadError, ServerVersionError, @@ -2437,6 +2454,413 @@ def response_handler(resp: Response) -> bool: return await 
self._executor.execute(request, response_handler) + async def engine(self) -> Result[Json]: + """Returns the storage engine the server is configured to use. + + Returns: + dict: Database engine details. + + Raises: + ServerEngineError: If the operation fails. + + References: + - `get-the-storage-engine-type `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_api/engine") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerEngineError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def time(self) -> Result[datetime]: + """Return server system time. + + Returns: + datetime.datetime: Server system time. + + Raises: + ServerTimeError: If the operation fails. + + References: + - `get-the-system-time `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/time") + + def response_handler(resp: Response) -> datetime: + if not resp.is_success: + raise ServerTimeError(resp, request) + return datetime.fromtimestamp( + self.deserializer.loads(resp.raw_body)["time"] + ) + + return await self._executor.execute(request, response_handler) + + async def check_availability(self) -> Result[str]: + """Return ArangoDB server availability mode. + + Returns: + str: Server availability mode, either "readonly" or "default". + + Raises: + ServerCheckAvailabilityError: If the operation fails. + + References: + - `check-server-availability `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_admin/server/availability", + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise ServerCheckAvailabilityError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return str(result["mode"]) + + return await self._executor.execute(request, response_handler) + + async def support_info(self) -> Result[Json]: + """Retrieves deployment information for support purposes. + + Note: + As this API may reveal sensitive data about the deployment, it can only be accessed from inside the _system database. + + Returns: + dict: Deployment information + + Raises: + DatabaseSupportInfoError: If the operation fails. + + References: + - `get-information-about-the-deployment `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/support-info") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise DatabaseSupportInfoError(resp, request) + + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def options(self) -> Result[Json]: + """Return the currently-set server options. + + Returns: + dict: Server options. + + Raises: + ServerCurrentOptionsGetError: If the operation fails. + + References: + - `get-the-startup-option-configuration `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/options") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerCurrentOptionsGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def options_available(self) -> Result[Json]: + """Return a description of all available server options. + + Returns: + dict: Server options description. + + Raises: + ServerAvailableOptionsGetError: If the operation fails. 
+ + References: + - `get-the-available-startup-options `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/options-description") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerAvailableOptionsGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def mode(self) -> Result[str]: + """Return the server mode ("default" or "readonly"). + + Returns: + str: Server mode, either "default" or "readonly". + + Raises: + ServerModeError: If the operation fails. + + References: + - `return-whether-or-not-a-server-is-in-read-only-mode `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/server/mode") + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise ServerModeError(resp, request) + return str(self.deserializer.loads(resp.raw_body)["mode"]) + + return await self._executor.execute(request, response_handler) + + async def set_mode(self, mode: str) -> Result[str]: + """Set the server mode to read-only or default. + + Args: + mode (str): Server mode. Possible values are "default" or "readonly". + + Returns: + str: New server mode. + + Raises: + ServerModeSetError: If the operation fails. + + References: + - `set-the-server-mode-to-read-only-or-default `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint="/_admin/server/mode", + data=self.serializer.dumps({"mode": mode}), + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise ServerModeSetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return str(result["mode"]) + + return await self._executor.execute(request, response_handler) + + async def license(self) -> Result[Json]: + """View the license information and status of an Enterprise Edition instance. + + Returns: + dict: Server license information. + + Raises: + ServerLicenseGetError: If the operation fails. + + References: + - `get-information-about-the-current-license `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_admin/license") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerLicenseGetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def set_license(self, license: str, force: Optional[bool] = False) -> None: + """Set a new license for an Enterprise Edition instance. + + Args: + license (str) -> Base64-encoded license string, wrapped in double-quotes. + force (bool | None) -> Set to `True` to change the license even if it + expires sooner than the current one. + + Raises: + ServerLicenseSetError: If the operation fails. + + References: + - `set-a-new-license `__ + """ # noqa: E501 + params: Params = {} + if force is not None: + params["force"] = force + + request = Request( + method=Method.PUT, + endpoint="/_admin/license", + params=params, + data=license, + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise ServerLicenseSetError(resp, request) + + await self._executor.execute(request, response_handler) + + async def shutdown(self, soft: Optional[bool] = None) -> None: + """Initiate server shutdown sequence. + + Args: + soft (bool | None): If set to `True`, this initiates a soft shutdown. + + Raises: + ServerShutdownError: If the operation fails. 
+
+        References:
+            - `start-the-shutdown-sequence `__
+        """  # noqa: E501
+        params: Params = {}
+        if soft is not None:
+            params["soft"] = soft
+
+        request = Request(
+            method=Method.DELETE,
+            endpoint="/_admin/shutdown",
+            params=params,
+        )
+
+        def response_handler(resp: Response) -> None:
+            if not resp.is_success:
+                raise ServerShutdownError(resp, request)
+
+        await self._executor.execute(request, response_handler)
+
+    async def shutdown_progress(self) -> Result[Json]:
+        """Query the soft shutdown progress.
+
+        Returns:
+            dict: Information about the shutdown progress.
+
+        Raises:
+            ServerShutdownProgressError: If the operation fails.
+
+        References:
+            - `query-the-soft-shutdown-progress `__
+        """  # noqa: E501
+        request = Request(method=Method.GET, endpoint="/_admin/shutdown")
+
+        def response_handler(resp: Response) -> Json:
+            if not resp.is_success:
+                raise ServerShutdownProgressError(resp, request)
+
+            result: Json = self.deserializer.loads(resp.raw_body)
+            return result
+
+        return await self._executor.execute(request, response_handler)
+
+    async def compact(
+        self,
+        change_level: Optional[bool] = None,
+        compact_bottom_most_level: Optional[bool] = None,
+    ) -> None:
+        """Compact all databases. This method requires superuser access.
+
+        Note:
+            This command can cause a full rewrite of all data in all databases,
+            which may take very long for large databases.
+
+        Args:
+            change_level (bool | None): Whether or not compacted data should be
+                moved to the minimum possible level. Default value is `False`.
+            compact_bottom_most_level (bool | None): Whether or not to compact
+                the bottom-most level of data. Default value is `False`.
+
+        Raises:
+            DatabaseCompactError: If the operation fails.
+
+        References:
+            - `compact-all-databases `__
+        """  # noqa: E501
+        data = {}
+        if change_level is not None:
+            data["changeLevel"] = change_level
+        if compact_bottom_most_level is not None:
+            data["compactBottomMostLevel"] = compact_bottom_most_level
+
+        request = Request(
+            method=Method.PUT,
+            endpoint="/_admin/compact",
+            data=self.serializer.dumps(data),
+        )
+
+        def response_handler(resp: Response) -> None:
+            if not resp.is_success:
+                raise DatabaseCompactError(resp, request)
+
+        await self._executor.execute(request, response_handler)
+
+    async def reload_routing(self) -> None:
+        """Reload the routing information.
+
+        Raises:
+            ServerReloadRoutingError: If the operation fails.
+
+        References:
+            - `reload-the-routing-table `__
+        """  # noqa: E501
+        request = Request(method=Method.POST, endpoint="/_admin/routing/reload")
+
+        def response_handler(resp: Response) -> None:
+            if not resp.is_success:
+                raise ServerReloadRoutingError(resp, request)
+
+        await self._executor.execute(request, response_handler)
+
+    async def echo(self, body: Optional[Json] = None) -> Result[Json]:
+        """Return an object with the server's request information.
+
+        Args:
+            body (dict | None): Optional body of the request.
+
+        Returns:
+            dict: Details of the request.
+
+        Raises:
+            ServerEchoError: If the operation fails.
+ + References: + - `echo-a-request `__ + """ # noqa: E501 + data = body if body is not None else {} + request = Request(method=Method.POST, endpoint="/_admin/echo", data=data) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerEchoError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def execute(self, command: str) -> Result[Any]: + """Execute raw Javascript command on the server. + + Args: + command (str): Javascript command to execute. + + Returns: + Return value of **command**, if any. + + Raises: + ServerExecuteError: If the execution fails. + + References: + - `execute-a-script `__ + """ # noqa: E501 + request = Request( + method=Method.POST, endpoint="/_admin/execute", data=command.encode("utf-8") + ) + + def response_handler(resp: Response) -> Any: + if not resp.is_success: + raise ServerExecuteError(resp, request) + return self.deserializer.loads(resp.raw_body) + + return await self._executor.execute(request, response_handler) + class StandardDatabase(Database): """Standard database API wrapper. diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 99340dd..96a432a 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -319,6 +319,10 @@ class CursorStateError(ArangoClientError): """The cursor object was in a bad state.""" +class DatabaseCompactError(ArangoServerError): + """Failed to compact databases.""" + + class DatabaseCreateError(ArangoServerError): """Failed to create database.""" @@ -335,6 +339,10 @@ class DatabasePropertiesError(ArangoServerError): """Failed to retrieve database properties.""" +class DatabaseSupportInfoError(ArangoServerError): + """Failed to retrieve support info for deployment.""" + + class DeserializationError(ArangoClientError): """Failed to deserialize the server response.""" @@ -547,14 +555,66 @@ class SerializationError(ArangoClientError): """Failed to serialize the request.""" -class ServerEncryptionError(ArangoServerError): - """Failed to reload user-defined encryption keys.""" +class ServerAvailableOptionsGetError(ArangoServerError): + """Failed to retrieve available server options.""" + + +class ServerCheckAvailabilityError(ArangoServerError): + """Failed to retrieve server availability mode.""" class ServerConnectionError(ArangoServerError): """Failed to connect to ArangoDB server.""" +class ServerCurrentOptionsGetError(ArangoServerError): + """Failed to retrieve currently-set server options.""" + + +class ServerEchoError(ArangoServerError): + """Failed to retrieve details on last request.""" + + +class ServerEncryptionError(ArangoServerError): + """Failed to reload user-defined encryption keys.""" + + +class ServerEngineError(ArangoServerError): + """Failed to retrieve database engine.""" + + +class ServerExecuteError(ArangoServerError): + """Failed to execute raw JavaScript command.""" + + +class ServerModeError(ArangoServerError): + """Failed to retrieve server mode.""" + + +class ServerModeSetError(ArangoServerError): + """Failed to set server mode.""" + + +class ServerLicenseGetError(ArangoServerError): + """Failed to retrieve server license.""" + + +class ServerLicenseSetError(ArangoServerError): + """Failed to set server license.""" + + +class ServerReloadRoutingError(ArangoServerError): + """Failed to reload routing details.""" + + +class ServerShutdownError(ArangoServerError): + """Failed to initiate shutdown sequence.""" + + +class 
ServerShutdownProgressError(ArangoServerError):
+    """Failed to retrieve soft shutdown progress."""
+
+
 class ServerStatusError(ArangoServerError):
     """Failed to retrieve server status."""
 
@@ -567,6 +627,10 @@ class ServerTLSReloadError(ArangoServerError):
     """Failed to reload TLS."""
 
 
+class ServerTimeError(ArangoServerError):
+    """Failed to retrieve server system time."""
+
+
 class ServerVersionError(ArangoServerError):
     """Failed to retrieve server version."""
 
diff --git a/docs/admin.rst b/docs/admin.rst
new file mode 100644
index 0000000..6a494d1
--- /dev/null
+++ b/docs/admin.rst
@@ -0,0 +1,47 @@
+Server Administration
+---------------------
+
+ArangoDB provides operations for server administration and monitoring.
+Most of these operations can only be performed by admin users via the
+``_system`` database.
+
+**Example:**
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "_system" database as root user.
+        sys_db = await client.db("_system", auth=auth)
+
+        # Retrieve the database engine.
+        await sys_db.engine()
+
+        # Retrieve the server time.
+        time = await sys_db.time()
+
+        # Check server availability.
+        availability = await sys_db.check_availability()
+
+        # Retrieve deployment support info.
+        info = await sys_db.support_info()
+
+        # Get the startup option configuration.
+        options = await sys_db.options()
+
+        # Get the available startup options.
+        options = await sys_db.options_available()
+
+        # Return whether or not the server is in read-only mode.
+        mode = await sys_db.mode()
+
+        # Get license information.
+        license = await sys_db.license()
+
+        # Execute JavaScript on the server.
+        result = await sys_db.execute("return 1")
diff --git a/docs/index.rst b/docs/index.rst
index 78afe62..0fab3ac 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -61,6 +61,7 @@ Contents
 ..
toctree:: :maxdepth: 1 + admin user **Miscellaneous** diff --git a/tests/test_database.py b/tests/test_database.py index 7058ac1..5daa837 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,21 +1,39 @@ import asyncio +import datetime import pytest from packaging import version +from arangoasync.client import ArangoClient from arangoasync.collection import StandardCollection from arangoasync.exceptions import ( CollectionCreateError, CollectionDeleteError, CollectionKeyGeneratorsError, CollectionListError, + DatabaseCompactError, DatabaseCreateError, DatabaseDeleteError, DatabaseListError, DatabasePropertiesError, + DatabaseSupportInfoError, JWTSecretListError, JWTSecretReloadError, + ServerAvailableOptionsGetError, + ServerCheckAvailabilityError, + ServerCurrentOptionsGetError, + ServerEchoError, + ServerEngineError, + ServerExecuteError, + ServerLicenseGetError, + ServerLicenseSetError, + ServerModeError, + ServerModeSetError, + ServerReloadRoutingError, + ServerShutdownError, + ServerShutdownProgressError, ServerStatusError, + ServerTimeError, ServerVersionError, ) from arangoasync.typings import CollectionType, KeyOptions, UserInfo @@ -23,7 +41,9 @@ @pytest.mark.asyncio -async def test_database_misc_methods(sys_db, db, bad_db, cluster, db_version): +async def test_database_misc_methods( + sys_db, db, bad_db, cluster, db_version, url, sys_db_name, token +): # Status status = await sys_db.status() assert status["server"] == "arango" @@ -64,6 +84,79 @@ async def test_database_misc_methods(sys_db, db, bad_db, cluster, db_version): with pytest.raises(CollectionKeyGeneratorsError): await bad_db.key_generators() + # Administration + with pytest.raises(ServerEngineError): + await bad_db.engine() + result = await db.engine() + assert isinstance(result, dict) + + with pytest.raises(ServerTimeError): + await bad_db.time() + time = await db.time() + assert isinstance(time, datetime.datetime) + + with pytest.raises(ServerCheckAvailabilityError): + await bad_db.check_availability() + assert isinstance(await db.check_availability(), str) + + with pytest.raises(DatabaseSupportInfoError): + await bad_db.support_info() + info = await sys_db.support_info() + assert isinstance(info, dict) + + if db_version >= version.parse("3.12.0"): + with pytest.raises(ServerCurrentOptionsGetError): + await bad_db.options() + options = await sys_db.options() + assert isinstance(options, dict) + with pytest.raises(ServerAvailableOptionsGetError): + await bad_db.options_available() + options_available = await sys_db.options_available() + assert isinstance(options_available, dict) + + with pytest.raises(ServerModeError): + await bad_db.mode() + mode = await sys_db.mode() + assert isinstance(mode, str) + with pytest.raises(ServerModeSetError): + await bad_db.set_mode("foo") + mode = await sys_db.set_mode("default") + assert isinstance(mode, str) + + with pytest.raises(ServerLicenseGetError): + await bad_db.license() + license = await sys_db.license() + assert isinstance(license, dict) + with pytest.raises(ServerLicenseSetError): + await sys_db.set_license('"abc"') + + with pytest.raises(ServerShutdownError): + await bad_db.shutdown() + with pytest.raises(ServerShutdownProgressError): + await bad_db.shutdown_progress() + + with pytest.raises(ServerReloadRoutingError): + await bad_db.reload_routing() + await sys_db.reload_routing() + + with pytest.raises(ServerEchoError): + await bad_db.echo() + result = await sys_db.echo() + assert isinstance(result, dict) + + with pytest.raises(ServerExecuteError): + await 
bad_db.execute("return 1") + result = await sys_db.execute("return 1") + assert result == 1 + + with pytest.raises(DatabaseCompactError): + await bad_db.compact() + async with ArangoClient(hosts=url) as client: + db = await client.db( + sys_db_name, auth_method="superuser", token=token, verify=True + ) + await db.compact() + @pytest.mark.asyncio async def test_create_drop_database( From 1dd20747988976588d0a3b16d5d5d9d21fdbed70 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 16 Aug 2025 19:49:13 +0800 Subject: [PATCH 11/30] Adding custom requests (#67) --- arangoasync/database.py | 15 +++++++++++++++ docs/database.rst | 7 +++++++ tests/test_database.py | 9 +++++++++ 3 files changed, 31 insertions(+) diff --git a/arangoasync/database.py b/arangoasync/database.py index 449b789..813a1ab 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -2861,6 +2861,21 @@ def response_handler(resp: Response) -> Any: return await self._executor.execute(request, response_handler) + async def request(self, request: Request) -> Result[Response]: + """Execute a custom request. + + Args: + request (Request): Request object to be executed. + + Returns: + Response: Response object containing the result of the request. + """ + + def response_handler(resp: Response) -> Response: + return resp + + return await self._executor.execute(request, response_handler) + class StandardDatabase(Database): """Standard database API wrapper. diff --git a/docs/database.rst b/docs/database.rst index 851cc9d..f4dc759 100644 --- a/docs/database.rst +++ b/docs/database.rst @@ -14,6 +14,7 @@ information. from arangoasync import ArangoClient from arangoasync.auth import Auth + from arangoasync.request import Method, Request # Initialize the client for ArangoDB. async with ArangoClient(hosts="http://localhost:8529") as client: @@ -60,4 +61,10 @@ information. # Delete the database. Note that the new users will remain. await sys_db.delete_database("test") + # Example of a custom request + request = Request( + method=Method.POST, endpoint="/_admin/execute", data="return 1".encode("utf-8") + ) + response = await sys_db.request(request) + See :class:`arangoasync.client.ArangoClient` and :class:`arangoasync.database.StandardDatabase` for API specification. 
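The `request()` escape hatch documented above can also be used for endpoints the driver does not wrap. Below is a minimal sketch, not an official recipe: the helper name `call_custom_endpoint`, the `/_admin/echo` target, and the connection details are illustrative assumptions.

```python
import asyncio
import json

from arangoasync import ArangoClient
from arangoasync.auth import Auth
from arangoasync.request import Method, Request


async def call_custom_endpoint():
    # Connection boilerplate, same shape as the other documentation examples.
    async with ArangoClient(hosts="http://localhost:8529") as client:
        db = await client.db("_system", auth=Auth(username="root", password="passwd"))

        # Build a raw request; "/_admin/echo" is just an illustrative endpoint.
        request = Request(method=Method.POST, endpoint="/_admin/echo", data=b"{}")

        # db.request() hands back the raw Response; decode the body yourself.
        response = await db.request(request)
        print(json.loads(response.raw_body))


asyncio.run(call_custom_endpoint())
```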
diff --git a/tests/test_database.py b/tests/test_database.py index 5daa837..c9a260b 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,5 +1,6 @@ import asyncio import datetime +import json import pytest from packaging import version @@ -36,6 +37,7 @@ ServerTimeError, ServerVersionError, ) +from arangoasync.request import Method, Request from arangoasync.typings import CollectionType, KeyOptions, UserInfo from tests.helpers import generate_col_name, generate_db_name, generate_username @@ -157,6 +159,13 @@ async def test_database_misc_methods( ) await db.compact() + # Custom Request + request = Request( + method=Method.POST, endpoint="/_admin/execute", data="return 1".encode("utf-8") + ) + response = await sys_db.request(request) + assert json.loads(response.raw_body) == 1 + @pytest.mark.asyncio async def test_create_drop_database( From f1de45bf445f500848d3b0e413355a026b4d7be6 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 16 Aug 2025 21:20:16 +0800 Subject: [PATCH 12/30] Monitoring API (#68) * Adding monitoring API * API calls only in 3.12 * API calls only in enterprise --- arangoasync/database.py | 343 +++++++++++++++++++++++++++++++++++++- arangoasync/exceptions.py | 32 ++++ docs/admin.rst | 3 + tests/test_database.py | 54 +++++- 4 files changed, 430 insertions(+), 2 deletions(-) diff --git a/arangoasync/database.py b/arangoasync/database.py index 813a1ab..2997bab 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -7,7 +7,7 @@ from datetime import datetime -from typing import Any, List, Optional, Sequence, TypeVar, cast +from typing import Any, Dict, List, Optional, Sequence, TypeVar, cast from warnings import warn from arangoasync.aql import AQL @@ -42,6 +42,7 @@ PermissionListError, PermissionResetError, PermissionUpdateError, + ServerApiCallsError, ServerAvailableOptionsGetError, ServerCheckAvailabilityError, ServerCurrentOptionsGetError, @@ -51,8 +52,15 @@ ServerExecuteError, ServerLicenseGetError, ServerLicenseSetError, + ServerLogLevelError, + ServerLogLevelResetError, + ServerLogLevelSetError, + ServerLogSettingError, + ServerLogSettingSetError, + ServerMetricsError, ServerModeError, ServerModeSetError, + ServerReadLogError, ServerReloadRoutingError, ServerShutdownError, ServerShutdownProgressError, @@ -2876,6 +2884,339 @@ def response_handler(resp: Response) -> Response: return await self._executor.execute(request, response_handler) + async def metrics(self, server_id: Optional[str] = None) -> Result[str]: + """Return server metrics in Prometheus format. + + Args: + server_id (str | None): Returns metrics of the specified server. + If no serverId is given, the asked server will reply. + + Returns: + str: Server metrics in Prometheus format. + + Raises: + ServerMetricsError: If the operation fails. 
+
+        References:
+            - `metrics-api-v2 `__
+        """  # noqa: E501
+        params: Params = {}
+        if server_id is not None:
+            params["serverId"] = server_id
+
+        request = Request(
+            method=Method.GET,
+            endpoint="/_admin/metrics/v2",
+            params=params,
+        )
+
+        def response_handler(resp: Response) -> str:
+            if not resp.is_success:
+                raise ServerMetricsError(resp, request)
+            return resp.raw_body.decode("utf-8")
+
+        return await self._executor.execute(request, response_handler)
+
+    async def read_log_entries(
+        self,
+        upto: Optional[int | str] = None,
+        level: Optional[str] = None,
+        start: Optional[int] = None,
+        size: Optional[int] = None,
+        offset: Optional[int] = None,
+        search: Optional[str] = None,
+        sort: Optional[str] = None,
+        server_id: Optional[str] = None,
+    ) -> Result[Json]:
+        """Read the global log from the server.
+
+        Args:
+            upto (int | str | None): Return the log entries up to the given level
+                (mutually exclusive with parameter **level**). Allowed values are
+                "fatal", "error", "warning", "info" (default), "debug" and "trace".
+            level (str | None): Return the log entries of only the given level
+                (mutually exclusive with **upto**).
+            start (int | None): Return the log entries whose ID is greater than or
+                equal to the given value.
+            size (int | None): Restrict the size of the result to the given value.
+                This can be used for pagination.
+            offset (int | None): Number of entries to skip (e.g. for pagination).
+            search (str | None): Return only the log entries containing the given text.
+            sort (str | None): Sort the log entries according to the given fashion,
+                which can be "asc" or "desc".
+            server_id (str | None): Return all log entries of the specified server.
+                If no serverId is given, the server that receives the request responds.
+
+        Returns:
+            dict: Server log entries.
+
+        Raises:
+            ServerReadLogError: If the operation fails.
+
+        References:
+            - `get-the-global-server-logs `__
+        """  # noqa: E501
+        params: Params = {}
+        if upto is not None:
+            params["upto"] = upto
+        if level is not None:
+            params["level"] = level
+        if start is not None:
+            params["start"] = start
+        if size is not None:
+            params["size"] = size
+        if offset is not None:
+            params["offset"] = offset
+        if search is not None:
+            params["search"] = search
+        if sort is not None:
+            params["sort"] = sort
+        if server_id is not None:
+            params["serverId"] = server_id
+
+        request = Request(
+            method=Method.GET,
+            endpoint="/_admin/log/entries",
+            params=params,
+            prefix_needed=False,
+        )
+
+        def response_handler(resp: Response) -> Json:
+            if not resp.is_success:
+                raise ServerReadLogError(resp, request)
+
+            result: Json = self.deserializer.loads(resp.raw_body)
+            return result
+
+        return await self._executor.execute(request, response_handler)
+
+    async def log_levels(
+        self, server_id: Optional[str] = None, with_appenders: Optional[bool] = None
+    ) -> Result[Json]:
+        """Return current logging levels.
+
+        Args:
+            server_id (str | None): Forward the request to the specified server.
+            with_appenders (bool | None): Include appenders in the response.
+
+        Returns:
+            dict: Current logging levels.
+
+        Raises:
+            ServerLogLevelError: If the operation fails.
+ + References: + - `get-the-server-log-levels `__ + """ # noqa: E501 + params: Params = {} + if server_id is not None: + params["serverId"] = server_id + if with_appenders is not None: + params["withAppenders"] = with_appenders + + request = Request( + method=Method.GET, + endpoint="/_admin/log/level", + params=params, + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerLogLevelError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def set_log_levels( + self, + server_id: Optional[str] = None, + with_appenders: Optional[bool] = None, + **kwargs: Dict[str, Any], + ) -> Result[Json]: + """Set the logging levels. + + This method takes arbitrary keyword arguments where the keys are the + logger names and the values are the logging levels. For example: + + .. code-block:: python + + db.set_log_levels( + agency='DEBUG', + collector='INFO', + threads='WARNING' + ) + + Keys that are not valid logger names are ignored. + + Args: + server_id (str | None) -> Forward the request to a specific server. + with_appenders (bool | None): Include appenders in the response. + kwargs (dict): Logging levels to be set. + + Returns: + dict: New logging levels. + + Raises: + ServerLogLevelSetError: If the operation fails. + + References: + - `set-the-structured-log-settings `__ + """ # noqa: E501 + params: Params = {} + if server_id is not None: + params["serverId"] = server_id + if with_appenders is not None: + params["withAppenders"] = with_appenders + + request = Request( + method=Method.PUT, + endpoint="/_admin/log/level", + params=params, + data=self.serializer.dumps(kwargs), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerLogLevelSetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def reset_log_levels(self, server_id: Optional[str] = None) -> Result[Json]: + """Reset the logging levels. + + Revert the server’s log level settings to the values they had at startup, + as determined by the startup options specified on the command-line, + a configuration file, and the factory defaults. + + Args: + server_id: Forward the request to a specific server. + + Returns: + dict: New logging levels. + + Raises: + ServerLogLevelResetError: If the operation fails. + + References: + - `reset-the-server-log-levels `__ + """ # noqa: E501 + params: Params = {} + if server_id is not None: + params["serverId"] = server_id + + request = Request( + method=Method.DELETE, + endpoint="/_admin/log/level", + params=params, + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerLogLevelResetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def log_settings(self) -> Result[Json]: + """Get the structured log settings. + + Returns: + dict: Current structured log settings. + + Raises: + ServerLogSettingError: If the operation fails. 
+ + References: + - `get-the-structured-log-settings `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_admin/log/structured", + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerLogSettingError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def set_log_settings(self, **kwargs: Dict[str, Any]) -> Result[Json]: + """Set the structured log settings. + + This method takes arbitrary keyword arguments where the keys are the + structured log parameters and the values are true or false, for either + enabling or disabling the parameters. + + .. code-block:: python + + db.set_log_settings( + database=True, + url=True, + username=False, + ) + + Args: + kwargs (dict): Structured log parameters to be set. + + Returns: + dict: New structured log settings. + + Raises: + ServerLogSettingSetError: If the operation fails. + + References: + - `set-the-structured-log-settings `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint="/_admin/log/structured", + data=self.serializer.dumps(kwargs), + prefix_needed=False, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerLogSettingSetError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def api_calls(self) -> Result[Json]: + """Get a list of the most recent requests with a timestamp and the endpoint. + + Returns: + dict: API calls made to the server. + + Raises: + ServerApiCallsError: If the operation fails. + + References: + - `get-recent-api-calls `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_admin/server/api-calls", + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerApiCallsError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body)["result"] + return result + + return await self._executor.execute(request, response_handler) + class StandardDatabase(Database): """Standard database API wrapper. 
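For orientation, here is a hedged sketch of how the logging helpers added in this hunk (`log_levels`, `set_log_levels`, `reset_log_levels`, `log_settings`, `set_log_settings`) might be combined. The topic and setting names are taken from the docstring examples; the helper name and connection details are assumptions.

```python
import asyncio

from arangoasync import ArangoClient
from arangoasync.auth import Auth


async def inspect_logging():
    async with ArangoClient(hosts="http://localhost:8529") as client:
        # Log-level endpoints are global, but the _system database is the safest place to call them from.
        sys_db = await client.db("_system", auth=Auth(username="root", password="passwd"))

        # Read current log levels, raise one topic, then restore the startup defaults.
        levels = await sys_db.log_levels()
        print(levels.get("agency"))
        await sys_db.set_log_levels(agency="DEBUG")
        await sys_db.reset_log_levels()

        # Structured log settings follow the same get/set pattern.
        print(await sys_db.log_settings())
        await sys_db.set_log_settings(database=True, url=True)


asyncio.run(inspect_logging())
```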
diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 96a432a..ebe028e 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -555,6 +555,10 @@ class SerializationError(ArangoClientError): """Failed to serialize the request.""" +class ServerApiCallsError(ArangoServerError): + """Failed to retrieve the list of recent API calls.""" + + class ServerAvailableOptionsGetError(ArangoServerError): """Failed to retrieve available server options.""" @@ -587,6 +591,10 @@ class ServerExecuteError(ArangoServerError): """Failed to execute raw JavaScript command.""" +class ServerMetricsError(ArangoServerError): + """Failed to retrieve server metrics.""" + + class ServerModeError(ArangoServerError): """Failed to retrieve server mode.""" @@ -603,6 +611,30 @@ class ServerLicenseSetError(ArangoServerError): """Failed to set server license.""" +class ServerLogLevelError(ArangoServerError): + """Failed to retrieve server log levels.""" + + +class ServerLogLevelResetError(ArangoServerError): + """Failed to reset server log levels.""" + + +class ServerLogLevelSetError(ArangoServerError): + """Failed to set server log levels.""" + + +class ServerLogSettingError(ArangoServerError): + """Failed to retrieve server log settings.""" + + +class ServerLogSettingSetError(ArangoServerError): + """Failed to set server log settings.""" + + +class ServerReadLogError(ArangoServerError): + """Failed to retrieve global log.""" + + class ServerReloadRoutingError(ArangoServerError): """Failed to reload routing details.""" diff --git a/docs/admin.rst b/docs/admin.rst index 6a494d1..6120567 100644 --- a/docs/admin.rst +++ b/docs/admin.rst @@ -45,3 +45,6 @@ Most of these operations can only be performed by admin users via the # Execute Javascript on the server result = await sys_db.execute("return 1") + + # Get metrics in Prometheus format + metrics = await db.metrics() diff --git a/tests/test_database.py b/tests/test_database.py index c9a260b..425007b 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -20,6 +20,7 @@ DatabaseSupportInfoError, JWTSecretListError, JWTSecretReloadError, + ServerApiCallsError, ServerAvailableOptionsGetError, ServerCheckAvailabilityError, ServerCurrentOptionsGetError, @@ -28,8 +29,15 @@ ServerExecuteError, ServerLicenseGetError, ServerLicenseSetError, + ServerLogLevelError, + ServerLogLevelResetError, + ServerLogLevelSetError, + ServerLogSettingError, + ServerLogSettingSetError, + ServerMetricsError, ServerModeError, ServerModeSetError, + ServerReadLogError, ServerReloadRoutingError, ServerShutdownError, ServerShutdownProgressError, @@ -44,7 +52,7 @@ @pytest.mark.asyncio async def test_database_misc_methods( - sys_db, db, bad_db, cluster, db_version, url, sys_db_name, token + sys_db, db, bad_db, cluster, db_version, url, sys_db_name, token, enterprise ): # Status status = await sys_db.status() @@ -166,6 +174,50 @@ async def test_database_misc_methods( response = await sys_db.request(request) assert json.loads(response.raw_body) == 1 + if enterprise and db_version >= version.parse("3.12.0"): + # API calls + with pytest.raises(ServerApiCallsError): + await bad_db.api_calls() + result = await sys_db.api_calls() + assert isinstance(result, dict) + + +@pytest.mark.asyncio +async def test_metrics(db, bad_db): + with pytest.raises(ServerMetricsError): + await bad_db.metrics() + metrics = await db.metrics() + assert isinstance(metrics, str) + + +@pytest.mark.asyncio +async def test_logs(sys_db, bad_db): + with pytest.raises(ServerReadLogError): + await 
bad_db.read_log_entries() + result = await sys_db.read_log_entries() + assert isinstance(result, dict) + with pytest.raises(ServerLogLevelError): + await bad_db.log_levels() + result = await sys_db.log_levels() + assert isinstance(result, dict) + with pytest.raises(ServerLogLevelSetError): + await bad_db.set_log_levels() + new_levels = {"agency": "DEBUG", "engines": "INFO", "threads": "WARNING"} + result = await sys_db.set_log_levels(**new_levels) + assert isinstance(result, dict) + with pytest.raises(ServerLogLevelResetError): + await bad_db.reset_log_levels() + result = await sys_db.reset_log_levels() + assert isinstance(result, dict) + with pytest.raises(ServerLogSettingError): + await bad_db.log_settings() + result = await sys_db.log_settings() + assert isinstance(result, dict) + with pytest.raises(ServerLogSettingSetError): + await bad_db.set_log_settings() + result = await sys_db.set_log_settings() + assert isinstance(result, dict) + @pytest.mark.asyncio async def test_create_drop_database( From 4bc2ca70eabc8a75c55887d4623acbc66b6e0fa8 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 17 Aug 2025 12:48:53 +0800 Subject: [PATCH 13/30] Replication APIjk (#69) * Adding replication API * Test fixes --- arangoasync/database.py | 10 ++ arangoasync/exceptions.py | 28 ++++ arangoasync/replication.py | 270 +++++++++++++++++++++++++++++++++++++ docs/migration.rst | 9 +- tests/test_database.py | 37 +++++ 5 files changed, 353 insertions(+), 1 deletion(-) create mode 100644 arangoasync/replication.py diff --git a/arangoasync/database.py b/arangoasync/database.py index 2997bab..a28fa43 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -101,6 +101,7 @@ ) from arangoasync.foxx import Foxx from arangoasync.graph import Graph +from arangoasync.replication import Replication from arangoasync.request import Method, Request from arangoasync.response import Response from arangoasync.result import Result @@ -234,6 +235,15 @@ def foxx(self) -> Foxx: """ return Foxx(self._executor) + @property + def replication(self) -> Replication: + """Return Replication API wrapper. + + Returns: + Replication API wrapper. + """ + return Replication(self._executor) + async def properties(self) -> Result[DatabaseProperties]: """Return database properties. 
diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index ebe028e..5a904ee 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -551,6 +551,34 @@ class PermissionUpdateError(ArangoServerError): """Failed to update user permission.""" +class ReplicationApplierConfigError(ArangoServerError): + """Failed to retrieve replication applier configuration.""" + + +class ReplicationApplierStateError(ArangoServerError): + """Failed to retrieve replication applier state.""" + + +class ReplicationClusterInventoryError(ArangoServerError): + """Failed to retrieve overview of collection and indexes in a cluster.""" + + +class ReplicationDumpError(ArangoServerError): + """Failed to retrieve collection content.""" + + +class ReplicationInventoryError(ArangoServerError): + """Failed to retrieve inventory of collection and indexes.""" + + +class ReplicationLoggerStateError(ArangoServerError): + """Failed to retrieve logger state.""" + + +class ReplicationServerIDError(ArangoServerError): + """Failed to retrieve server ID.""" + + class SerializationError(ArangoClientError): """Failed to serialize the request.""" diff --git a/arangoasync/replication.py b/arangoasync/replication.py new file mode 100644 index 0000000..9d96709 --- /dev/null +++ b/arangoasync/replication.py @@ -0,0 +1,270 @@ +__all__ = ["Replication"] + + +from typing import Optional + +from arangoasync.exceptions import ( + ReplicationApplierConfigError, + ReplicationApplierStateError, + ReplicationClusterInventoryError, + ReplicationDumpError, + ReplicationInventoryError, + ReplicationLoggerStateError, + ReplicationServerIDError, +) +from arangoasync.executor import ApiExecutor +from arangoasync.request import Method, Request +from arangoasync.response import Response +from arangoasync.result import Result +from arangoasync.serialization import Deserializer, Serializer +from arangoasync.typings import Json, Jsons, Params + + +class Replication: + """Replication API wrapper.""" + + def __init__(self, executor: ApiExecutor) -> None: + self._executor = executor + + @property + def serializer(self) -> Serializer[Json]: + """Return the serializer.""" + return self._executor.serializer + + @property + def deserializer(self) -> Deserializer[Json, Jsons]: + """Return the deserializer.""" + return self._executor.deserializer + + async def inventory( + self, + batch_id: str, + include_system: Optional[bool] = None, + all_databases: Optional[bool] = None, + collection: Optional[bool] = None, + db_server: Optional[str] = None, + ) -> Result[Json]: + """ + Return an overview of collections and indexes. + + Args: + batch_id (str): Batch ID. + include_system (bool | None): Include system collections. + all_databases (bool | None): Include all databases (only on "_system"). + collection (bool | None): If this parameter is set, the + response will be restricted to a single collection (the one specified), + and no views will be returned. + db_server (str | None): On a Coordinator, this request must have a + DBserver query parameter + + Returns: + dict: Overview of collections and indexes. + + Raises: + ReplicationInventoryError: If retrieval fails. 
+ + References: + - `get-a-replication-inventory `__ + """ # noqa: E501 + params: Params = dict() + params["batchId"] = batch_id + if include_system is not None: + params["includeSystem"] = include_system + if all_databases is not None: + params["global"] = all_databases + if collection is not None: + params["collection"] = collection + if db_server is not None: + params["DBServer"] = db_server + + request = Request( + method=Method.GET, + endpoint="/_api/replication/inventory", + params=params, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ReplicationInventoryError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def dump( + self, + collection: str, + batch_id: Optional[str] = None, + chunk_size: Optional[int] = None, + ) -> Result[bytes]: + """Return the events data of one collection. + + Args: + collection (str): ID of the collection to dump. + batch_id (str | None): Batch ID. + chunk_size (int | None): Size of the result in bytes. This value is honored + approximately only. + + Returns: + bytes: Collection events data. + + Raises: + ReplicationDumpError: If retrieval fails. + + References: + - `get-a-replication-dump `__ + """ # noqa: E501 + params: Params = dict() + params["collection"] = collection + if batch_id is not None: + params["batchId"] = batch_id + if chunk_size is not None: + params["chunkSize"] = chunk_size + + request = Request( + method=Method.GET, + endpoint="/_api/replication/dump", + params=params, + ) + + def response_handler(resp: Response) -> bytes: + if not resp.is_success: + raise ReplicationDumpError(resp, request) + return resp.raw_body + + return await self._executor.execute(request, response_handler) + + async def cluster_inventory( + self, include_system: Optional[bool] = None + ) -> Result[Json]: + """Return an overview of collections and indexes in a cluster. + + Args: + include_system (bool | None): Include system collections. + + Returns: + dict: Overview of collections and indexes in the cluster. + + Raises: + ReplicationClusterInventoryError: If retrieval fails. + + References: + - `get-the-cluster-collections-and-indexes `__ + """ # noqa: E501 + params: Params = {} + if include_system is not None: + params["includeSystem"] = include_system + + request = Request( + method=Method.GET, + endpoint="/_api/replication/clusterInventory", + params=params, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ReplicationClusterInventoryError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def logger_state(self) -> Result[Json]: + """Return the state of the replication logger. + + Returns: + dict: Logger state. + + Raises: + ReplicationLoggerStateError: If retrieval fails. + + References: + - `get-the-replication-logger-state `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/replication/logger-state", + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ReplicationLoggerStateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def applier_config(self) -> Result[Json]: + """Return the configuration of the replication applier. 
+ + Returns: + dict: Configuration of the replication applier. + + Raises: + ReplicationApplierConfigError: If retrieval fails. + + References: + - `get-the-replication-applier-configuration `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/replication/applier-config", + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ReplicationApplierConfigError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def applier_state(self) -> Result[Json]: + """Return the state of the replication applier. + + Returns: + dict: State of the replication applier. + + Raises: + ReplicationApplierStateError: If retrieval fails. + + References: + - `get-the-replication-applier-state `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/replication/applier-state", + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ReplicationApplierStateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return result + + return await self._executor.execute(request, response_handler) + + async def server_id(self) -> Result[str]: + """Return the current server's ID. + + Returns: + str: Server ID. + + Raises: + ReplicationServerIDError: If retrieval fails. + + References: + - `get-the-replication-server-id `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint="/_api/replication/server-id", + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise ReplicationServerIDError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return str(result["serverId"]) + + return await self._executor.execute(request, response_handler) diff --git a/docs/migration.rst b/docs/migration.rst index 7c2427e..0353a0d 100644 --- a/docs/migration.rst +++ b/docs/migration.rst @@ -51,7 +51,7 @@ this is not always consistent. The asynchronous driver, however, tries to stick to a simple rule: -* If the API returns a camel case key, it will be returned as is. +* If the API returns a camel case key, it will be returned as is. The response is returned from the server as is. * Parameters passed from client to server use the snake case equivalent of the camel case keys required by the API (e.g. `userName` becomes `user_name`). This is done to ensure PEP8 compatibility. @@ -74,6 +74,13 @@ Serialization Check out the :ref:`Serialization` section to learn more about how to implement your own serializer/deserializer. The current driver makes use of generic types and allows for a higher degree of customization. +Replication +=========== + +Although a minimal replication API is available for observability purposes, its use is not recommended. +Most of these are internal APIs that are not meant to be used by the end user. If you need to make any changes +to replication, please do so from the cluster web interface. 
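Editor's note (not part of the patch): even though the paragraph above discourages driving replication through this API, a minimal read-only sketch of the wrappers added in `arangoasync/replication.py` may help when using it for observability. Assumptions that do not come from the patch: a single-server deployment reachable at `http://localhost:8529` and the `root`/`passwd` credentials used elsewhere in this test suite.

```python
import asyncio

from arangoasync.auth import Auth
from arangoasync.client import ArangoClient


async def main() -> None:
    # Assumed host and credentials; adjust for your deployment.
    async with ArangoClient(hosts="http://localhost:8529") as client:
        db = await client.db("_system", auth=Auth(username="root", password="passwd"))

        # Read-only observability calls that take no batch ID.
        print("server id:", await db.replication.server_id())
        print("applier state:", await db.replication.applier_state())
        print("applier config:", await db.replication.applier_config())
        # Single server only; on a cluster, prefer cluster_inventory().
        print("logger state:", await db.replication.logger_state())


asyncio.run(main())
```

`inventory()` and `dump()` additionally require a batch ID from the replication batch API, so they are left out of this sketch.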
+ Mixing sync and async ===================== diff --git a/tests/test_database.py b/tests/test_database.py index 425007b..33dcc56 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -20,6 +20,13 @@ DatabaseSupportInfoError, JWTSecretListError, JWTSecretReloadError, + ReplicationApplierConfigError, + ReplicationApplierStateError, + ReplicationClusterInventoryError, + ReplicationDumpError, + ReplicationInventoryError, + ReplicationLoggerStateError, + ReplicationServerIDError, ServerApiCallsError, ServerAvailableOptionsGetError, ServerCheckAvailabilityError, @@ -190,6 +197,36 @@ async def test_metrics(db, bad_db): assert isinstance(metrics, str) +@pytest.mark.asyncio +async def test_replication(db, bad_db, cluster): + with pytest.raises(ReplicationInventoryError): + await bad_db.replication.inventory("id") + with pytest.raises(ReplicationDumpError): + await bad_db.replication.dump("test_collection") + if cluster: + with pytest.raises(ReplicationClusterInventoryError): + await bad_db.replication.cluster_inventory() + result = await db.replication.cluster_inventory() + assert isinstance(result, dict) + if not cluster: + with pytest.raises(ReplicationLoggerStateError): + await bad_db.replication.logger_state() + result = await db.replication.logger_state() + assert isinstance(result, dict) + with pytest.raises(ReplicationApplierConfigError): + await bad_db.replication.applier_config() + result = await db.replication.applier_config() + assert isinstance(result, dict) + with pytest.raises(ReplicationApplierStateError): + await bad_db.replication.applier_state() + result = await db.replication.applier_state() + assert isinstance(result, dict) + with pytest.raises(ReplicationServerIDError): + await bad_db.replication.server_id() + result = await db.replication.server_id() + assert isinstance(result, str) + + @pytest.mark.asyncio async def test_logs(sys_db, bad_db): with pytest.raises(ServerReadLogError): From 12530de10b9c1244d06c5002ad5dd35d9279b60c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 18 Aug 2025 06:49:52 +0000 Subject: [PATCH 14/30] Version 1.0.0 --- CONTRIBUTING.md | 2 +- README.md | 2 +- arangoasync/version.py | 2 +- docs/index.rst | 3 +-- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 375d8b0..66044c4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,7 +10,7 @@ pre-commit install # Install git pre-commit hooks Run unit tests with coverage: ```shell -pytest --cov=arango --cov-report=html # Open htmlcov/index.html in your browser +pytest --enterprise --cluster --cov=arango --cov-report=html # Open htmlcov/index.html in your browser ``` To start and ArangoDB instance locally, run: diff --git a/README.md b/README.md index ab24eae..b80d633 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ database natively supporting documents, graphs and search. This is the _asyncio_ alternative of the [python-arango](https://github.com/arangodb/python-arango) driver. -**Note: This project is still in active development, features might be added or removed.** +Check out a demo app at [python-arango-async-demo](https://github.com/apetenchea/python-arango-async-demo). 
## Requirements diff --git a/arangoasync/version.py b/arangoasync/version.py index b1a19e3..5becc17 100644 --- a/arangoasync/version.py +++ b/arangoasync/version.py @@ -1 +1 @@ -__version__ = "0.0.5" +__version__ = "1.0.0" diff --git a/docs/index.rst b/docs/index.rst index 0fab3ac..52714c3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,8 +6,7 @@ python-arango-async ------------------- Welcome to the documentation for python-arango-async_, a Python driver for ArangoDB_. - -**Note: This project is still in active development, features might be added or removed.** +You can check out a demo app at [python-arango-async-demo](https://github.com/apetenchea/python-arango-async-demo). Requirements ============= From 59c085d3087f0a1213777f2fffa373e075f59cca Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 18 Aug 2025 07:05:31 +0000 Subject: [PATCH 15/30] Updated docs --- docs/specs.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/specs.rst b/docs/specs.rst index 763af9c..e8c0a32 100644 --- a/docs/specs.rst +++ b/docs/specs.rst @@ -31,6 +31,9 @@ python-arango-async. .. automodule:: arangoasync.backup :members: +.. automodule:: arangoasync.foxx + :members: + .. automodule:: arangoasync.cluster :members: @@ -57,3 +60,6 @@ python-arango-async. .. automodule:: arangoasync.result :members: + +.. automodule:: arangoasync.replication + :members: From dc72d6cb704ff7937bbb067a5ff817eaca0574a6 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 18 Aug 2025 07:06:06 +0000 Subject: [PATCH 16/30] Bumping version number --- arangoasync/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/arangoasync/version.py b/arangoasync/version.py index 5becc17..5c4105c 100644 --- a/arangoasync/version.py +++ b/arangoasync/version.py @@ -1 +1 @@ -__version__ = "1.0.0" +__version__ = "1.0.1" From caf33941d7d4151274db964ba6163680595cb4e7 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 18 Aug 2025 07:16:12 +0000 Subject: [PATCH 17/30] Updated docs --- arangoasync/version.py | 2 +- docs/index.rst | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/arangoasync/version.py b/arangoasync/version.py index 5c4105c..7863915 100644 --- a/arangoasync/version.py +++ b/arangoasync/version.py @@ -1 +1 @@ -__version__ = "1.0.1" +__version__ = "1.0.2" diff --git a/docs/index.rst b/docs/index.rst index 52714c3..b9ac826 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,7 +6,8 @@ python-arango-async ------------------- Welcome to the documentation for python-arango-async_, a Python driver for ArangoDB_. -You can check out a demo app at [python-arango-async-demo](https://github.com/apetenchea/python-arango-async-demo). + +You can check out a demo app at python-arango-async-demo_. Requirements ============= @@ -91,3 +92,4 @@ Contents .. _ArangoDB: https://www.arangodb.com .. _python-arango-async: https://github.com/arangodb/python-arango-async +.. 
_python-arango-async-demo: https://github.com/apetenchea/python-arango-async-demo From b4e4bb533bb7232ebb409ee559cde7735b7403f2 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 31 Oct 2025 22:21:36 +0800 Subject: [PATCH 18/30] Updating test matrix (#70) --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fb1bc8e..836c418 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -20,8 +20,8 @@ workflows: parameters: python_version: ["3.10", "3.11", "3.12"] arangodb_config: ["single", "cluster"] - arangodb_license: ["community", "enterprise"] - arangodb_version: ["3.11", "3.12"] + arangodb_license: ["enterprise"] + arangodb_version: ["3.12"] jobs: lint: From 414a4de7e72e832492d26017e3d3ee84bcb24ac9 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 3 Nov 2025 15:47:11 +0800 Subject: [PATCH 19/30] /_admin/server/aql-queries (#71) * Implemented /_admin/server/api-calls * Version bump --- arangoasync/aql.py | 20 ++++++++++++++++++++ arangoasync/exceptions.py | 4 ++++ arangoasync/version.py | 2 +- tests/test_aql.py | 5 +++++ 4 files changed, 30 insertions(+), 1 deletion(-) diff --git a/arangoasync/aql.py b/arangoasync/aql.py index b81cade..1fad880 100644 --- a/arangoasync/aql.py +++ b/arangoasync/aql.py @@ -16,6 +16,7 @@ AQLQueryClearError, AQLQueryExecuteError, AQLQueryExplainError, + AQLQueryHistoryError, AQLQueryKillError, AQLQueryListError, AQLQueryRulesGetError, @@ -426,6 +427,25 @@ def response_handler(resp: Response) -> QueryTrackingConfiguration: return await self._executor.execute(request, response_handler) + async def history(self) -> Result[Json]: + """Return recently executed AQL queries (admin only). + + Returns: + dict: AQL query history. + + Raises: + AQLQueryHistoryError: If retrieval fails. + """ + request = Request(method=Method.GET, endpoint="/_admin/server/aql-queries") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise AQLQueryHistoryError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, result["result"]) + + return await self._executor.execute(request, response_handler) + async def queries(self, all_queries: bool = False) -> Result[Jsons]: """Return a list of currently running queries. 
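Editor's note (not part of the patch): the new `history()` wrapper above maps to `GET /_admin/server/aql-queries` and requires admin privileges. A hedged usage sketch, with placeholder host and credentials:

```python
import asyncio

from arangoasync.auth import Auth
from arangoasync.client import ArangoClient


async def main() -> None:
    # Assumed host and admin credentials.
    async with ArangoClient(hosts="http://localhost:8529") as client:
        db = await client.db("_system", auth=Auth(username="root", password="passwd"))

        # Run a trivial query so the history has something to show.
        cursor = await db.aql.execute("RETURN 1")
        async for _ in cursor:
            pass

        # Returned as a plain dict; raises AQLQueryHistoryError on failure.
        print(await db.aql.history())


asyncio.run(main())
```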
diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 5a904ee..a940e1b 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -111,6 +111,10 @@ class AQLQueryExplainError(ArangoServerError): """Failed to parse and explain query.""" +class AQLQueryHistoryError(ArangoServerError): + """Failed to retrieve running AQL queries.""" + + class AQLQueryKillError(ArangoServerError): """Failed to kill the query.""" diff --git a/arangoasync/version.py b/arangoasync/version.py index 7863915..976498a 100644 --- a/arangoasync/version.py +++ b/arangoasync/version.py @@ -1 +1 @@ -__version__ = "1.0.2" +__version__ = "1.0.3" diff --git a/tests/test_aql.py b/tests/test_aql.py index ab5ba19..24f233f 100644 --- a/tests/test_aql.py +++ b/tests/test_aql.py @@ -21,6 +21,7 @@ AQLQueryClearError, AQLQueryExecuteError, AQLQueryExplainError, + AQLQueryHistoryError, AQLQueryKillError, AQLQueryListError, AQLQueryRulesGetError, @@ -96,6 +97,8 @@ async def test_list_queries(superuser, db, bad_db): _ = await superuser.aql.slow_queries(all_queries=True) await aql.clear_slow_queries() await superuser.aql.clear_slow_queries(all_queries=True) + history = await superuser.aql.history() + assert isinstance(history, dict) with pytest.raises(AQLQueryListError): _ = await bad_db.aql.queries() @@ -109,6 +112,8 @@ async def test_list_queries(superuser, db, bad_db): _ = await aql.slow_queries(all_queries=True) with pytest.raises(AQLQueryClearError): await aql.clear_slow_queries(all_queries=True) + with pytest.raises(AQLQueryHistoryError): + _ = await bad_db.aql.history() long_running_task.cancel() From 5f20e5cbf709b278eee20e0c4fec731d8253c1a2 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Tue, 9 Dec 2025 19:43:13 +0800 Subject: [PATCH 20/30] Do not add 8529 to the ports list, if another one is already specified (#72) --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 66e5a9d..295b946 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -39,7 +39,7 @@ def pytest_addoption(parser): "--host", action="store", default="127.0.0.1", help="ArangoDB host address" ) parser.addoption( - "--port", action="append", default=["8529"], help="ArangoDB coordinator ports" + "--port", action="append", default=None, help="ArangoDB coordinator ports" ) parser.addoption( "--root", action="store", default="root", help="ArangoDB root user" @@ -59,7 +59,7 @@ def pytest_addoption(parser): def pytest_configure(config): - ports = config.getoption("port") + ports = config.getoption("port") or ["8529"] hosts = [f"http://{config.getoption('host')}:{p}" for p in ports] url = hosts[0] From 219d71ab012c0dc4bc2c245209c0a12171aedfed Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 14 Dec 2025 23:32:36 +0800 Subject: [PATCH 21/30] Test-only improvements (#73) * Test-only improvements * enterprise option is obsolete * Updating circleci config * Fix smartgraph --- .circleci/config.yml | 4 ++-- pyproject.toml | 1 + tests/conftest.py | 23 +++++++++++++++++------ tests/static/cluster-3.11.conf | 14 -------------- tests/static/single-3.11.conf | 12 ------------ tests/test_analyzer.py | 4 ++-- tests/test_aql.py | 8 +++----- tests/test_backup.py | 23 +++++------------------ tests/test_client.py | 7 ++++--- tests/test_cluster.py | 4 ++-- tests/test_database.py | 4 ++-- tests/test_foxx.py | 5 ++++- tests/test_graph.py | 4 ++-- tests/test_transaction.py | 5 ++++- 14 files changed, 48 insertions(+), 70 deletions(-) delete mode 100644 
tests/static/cluster-3.11.conf delete mode 100644 tests/static/single-3.11.conf diff --git a/.circleci/config.yml b/.circleci/config.yml index 836c418..b71ba0b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -86,8 +86,8 @@ jobs: args+=("--cluster" "--port=8539" "--port=8549") fi - if [ << parameters.arangodb_license >> = "enterprise" ]; then - args+=("--enterprise") + if [ << parameters.arangodb_license >> != "enterprise" ]; then + args+=("--skip enterprise") fi echo "Running pytest with args: ${args[@]}" diff --git a/pyproject.toml b/pyproject.toml index ef00aea..b01c76f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,7 @@ dev = [ "pytest-cov>=5.0", "sphinx>=7.3", "sphinx_rtd_theme>=2.0", + "allure-pytest>=2.15", "types-setuptools", ] diff --git a/tests/conftest.py b/tests/conftest.py index 295b946..c09292d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,8 +27,8 @@ class GlobalData: graph_name: str = "test_graph" username: str = generate_username() cluster: bool = False - enterprise: bool = False - db_version: version = version.parse("0.0.0") + skip: list[str] = None + db_version: version.Version = version.parse("0.0.0") global_data = GlobalData() @@ -54,7 +54,18 @@ def pytest_addoption(parser): "--cluster", action="store_true", help="Run tests in a cluster setup" ) parser.addoption( - "--enterprise", action="store_true", help="Run tests in an enterprise setup" + "--skip", + action="store", + nargs="*", + choices=[ + "backup", # backup tests + "jwt-secret-keyfile", # server was not configured with a keyfile + "foxx", # foxx is not supported + "js-transactions", # javascript transactions are not supported + "enterprise", # skip what used to be "enterprise-only" before 3.12 + ], + default=[], + help="Skip specific tests", ) @@ -69,7 +80,7 @@ def pytest_configure(config): global_data.secret = config.getoption("secret") global_data.token = JwtToken.generate_token(global_data.secret) global_data.cluster = config.getoption("cluster") - global_data.enterprise = config.getoption("enterprise") + global_data.skip = config.getoption("skip") global_data.graph_name = generate_graph_name() async def get_db_version(): @@ -112,8 +123,8 @@ def cluster(): @pytest.fixture -def enterprise(): - return global_data.enterprise +def skip_tests(): + return global_data.skip @pytest.fixture diff --git a/tests/static/cluster-3.11.conf b/tests/static/cluster-3.11.conf deleted file mode 100644 index 86f7855..0000000 --- a/tests/static/cluster-3.11.conf +++ /dev/null @@ -1,14 +0,0 @@ -[starter] -mode = cluster -local = true -address = 0.0.0.0 -port = 8528 - -[auth] -jwt-secret = /tests/static/keyfile - -[args] -all.database.password = passwd -all.database.extended-names = true -all.log.api-enabled = true -all.javascript.allow-admin-execute = true diff --git a/tests/static/single-3.11.conf b/tests/static/single-3.11.conf deleted file mode 100644 index df45cb7..0000000 --- a/tests/static/single-3.11.conf +++ /dev/null @@ -1,12 +0,0 @@ -[starter] -mode = single -address = 0.0.0.0 -port = 8528 - -[auth] -jwt-secret = /tests/static/keyfile - -[args] -all.database.password = passwd -all.database.extended-names = true -all.javascript.allow-admin-execute = true diff --git a/tests/test_analyzer.py b/tests/test_analyzer.py index 856b6d7..0557f64 100644 --- a/tests/test_analyzer.py +++ b/tests/test_analyzer.py @@ -11,7 +11,7 @@ @pytest.mark.asyncio -async def test_analyzer_management(db, bad_db, enterprise, db_version): +async def test_analyzer_management(db, bad_db, skip_tests, 
db_version): analyzer_name = generate_analyzer_name() full_analyzer_name = db.name + "::" + analyzer_name bad_analyzer_name = generate_analyzer_name() @@ -68,7 +68,7 @@ async def test_analyzer_management(db, bad_db, enterprise, db_version): assert await db.delete_analyzer(analyzer_name, ignore_missing=True) is False # Test create geo_s2 analyzer - if enterprise: + if "enterprise" not in skip_tests: analyzer_name = generate_analyzer_name() result = await db.create_analyzer(analyzer_name, "geo_s2", properties={}) assert result["type"] == "geo_s2" diff --git a/tests/test_aql.py b/tests/test_aql.py index 24f233f..28fa91c 100644 --- a/tests/test_aql.py +++ b/tests/test_aql.py @@ -279,17 +279,15 @@ async def test_cache_plan_management(db, bad_db, doc_col, docs, db_version): entries = await cache.plan_entries() assert isinstance(entries, list) assert len(entries) > 0 - with pytest.raises(AQLCacheEntriesError) as err: - _ = await bad_db.aql.cache.plan_entries() - assert err.value.error_code == FORBIDDEN + with pytest.raises(AQLCacheEntriesError): + await bad_db.aql.cache.plan_entries() # Clear the cache await cache.clear_plan() entries = await cache.plan_entries() assert len(entries) == 0 - with pytest.raises(AQLCacheClearError) as err: + with pytest.raises(AQLCacheClearError): await bad_db.aql.cache.clear_plan() - assert err.value.error_code == FORBIDDEN @pytest.mark.asyncio diff --git a/tests/test_backup.py b/tests/test_backup.py index d2fb07e..3bb5492 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -2,19 +2,12 @@ from packaging import version from arangoasync.client import ArangoClient -from arangoasync.exceptions import ( - BackupCreateError, - BackupDeleteError, - BackupDownloadError, - BackupGetError, - BackupRestoreError, - BackupUploadError, -) +from arangoasync.exceptions import BackupDeleteError, BackupRestoreError @pytest.mark.asyncio -async def test_backup(url, sys_db_name, bad_db, token, enterprise, cluster, db_version): - if not enterprise: +async def test_backup(url, sys_db_name, bad_db, token, cluster, db_version, skip_tests): + if "enterprise" in skip_tests: pytest.skip("Backup API is only available in ArangoDB Enterprise Edition") if not cluster: pytest.skip("For simplicity, the backup API is only tested in cluster setups") @@ -22,19 +15,13 @@ async def test_backup(url, sys_db_name, bad_db, token, enterprise, cluster, db_v pytest.skip( "For simplicity, the backup API is only tested in the latest versions" ) + if "backup" in skip_tests: + pytest.skip("Skipping backup tests") - with pytest.raises(BackupCreateError): - await bad_db.backup.create() - with pytest.raises(BackupGetError): - await bad_db.backup.get() with pytest.raises(BackupRestoreError): await bad_db.backup.restore("foobar") with pytest.raises(BackupDeleteError): await bad_db.backup.delete("foobar") - with pytest.raises(BackupUploadError): - await bad_db.backup.upload() - with pytest.raises(BackupDownloadError): - await bad_db.backup.download() async with ArangoClient(hosts=url) as client: db = await client.db( diff --git a/tests/test_client.py b/tests/test_client.py index cb488a7..cbd96d4 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -121,16 +121,17 @@ async def test_client_jwt_auth(url, sys_db_name, basic_auth_root): @pytest.mark.asyncio async def test_client_jwt_superuser_auth( - url, sys_db_name, basic_auth_root, token, enterprise + url, sys_db_name, basic_auth_root, token, skip_tests ): # successful authentication async with ArangoClient(hosts=url) as client: db = await client.db( 
sys_db_name, auth_method="superuser", token=token, verify=True ) - if enterprise: + if "enterprise" not in skip_tests: await db.jwt_secrets() - await db.reload_jwt_secrets() + if "jwt-secret-keyfile" not in skip_tests: + await db.reload_jwt_secrets() # Get TLS data tls = await db.tls() diff --git a/tests/test_cluster.py b/tests/test_cluster.py index d5b0b75..9a68a6b 100644 --- a/tests/test_cluster.py +++ b/tests/test_cluster.py @@ -15,11 +15,11 @@ @pytest.mark.asyncio async def test_cluster( - url, sys_db_name, bad_db, token, enterprise, cluster, db_version + url, sys_db_name, bad_db, token, skip_tests, cluster, db_version ): if not cluster: pytest.skip("Cluster API is only tested in cluster setups") - if not enterprise or db_version < version.parse("3.12.0"): + if "enterprise" in skip_tests or db_version < version.parse("3.12.0"): pytest.skip( "For simplicity, the cluster API is only tested in the latest versions" ) diff --git a/tests/test_database.py b/tests/test_database.py index 33dcc56..519d0ce 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -59,7 +59,7 @@ @pytest.mark.asyncio async def test_database_misc_methods( - sys_db, db, bad_db, cluster, db_version, url, sys_db_name, token, enterprise + sys_db, db, bad_db, cluster, db_version, url, sys_db_name, token, skip_tests ): # Status status = await sys_db.status() @@ -181,7 +181,7 @@ async def test_database_misc_methods( response = await sys_db.request(request) assert json.loads(response.raw_body) == 1 - if enterprise and db_version >= version.parse("3.12.0"): + if "enterprise" not in skip_tests and db_version >= version.parse("3.12.0"): # API calls with pytest.raises(ServerApiCallsError): await bad_db.api_calls() diff --git a/tests/test_foxx.py b/tests/test_foxx.py index 065530d..c407215 100644 --- a/tests/test_foxx.py +++ b/tests/test_foxx.py @@ -35,7 +35,10 @@ @pytest.mark.asyncio -async def test_foxx(db, bad_db): +async def test_foxx(db, bad_db, skip_tests): + if "foxx" in skip_tests: + pytest.skip("Skipping Foxx tests") + # Test errors with pytest.raises(FoxxServiceGetError): await bad_db.foxx.service(service_name) diff --git a/tests/test_graph.py b/tests/test_graph.py index 6d5fcbe..5d70255 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -56,10 +56,10 @@ async def test_graph_basic(db, bad_db): @pytest.mark.asyncio -async def test_graph_properties(db, bad_graph, cluster, enterprise): +async def test_graph_properties(db, bad_graph, cluster, skip_tests): # Create a graph name = generate_graph_name() - is_smart = cluster and enterprise + is_smart = cluster and "enterprise" not in skip_tests options = GraphOptions(number_of_shards=3) graph = await db.create_graph(name, is_smart=is_smart, options=options) diff --git a/tests/test_transaction.py b/tests/test_transaction.py index f7d7f76..1a7363c 100644 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -14,7 +14,10 @@ @pytest.mark.asyncio -async def test_transaction_execute_raw(db, doc_col, docs): +async def test_transaction_execute_raw(db, doc_col, docs, skip_tests): + if "js-transactions" in skip_tests: + pytest.skip("Skipping JS transaction tests") + # Test a valid JS transaction doc = docs[0] key = doc["_key"] From e4ade5552e3f1c758538cf322eb1aef6a3c0d3c7 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 15 Dec 2025 23:43:24 +0800 Subject: [PATCH 22/30] Option for skipping task test (#74) --- tests/conftest.py | 1 + tests/test_task.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py 
b/tests/conftest.py index c09292d..f9b203f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -62,6 +62,7 @@ def pytest_addoption(parser): "jwt-secret-keyfile", # server was not configured with a keyfile "foxx", # foxx is not supported "js-transactions", # javascript transactions are not supported + "task", # tasks API "enterprise", # skip what used to be "enterprise-only" before 3.12 ], default=[], diff --git a/tests/test_task.py b/tests/test_task.py index 4e1aee6..008e25d 100644 --- a/tests/test_task.py +++ b/tests/test_task.py @@ -10,10 +10,13 @@ @pytest.mark.asyncio -async def test_task_management(sys_db, bad_db): +async def test_task_management(sys_db, bad_db, skip_tests): # This test intentionally uses the system database because cleaning up tasks is # easier there. + if "task" in skip_tests: + pytest.skip("Skipping task tests") + test_command = 'require("@arangodb").print(params);' # Test errors From eb4922abcb9b27c652d40a9fcbc41057da43e45a Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 19 Dec 2025 18:30:25 +0800 Subject: [PATCH 23/30] Access Tokens (#75) * Adding support for access tokens * Docs fix --- arangoasync/auth.py | 4 +- arangoasync/client.py | 8 +++- arangoasync/database.py | 94 +++++++++++++++++++++++++++++++++++++++ arangoasync/exceptions.py | 12 +++++ arangoasync/typings.py | 52 ++++++++++++++++++++++ tests/helpers.py | 9 ++++ tests/test_client.py | 56 ++++++++++++++++++++++- tests/test_typings.py | 26 +++++++++++ 8 files changed, 256 insertions(+), 5 deletions(-) diff --git a/arangoasync/auth.py b/arangoasync/auth.py index 96e9b1b..a4df28f 100644 --- a/arangoasync/auth.py +++ b/arangoasync/auth.py @@ -20,8 +20,8 @@ class Auth: encoding (str): Encoding for the password (default: utf-8) """ - username: str - password: str + username: str = "" + password: str = "" encoding: str = "utf-8" diff --git a/arangoasync/client.py b/arangoasync/client.py index 235cfae..b2eed10 100644 --- a/arangoasync/client.py +++ b/arangoasync/client.py @@ -147,7 +147,7 @@ async def db( self, name: str, auth_method: str = "basic", - auth: Optional[Auth] = None, + auth: Optional[Auth | str] = None, token: Optional[JwtToken] = None, verify: bool = False, compression: Optional[CompressionManager] = None, @@ -169,7 +169,8 @@ async def db( and client are synchronized. - "superuser": Superuser JWT authentication. The `token` parameter is required. The `auth` parameter is ignored. - auth (Auth | None): Login information. + auth (Auth | None): Login information (username and password) or + access token. token (JwtToken | None): JWT token. verify (bool): Verify the connection by sending a test request. 
compression (CompressionManager | None): If set, supersedes the @@ -188,6 +189,9 @@ async def db( """ connection: Connection + if isinstance(auth, str): + auth = Auth(password=auth) + if auth_method == "basic": if auth is None: raise ValueError("Basic authentication requires the `auth` parameter") diff --git a/arangoasync/database.py b/arangoasync/database.py index a28fa43..2cbbc68 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -17,6 +17,9 @@ from arangoasync.connection import Connection from arangoasync.errno import HTTP_FORBIDDEN, HTTP_NOT_FOUND from arangoasync.exceptions import ( + AccessTokenCreateError, + AccessTokenDeleteError, + AccessTokenListError, AnalyzerCreateError, AnalyzerDeleteError, AnalyzerGetError, @@ -107,6 +110,7 @@ from arangoasync.result import Result from arangoasync.serialization import Deserializer, Serializer from arangoasync.typings import ( + AccessToken, CollectionInfo, CollectionType, DatabaseProperties, @@ -2130,6 +2134,96 @@ def response_handler(resp: Response) -> Json: return await self._executor.execute(request, response_handler) + async def create_access_token( + self, + user: str, + name: str, + valid_until: int, + ) -> Result[AccessToken]: + """Create an access token for the given user. + + Args: + user (str): The name of the user. + name (str): A name for the access token to make identification easier, + like a short description. + valid_until (int): A Unix timestamp in seconds to set the expiration date and time. + + Returns: + AccessToken: Information about the created access token, including the token itself. + + Raises: + AccessTokenCreateError: If the operation fails. + + References: + - `create-an-access-token `__ + """ # noqa: E501 + data: Json = { + "name": name, + "valid_until": valid_until, + } + + request = Request( + method=Method.POST, + endpoint=f"/_api/token/{user}", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> AccessToken: + if not resp.is_success: + raise AccessTokenCreateError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return AccessToken(result) + + return await self._executor.execute(request, response_handler) + + async def delete_access_token(self, user: str, token_id: int) -> None: + """List all access tokens for the given user. + + Args: + user (str): The name of the user. + token_id (int): The ID of the access token to delete. + + Raises: + AccessTokenDeleteError: If the operation fails. + + References: + - `delete-an-access-token `__ + """ # noqa: E501 + request = Request( + method=Method.DELETE, endpoint=f"/_api/token/{user}/{token_id}" + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise AccessTokenDeleteError(resp, request) + + await self._executor.execute(request, response_handler) + + async def list_access_tokens(self, user: str) -> Result[Jsons]: + """List all access tokens for the given user. + + Args: + user (str): The name of the user. + + Returns: + list: List of access tokens for the user. + + Raises: + AccessTokenListError: If the operation fails. 
+ + References: + - `list-all-access-tokens `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint=f"/_api/token/{user}") + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise AccessTokenListError(resp, request) + result: Json = self.deserializer.loads(resp.raw_body) + return cast(Jsons, result["tokens"]) + + return await self._executor.execute(request, response_handler) + async def tls(self) -> Result[Json]: """Return TLS data (keyfile, clientCA). diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index a940e1b..58a9505 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -139,6 +139,18 @@ class AQLQueryValidateError(ArangoServerError): """Failed to parse and validate query.""" +class AccessTokenCreateError(ArangoServerError): + """Failed to create an access token.""" + + +class AccessTokenDeleteError(ArangoServerError): + """Failed to delete an access token.""" + + +class AccessTokenListError(ArangoServerError): + """Failed to retrieve access tokens.""" + + class AnalyzerCreateError(ArangoServerError): """Failed to create analyzer.""" diff --git a/arangoasync/typings.py b/arangoasync/typings.py index d49411d..0d85035 100644 --- a/arangoasync/typings.py +++ b/arangoasync/typings.py @@ -2024,3 +2024,55 @@ def __init__( @property def satellites(self) -> Optional[List[str]]: return cast(Optional[List[str]], self._data.get("satellites")) + + +class AccessToken(JsonWrapper): + """User access token. + + Example: + .. code-block:: json + + { + "id" : 1, + "name" : "Token for Service A", + "valid_until" : 1782864000, + "created_at" : 1765543306, + "fingerprint" : "v1...71227d", + "active" : true, + "token" : "v1.7b2265223a3137471227d" + } + + References: + - `create-an-access-token `__ + """ # noqa: E501 + + def __init__(self, data: Json) -> None: + super().__init__(data) + + @property + def active(self) -> bool: + return cast(bool, self._data["active"]) + + @property + def created_at(self) -> int: + return cast(int, self._data["created_at"]) + + @property + def fingerprint(self) -> str: + return cast(str, self._data["fingerprint"]) + + @property + def id(self) -> int: + return cast(int, self._data["id"]) + + @property + def name(self) -> str: + return cast(str, self._data["name"]) + + @property + def token(self) -> str: + return cast(str, self._data["token"]) + + @property + def valid_until(self) -> int: + return cast(int, self._data["valid_until"]) diff --git a/tests/helpers.py b/tests/helpers.py index 0e6e8a8..2bc04a5 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -89,3 +89,12 @@ def generate_service_mount(): str: Random service name. """ return f"/test_{uuid4().hex}" + + +def generate_token_name(): + """Generate and return a random token name. + + Returns: + str: Random token name. 
+ """ + return f"test_token_{uuid4().hex}" diff --git a/tests/test_client.py b/tests/test_client.py index cbd96d4..2218384 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,12 +1,20 @@ +import time + import pytest from arangoasync.auth import JwtToken from arangoasync.client import ArangoClient from arangoasync.compression import DefaultCompressionManager -from arangoasync.exceptions import ServerEncryptionError +from arangoasync.exceptions import ( + AccessTokenCreateError, + AccessTokenDeleteError, + AccessTokenListError, + ServerEncryptionError, +) from arangoasync.http import DefaultHTTPClient from arangoasync.resolver import DefaultHostResolver, RoundRobinHostResolver from arangoasync.version import __version__ +from tests.helpers import generate_token_name @pytest.mark.asyncio @@ -152,3 +160,49 @@ async def test_client_jwt_superuser_auth( await client.db( sys_db_name, auth_method="superuser", auth=basic_auth_root, verify=True ) + + +@pytest.mark.asyncio +async def test_client_access_token(url, sys_db_name, basic_auth_root, bad_db): + username = basic_auth_root.username + + async with ArangoClient(hosts=url) as client: + # First login with basic auth + db_auth_basic = await client.db( + sys_db_name, + auth_method="basic", + auth=basic_auth_root, + verify=True, + ) + + # Create an access token + token_name = generate_token_name() + token = await db_auth_basic.create_access_token( + user=username, name=token_name, valid_until=int(time.time() + 3600) + ) + assert token.active is True + + # Cannot create a token with the same name + with pytest.raises(AccessTokenCreateError): + await db_auth_basic.create_access_token( + user=username, name=token_name, valid_until=int(time.time() + 3600) + ) + + # Authenticate with the created token + access_token_db = await client.db( + sys_db_name, + auth_method="basic", + auth=token.token, + verify=True, + ) + + # List access tokens + tokens = await access_token_db.list_access_tokens(username) + assert isinstance(tokens, list) + with pytest.raises(AccessTokenListError): + await bad_db.list_access_tokens(username) + + # Clean up - delete the created token + await access_token_db.delete_access_token(username, token.id) + with pytest.raises(AccessTokenDeleteError): + await access_token_db.delete_access_token(username, token.id) diff --git a/tests/test_typings.py b/tests/test_typings.py index 3b4e5e2..48e9eb0 100644 --- a/tests/test_typings.py +++ b/tests/test_typings.py @@ -1,6 +1,7 @@ import pytest from arangoasync.typings import ( + AccessToken, CollectionInfo, CollectionStatistics, CollectionStatus, @@ -446,3 +447,28 @@ def test_CollectionStatistics(): assert stats.key_options["type"] == "traditional" assert stats.computed_values is None assert stats.object_id == "69124" + + +def test_AccessToken(): + data = { + "active": True, + "created_at": 1720000000, + "fingerprint": "abc123fingerprint", + "id": 42, + "name": "ci-token", + "token": "v2.local.eyJhbGciOi...", + "valid_until": 1720003600, + } + + access_token = AccessToken(data) + + assert access_token.active is True + assert access_token.created_at == 1720000000 + assert access_token.fingerprint == "abc123fingerprint" + assert access_token.id == 42 + assert access_token.name == "ci-token" + assert access_token.token == "v2.local.eyJhbGciOi..." 
+ assert access_token.valid_until == 1720003600 + + # JsonWrapper behavior + assert access_token.to_dict() == data From e833767d03fd4b99771f6d249cbf20d25c789c2a Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 19 Dec 2025 18:52:05 +0800 Subject: [PATCH 24/30] Adding test parameters for foxx service path and backup path (#76) --- tests/conftest.py | 26 ++++++++++++++++++++++++++ tests/test_backup.py | 10 +++++----- tests/test_foxx.py | 15 +++++++-------- 3 files changed, 38 insertions(+), 13 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f9b203f..5025142 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -28,6 +28,8 @@ class GlobalData: username: str = generate_username() cluster: bool = False skip: list[str] = None + foxx_path: str = None + backup_path: str = None db_version: version.Version = version.parse("0.0.0") @@ -53,6 +55,18 @@ def pytest_addoption(parser): parser.addoption( "--cluster", action="store_true", help="Run tests in a cluster setup" ) + parser.addoption( + "--foxx-path", + action="store", + default="/tests/static/service.zip", + help="Foxx tests service path", + ) + parser.addoption( + "--backup-path", + action="store", + default="local://tmp", + help="Backup tests repository path", + ) parser.addoption( "--skip", action="store", @@ -82,6 +96,8 @@ def pytest_configure(config): global_data.token = JwtToken.generate_token(global_data.secret) global_data.cluster = config.getoption("cluster") global_data.skip = config.getoption("skip") + global_data.backup_path = config.getoption("backup_path") + global_data.foxx_path = config.getoption("foxx_path") global_data.graph_name = generate_graph_name() async def get_db_version(): @@ -123,6 +139,16 @@ def cluster(): return global_data.cluster +@pytest.fixture +def backup_path(): + return global_data.backup_path + + +@pytest.fixture +def foxx_path(): + return global_data.foxx_path + + @pytest.fixture def skip_tests(): return global_data.skip diff --git a/tests/test_backup.py b/tests/test_backup.py index 3bb5492..7e6e37e 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -6,7 +6,9 @@ @pytest.mark.asyncio -async def test_backup(url, sys_db_name, bad_db, token, cluster, db_version, skip_tests): +async def test_backup( + url, sys_db_name, bad_db, token, cluster, db_version, skip_tests, backup_path +): if "enterprise" in skip_tests: pytest.skip("Backup API is only available in ArangoDB Enterprise Edition") if not cluster: @@ -35,10 +37,8 @@ async def test_backup(url, sys_db_name, bad_db, token, cluster, db_version, skip result = await backup.restore(backup_id) assert "previous" in result config = {"local": {"type": "local"}} - result = await backup.upload(backup_id, repository="local://tmp", config=config) + result = await backup.upload(backup_id, repository=backup_path, config=config) assert "uploadId" in result - result = await backup.download( - backup_id, repository="local://tmp", config=config - ) + result = await backup.download(backup_id, repository=backup_path, config=config) assert "downloadId" in result await backup.delete(backup_id) diff --git a/tests/test_foxx.py b/tests/test_foxx.py index c407215..e972dc2 100644 --- a/tests/test_foxx.py +++ b/tests/test_foxx.py @@ -30,12 +30,11 @@ ) from tests.helpers import generate_service_mount -service_file = "/tests/static/service.zip" service_name = "test" @pytest.mark.asyncio -async def test_foxx(db, bad_db, skip_tests): +async def test_foxx(db, bad_db, skip_tests, foxx_path): if "foxx" in skip_tests: pytest.skip("Skipping Foxx tests") 
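Editor's note (not part of the patch): stepping back to the access-token support introduced in [PATCH 23/30] above, a minimal end-to-end sketch could look as follows. Host, user, and token name are placeholders; the flow mirrors `tests/test_client.py`, where the token string is passed directly as `auth`.

```python
import asyncio
import time

from arangoasync.auth import Auth
from arangoasync.client import ArangoClient


async def main() -> None:
    # Assumed host and credentials.
    async with ArangoClient(hosts="http://localhost:8529") as client:
        db = await client.db("_system", auth=Auth(username="root", password="passwd"))

        # Create a token valid for one hour (valid_until is a Unix timestamp in seconds).
        token = await db.create_access_token(
            user="root", name="example-token", valid_until=int(time.time()) + 3600
        )

        # Authenticate a second connection with the token string instead of a password.
        token_db = await client.db("_system", auth=token.token, verify=True)
        print(await token_db.list_access_tokens("root"))

        # Clean up the token by its ID.
        await token_db.delete_access_token("root", token.id)


asyncio.run(main())
```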
@@ -90,7 +89,7 @@ async def test_foxx(db, bad_db, skip_tests): # Service as a path mount1 = generate_service_mount() service1 = { - "source": service_file, + "source": foxx_path, "configuration": {"LOG_LEVEL": "info"}, "dependencies": {}, } @@ -102,7 +101,7 @@ async def test_foxx(db, bad_db, skip_tests): service2 = aiohttp.FormData() service2.add_field( "source", - open(f".{service_file}", "rb"), + open(f".{foxx_path}", "rb"), filename="service.zip", content_type="application/zip", ) @@ -115,7 +114,7 @@ async def test_foxx(db, bad_db, skip_tests): # Service as raw data mount3 = generate_service_mount() - async with aiofiles.open(f".{service_file}", mode="rb") as f: + async with aiofiles.open(f".{foxx_path}", mode="rb") as f: service3 = await f.read() service_info = await db.foxx.create_service( mount=mount3, service=service3, headers={"content-type": "application/zip"} @@ -127,14 +126,14 @@ async def test_foxx(db, bad_db, skip_tests): # Replace service service4 = { - "source": service_file, + "source": foxx_path, "configuration": {"LOG_LEVEL": "info"}, "dependencies": {}, } service_info = await db.foxx.replace_service(mount=mount2, service=service4) assert service_info["mount"] == mount2 - async with aiofiles.open(f".{service_file}", mode="rb") as f: + async with aiofiles.open(f".{foxx_path}", mode="rb") as f: service5 = await f.read() service_info = await db.foxx.replace_service( mount=mount1, service=service5, headers={"content-type": "application/zip"} @@ -143,7 +142,7 @@ async def test_foxx(db, bad_db, skip_tests): # Update service service6 = { - "source": service_file, + "source": foxx_path, "configuration": {"LOG_LEVEL": "debug"}, "dependencies": {}, } From 45cee455ad2bae8af6a8017b4d385346403c839a Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 20 Dec 2025 23:20:30 +0800 Subject: [PATCH 25/30] No longer using --enterprise option --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 66044c4..f2899c8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,7 +10,7 @@ pre-commit install # Install git pre-commit hooks Run unit tests with coverage: ```shell -pytest --enterprise --cluster --cov=arango --cov-report=html # Open htmlcov/index.html in your browser +pytest --cluster --cov=arango --cov-report=html # Open htmlcov/index.html in your browser ``` To start and ArangoDB instance locally, run: From ded4e013c4a557847e92a2a97eff1704d25e331a Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 20 Dec 2025 23:21:52 +0800 Subject: [PATCH 26/30] Image update --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b71ba0b..c7f0fdd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -7,7 +7,7 @@ executors: resource_class: small python-vm: machine: - image: ubuntu-2204:current + image: ubuntu-2404:current resource_class: medium workflows: From 50cbb5c1f9ec06f2ffb3a4a055163a8d9b4baa15 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 20 Dec 2025 23:31:43 +0800 Subject: [PATCH 27/30] Bump driver version --- arangoasync/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/arangoasync/version.py b/arangoasync/version.py index 976498a..92192ee 100644 --- a/arangoasync/version.py +++ b/arangoasync/version.py @@ -1 +1 @@ -__version__ = "1.0.3" +__version__ = "1.0.4" From 039579b40c30e706f1be075cb2fa135f132c0917 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 20 Dec 2025 23:49:00 
+0800 Subject: [PATCH 28/30] Changed docs URL (#77) --- README.md | 2 +- arangoasync/aql.py | 38 ++++---- arangoasync/backup.py | 12 +-- arangoasync/cluster.py | 24 ++--- arangoasync/collection.py | 90 ++++++++--------- arangoasync/cursor.py | 6 +- arangoasync/database.py | 192 ++++++++++++++++++------------------- arangoasync/foxx.py | 42 ++++---- arangoasync/graph.py | 38 ++++---- arangoasync/job.py | 10 +- arangoasync/replication.py | 14 +-- arangoasync/typings.py | 46 ++++----- docs/analyzer.rst | 2 +- docs/aql.rst | 2 +- docs/backup.rst | 2 +- docs/certificates.rst | 2 +- docs/cluster.rst | 2 +- docs/document.rst | 2 +- docs/foxx.rst | 2 +- docs/graph.rst | 2 +- docs/indexes.rst | 2 +- docs/overview.rst | 2 +- docs/view.rst | 6 +- 23 files changed, 270 insertions(+), 270 deletions(-) diff --git a/README.md b/README.md index b80d633..1232efa 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ async def main(): student_names.append(doc["name"]) ``` -Another example with [graphs](https://docs.arangodb.com/stable/graphs/): +Another example with [graphs](https://docs.arango.ai/stable/graphs/): ```python async def main(): diff --git a/arangoasync/aql.py b/arangoasync/aql.py index 1fad880..ec8efe4 100644 --- a/arangoasync/aql.py +++ b/arangoasync/aql.py @@ -78,7 +78,7 @@ async def entries(self) -> Result[Jsons]: AQLCacheEntriesError: If retrieval fails. References: - - `list-the-entries-of-the-aql-query-results-cache `__ + - `list-the-entries-of-the-aql-query-results-cache `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query-cache/entries") @@ -99,7 +99,7 @@ async def plan_entries(self) -> Result[Jsons]: AQLCacheEntriesError: If retrieval fails. References: - - `list-the-entries-of-the-aql-query-plan-cache `__ + - `list-the-entries-of-the-aql-query-plan-cache `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query-plan-cache") @@ -117,7 +117,7 @@ async def clear(self) -> Result[None]: AQLCacheClearError: If clearing the cache fails. References: - - `clear-the-aql-query-results-cache `__ + - `clear-the-aql-query-results-cache `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint="/_api/query-cache") @@ -134,7 +134,7 @@ async def clear_plan(self) -> Result[None]: AQLCacheClearError: If clearing the cache fails. References: - - `clear-the-aql-query-plan-cache `__ + - `clear-the-aql-query-plan-cache `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint="/_api/query-plan-cache") @@ -154,7 +154,7 @@ async def properties(self) -> Result[QueryCacheProperties]: AQLCachePropertiesError: If retrieval fails. References: - - `get-the-aql-query-results-cache-configuration `__ + - `get-the-aql-query-results-cache-configuration `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query-cache/properties") @@ -193,7 +193,7 @@ async def configure( AQLCacheConfigureError: If setting the configuration fails. References: - - `set-the-aql-query-results-cache-configuration `__ + - `set-the-aql-query-results-cache-configuration `__ """ # noqa: E501 data: Json = dict() if mode is not None: @@ -298,7 +298,7 @@ async def execute( Cursor: Result cursor. References: - - `create-a-cursor `__ + - `create-a-cursor `__ """ # noqa: E501 data: Json = dict(query=query) if count is not None: @@ -353,7 +353,7 @@ async def tracking(self) -> Result[QueryTrackingConfiguration]: AQLQueryTrackingGetError: If retrieval fails. 
References: - - `get-the-aql-query-tracking-configuration `__ + - `get-the-aql-query-tracking-configuration `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query/properties") @@ -397,7 +397,7 @@ async def set_tracking( AQLQueryTrackingSetError: If setting the configuration fails. References: - - `update-the-aql-query-tracking-configuration `__ + - `update-the-aql-query-tracking-configuration `__ """ # noqa: E501 data: Json = dict() @@ -462,7 +462,7 @@ async def queries(self, all_queries: bool = False) -> Result[Jsons]: AQLQueryListError: If retrieval fails. References: - - `list-the-running-queries `__ + - `list-the-running-queries `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -493,7 +493,7 @@ async def slow_queries(self, all_queries: bool = False) -> Result[Jsons]: AQLQueryListError: If retrieval fails. References: - - `list-the-slow-aql-queries `__ + - `list-the-slow-aql-queries `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -523,7 +523,7 @@ async def clear_slow_queries(self, all_queries: bool = False) -> Result[None]: AQLQueryClearError: If retrieval fails. References: - - `clear-the-list-of-slow-aql-queries `__ + - `clear-the-list-of-slow-aql-queries `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -560,7 +560,7 @@ async def kill( AQLQueryKillError: If killing the query fails. References: - - `kill-a-running-aql-query `__ + - `kill-a-running-aql-query `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -598,7 +598,7 @@ async def explain( AQLQueryExplainError: If retrieval fails. References: - - `explain-an-aql-query `__ + - `explain-an-aql-query `__ """ # noqa: E501 data: Json = dict(query=query) if bind_vars is not None: @@ -634,7 +634,7 @@ async def validate(self, query: str) -> Result[Json]: AQLQueryValidateError: If validation fails. References: - - `parse-an-aql-query `__ + - `parse-an-aql-query `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -659,7 +659,7 @@ async def query_rules(self) -> Result[Jsons]: AQLQueryRulesGetError: If retrieval fails. References: - - `list-all-aql-optimizer-rules `__ + - `list-all-aql-optimizer-rules `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query/rules") @@ -684,7 +684,7 @@ async def functions(self, namespace: Optional[str] = None) -> Result[Jsons]: AQLFunctionListError: If retrieval fails. References: - - `list-the-registered-user-defined-aql-functions `__ + - `list-the-registered-user-defined-aql-functions `__ """ # noqa: E501 params: Json = dict() if namespace is not None: @@ -726,7 +726,7 @@ async def create_function( AQLFunctionCreateError: If registration fails. References: - - `create-a-user-defined-aql-function `__ + - `create-a-user-defined-aql-function `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -765,7 +765,7 @@ async def delete_function( AQLFunctionDeleteError: If removal fails. References: - - `remove-a-user-defined-aql-function `__ + - `remove-a-user-defined-aql-function `__ """ # noqa: E501 params: Json = dict() if group is not None: diff --git a/arangoasync/backup.py b/arangoasync/backup.py index 75a26a6..e0847e0 100644 --- a/arangoasync/backup.py +++ b/arangoasync/backup.py @@ -49,7 +49,7 @@ async def get(self, backup_id: Optional[str] = None) -> Result[Json]: BackupGetError: If the operation fails. 
References: - - `list-backups `__ + - `list-backups `__ """ # noqa: E501 data: Json = {} if backup_id is not None: @@ -97,7 +97,7 @@ async def create( BackupCreateError: If the backup creation fails. References: - - `create-backup `__ + - `create-backup `__ """ # noqa: E501 data: Json = {} if label is not None: @@ -137,7 +137,7 @@ async def restore(self, backup_id: str) -> Result[Json]: BackupRestoreError: If the restore operation fails. References: - - `restore-backup `__ + - `restore-backup `__ """ # noqa: E501 data: Json = {"id": backup_id} request = Request( @@ -165,7 +165,7 @@ async def delete(self, backup_id: str) -> None: BackupDeleteError: If the delete operation fails. References: - - `delete-backup `__ + - `delete-backup `__ """ # noqa: E501 data: Json = {"id": backup_id} request = Request( @@ -209,7 +209,7 @@ async def upload( BackupUploadError: If upload operation fails. References: - - `upload-a-backup-to-a-remote-repository `__ + - `upload-a-backup-to-a-remote-repository `__ """ # noqa: E501 data: Json = {} if upload_id is not None: @@ -265,7 +265,7 @@ async def download( BackupDownloadError: If the download operation fails. References: - - `download-a-backup-from-a-remote-repository `__ + - `download-a-backup-from-a-remote-repository `__ """ # noqa: E501 data: Json = {} if download_id is not None: diff --git a/arangoasync/cluster.py b/arangoasync/cluster.py index ce33b92..39e3d56 100644 --- a/arangoasync/cluster.py +++ b/arangoasync/cluster.py @@ -45,7 +45,7 @@ async def health(self) -> Result[Json]: ClusterHealthError: If retrieval fails. References: - - `get-the-cluster-health `__ + - `get-the-cluster-health `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -74,7 +74,7 @@ async def statistics(self, db_server: str) -> Result[Json]: ClusterStatisticsError: If retrieval fails. References: - - `get-the-statistics-of-a-db-server `__ + - `get-the-statistics-of-a-db-server `__ """ # noqa: E501 params: Params = {"DBserver": db_server} @@ -103,7 +103,7 @@ async def endpoints(self) -> Result[List[str]]: ClusterEndpointsError: If retrieval fails. References: - - `list-all-coordinator-endpoints `__ + - `list-all-coordinator-endpoints `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -129,7 +129,7 @@ async def server_id(self) -> Result[str]: ClusterServerIDError: If retrieval fails. References: - - `get-the-server-id `__ + - `get-the-server-id `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -154,7 +154,7 @@ async def server_role(self) -> Result[str]: ClusterServerRoleError: If retrieval fails. References: - - `get-the-server-role `__ + - `get-the-server-role `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -182,7 +182,7 @@ async def toggle_maintenance_mode(self, mode: str) -> Result[Json]: ClusterMaintenanceModeError: If the toggle operation fails. References: - - `toggle-cluster-maintenance-mode `__ + - `toggle-cluster-maintenance-mode `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -212,7 +212,7 @@ async def server_maintenance_mode(self, server_id: str) -> Result[Json]: ClusterMaintenanceModeError: If retrieval fails. References: - - `get-the-maintenance-status-of-a-db-server `__ + - `get-the-maintenance-status-of-a-db-server `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -242,7 +242,7 @@ async def toggle_server_maintenance_mode( ClusterMaintenanceModeError: If the operation fails. 
References: - - `set-the-maintenance-status-of-a-db-server `__ + - `set-the-maintenance-status-of-a-db-server `__ """ # noqa: E501 data: Json = {"mode": mode} if timeout is not None: @@ -271,7 +271,7 @@ async def calculate_imbalance(self) -> Result[Json]: ClusterRebalanceError: If retrieval fails. References: - - `get-the-current-cluster-imbalance `__ + - `get-the-current-cluster-imbalance `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/cluster/rebalance") @@ -315,7 +315,7 @@ async def calculate_rebalance_plan( ClusterRebalanceError: If retrieval fails. References: - - `compute-a-set-of-move-shard-operations-to-improve-balance `__ + - `compute-a-set-of-move-shard-operations-to-improve-balance `__ """ # noqa: E501 data: Json = dict(version=version) if databases_excluded is not None: @@ -380,7 +380,7 @@ async def rebalance( ClusterRebalanceError: If retrieval fails. References: - - `compute-and-execute-a-set-of-move-shard-operations-to-improve-balance `__ + - `compute-and-execute-a-set-of-move-shard-operations-to-improve-balance `__ """ # noqa: E501 data: Json = dict(version=version) if databases_excluded is not None: @@ -431,7 +431,7 @@ async def execute_rebalance_plan( ClusterRebalanceError: If the execution fails. References: - - `execute-a-set-of-move-shard-operations `__ + - `execute-a-set-of-move-shard-operations `__ """ # noqa: E501 data: Json = dict(version=version, moves=moves) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 52a9d9e..fae501a 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -333,7 +333,7 @@ async def indexes( IndexListError: If retrieval fails. References: - - `list-all-indexes-of-a-collection `__ + - `list-all-indexes-of-a-collection `__ """ # noqa: E501 params: Params = dict(collection=self._name) if with_stats is not None: @@ -368,7 +368,7 @@ async def get_index(self, id: str | int) -> Result[IndexProperties]: IndexGetError: If retrieval fails. References: - `get-an-index `__ + `get-an-index `__ """ # noqa: E501 if isinstance(id, int): full_id = f"{self._name}/{id}" @@ -408,12 +408,12 @@ async def add_index( IndexCreateError: If index creation fails. References: - - `create-an-index `__ - - `create-a-persistent-index `__ - - `create-an-inverted-index `__ - - `create-a-ttl-index `__ - - `create-a-multi-dimensional-index `__ - - `create-a-geo-spatial-index `__ + - `create-an-index `__ + - `create-a-persistent-index `__ + - `create-an-inverted-index `__ + - `create-a-ttl-index `__ + - `create-a-multi-dimensional-index `__ + - `create-a-geo-spatial-index `__ """ # noqa: E501 options = options or {} request = Request( @@ -447,7 +447,7 @@ async def delete_index( IndexDeleteError: If deletion fails. References: - - `delete-an-index `__ + - `delete-an-index `__ """ # noqa: E501 if isinstance(id, int): full_id = f"{self._name}/{id}" @@ -478,7 +478,7 @@ async def load_indexes(self) -> Result[bool]: IndexLoadError: If loading fails. References: - - `load-collection-indexes-into-memory `__ + - `load-collection-indexes-into-memory `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -499,7 +499,7 @@ async def recalculate_count(self) -> None: CollectionRecalculateCountError: If re-calculation fails. 
References: - - `recalculate-the-document-count-of-a-collection `__ + - `recalculate-the-document-count-of-a-collection `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -522,7 +522,7 @@ async def properties(self) -> Result[CollectionProperties]: CollectionPropertiesError: If retrieval fails. References: - - `get-the-properties-of-a-collection `__ + - `get-the-properties-of-a-collection `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -572,7 +572,7 @@ async def configure( CollectionConfigureError: If configuration fails. References: - - `change-the-properties-of-a-collection `__ + - `change-the-properties-of-a-collection `__ """ # noqa: E501 data: Json = {} if cache_enabled is not None: @@ -617,7 +617,7 @@ async def rename(self, new_name: str) -> None: CollectionRenameError: If rename fails. References: - - `rename-a-collection `__ + - `rename-a-collection `__ """ # noqa: E501 data: Json = {"name": new_name} request = Request( @@ -644,7 +644,7 @@ async def compact(self) -> Result[CollectionInfo]: CollectionCompactError: If compaction fails. References: - - `compact-a-collection `__ + - `compact-a-collection `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -677,7 +677,7 @@ async def truncate( CollectionTruncateError: If truncation fails. References: - - `truncate-a-collection `__ + - `truncate-a-collection `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -707,7 +707,7 @@ async def count(self) -> Result[int]: DocumentCountError: If retrieval fails. References: - - `get-the-document-count-of-a-collection `__ + - `get-the-document-count-of-a-collection `__ """ # noqa: E501 request = Request( method=Method.GET, endpoint=f"/_api/collection/{self.name}/count" @@ -731,7 +731,7 @@ async def statistics(self) -> Result[CollectionStatistics]: CollectionStatisticsError: If retrieval fails. References: - - `get-the-collection-statistics `__ + - `get-the-collection-statistics `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -761,7 +761,7 @@ async def responsible_shard(self, document: Json) -> Result[str]: CollectionResponsibleShardError: If retrieval fails. References: - - `get-the-responsible-shard-for-a-document `__ + - `get-the-responsible-shard-for-a-document `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -793,7 +793,7 @@ async def shards(self, details: Optional[bool] = None) -> Result[Json]: CollectionShardsError: If retrieval fails. References: - - `get-the-shard-ids-of-a-collection `__ + - `get-the-shard-ids-of-a-collection `__ """ # noqa: E501 params: Params = {} if details is not None: @@ -822,7 +822,7 @@ async def revision(self) -> Result[str]: CollectionRevisionError: If retrieval fails. References: - - `get-the-collection-revision-id `__ + - `get-the-collection-revision-id `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -852,7 +852,7 @@ async def checksum( CollectionChecksumError: If retrieval fails. References: - - `get-the-collection-checksum `__ + - `get-the-collection-checksum `__ """ # noqa: E501 params: Params = {} if with_rev is not None: @@ -899,7 +899,7 @@ async def has( DocumentGetError: If retrieval fails. References: - - `get-a-document-header `__ + - `get-a-document-header `__ """ # noqa: E501 handle = self._get_doc_id(document) @@ -956,7 +956,7 @@ async def get_many( DocumentGetError: If retrieval fails. 
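The collection-level maintenance calls in the hunks above (properties, count, checksum, truncate) combine naturally. A minimal sketch, assuming a connected `db` and an existing "students" collection; the keyword names follow the signatures visible in this diff:

    async def housekeeping(db):
        students = db.collection("students")
        print(await students.properties())           # CollectionProperties
        print(await students.count())                # number of documents
        print(await students.checksum(with_rev=True))
        await students.truncate(wait_for_sync=True)  # drop all documents, keep the collection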
References: - - `get-multiple-documents `__ + - `get-multiple-documents `__ """ # noqa: E501 params: Params = {"onlyget": True} if ignore_revs is not None: @@ -1283,7 +1283,7 @@ async def insert_many( DocumentInsertError: If insertion fails. References: - - `create-multiple-documents `__ + - `create-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1373,7 +1373,7 @@ async def replace_many( DocumentReplaceError: If replacing fails. References: - - `replace-multiple-documents `__ + - `replace-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1466,7 +1466,7 @@ async def update_many( DocumentUpdateError: If update fails. References: - - `update-multiple-documents `__ + - `update-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1548,7 +1548,7 @@ async def delete_many( DocumentRemoveError: If removal fails. References: - - `remove-multiple-documents `__ + - `remove-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1640,7 +1640,7 @@ async def import_bulk( DocumentInsertError: If import fails. References: - - `import-json-data-as-documents `__ + - `import-json-data-as-documents `__ """ # noqa: E501 params: Params = dict() params["collection"] = self.name @@ -1730,7 +1730,7 @@ async def get( DocumentParseError: If the document is malformed. References: - - `get-a-document `__ + - `get-a-document `__ """ # noqa: E501 handle = self._get_doc_id(document) @@ -1818,7 +1818,7 @@ async def insert( DocumentParseError: If the document is malformed. References: - - `create-a-document `__ + - `create-a-document `__ """ # noqa: E501 if isinstance(document, dict): document = cast(T, self._ensure_key_from_id(document)) @@ -1923,7 +1923,7 @@ async def update( DocumentUpdateError: If update fails. References: - - `update-a-document `__ + - `update-a-document `__ """ # noqa: E501 params: Params = {} if ignore_revs is not None: @@ -2017,7 +2017,7 @@ async def replace( DocumentReplaceError: If replace fails. References: - - `replace-a-document `__ + - `replace-a-document `__ """ # noqa: E501 params: Params = {} if ignore_revs is not None: @@ -2105,7 +2105,7 @@ async def delete( DocumentDeleteError: If deletion fails. References: - - `remove-a-document `__ + - `remove-a-document `__ """ # noqa: E501 handle = self._get_doc_id(cast(str | Json, document)) @@ -2232,7 +2232,7 @@ async def get( DocumentParseError: If the document is malformed. References: - - `get-a-vertex `__ + - `get-a-vertex `__ """ # noqa: E501 handle = self._get_doc_id(vertex) @@ -2294,7 +2294,7 @@ async def insert( DocumentParseError: If the document is malformed. References: - - `create-a-vertex `__ + - `create-a-vertex `__ """ # noqa: E501 if isinstance(vertex, dict): vertex = cast(T, self._ensure_key_from_id(vertex)) @@ -2359,7 +2359,7 @@ async def update( DocumentUpdateError: If update fails. References: - - `update-a-vertex `__ + - `update-a-vertex `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2434,7 +2434,7 @@ async def replace( DocumentReplaceError: If replace fails. References: - - `replace-a-vertex `__ + - `replace-a-vertex `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2506,7 +2506,7 @@ async def delete( DocumentDeleteError: If deletion fails. 
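The `*_many` variants above accept whole batches in one round trip. A minimal sketch, assuming `db.collection("students")` returns a `StandardCollection` as elsewhere in this series:

    async def bulk_roundtrip(db):
        students = db.collection("students")
        docs = [{"_key": f"s{i}", "gpa": 3.0 + i / 10} for i in range(3)]
        await students.insert_many(docs, wait_for_sync=True)
        # Passing bare keys to get_many is assumed to be accepted, as in the sync driver.
        fetched = await students.get_many([d["_key"] for d in docs])
        await students.update_many([{"_key": d["_key"], "graduated": True} for d in docs])
        await students.delete_many([d["_key"] for d in docs])
        return fetched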
References: - - `remove-a-vertex `__ + - `remove-a-vertex `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2631,7 +2631,7 @@ async def get( DocumentParseError: If the document is malformed. References: - - `get-an-edge `__ + - `get-an-edge `__ """ # noqa: E501 handle = self._get_doc_id(edge) @@ -2694,7 +2694,7 @@ async def insert( DocumentParseError: If the document is malformed. References: - - `create-an-edge `__ + - `create-an-edge `__ """ # noqa: E501 if isinstance(edge, dict): edge = cast(T, self._ensure_key_from_id(edge)) @@ -2763,7 +2763,7 @@ async def update( DocumentUpdateError: If update fails. References: - - `update-an-edge `__ + - `update-an-edge `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2842,7 +2842,7 @@ async def replace( DocumentReplaceError: If replace fails. References: - - `replace-an-edge `__ + - `replace-an-edge `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2917,7 +2917,7 @@ async def delete( DocumentDeleteError: If deletion fails. References: - - `remove-an-edge `__ + - `remove-an-edge `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2978,7 +2978,7 @@ async def edges( EdgeListError: If retrieval fails. References: - - `get-inbound-and-outbound-edges `__ + - `get-inbound-and-outbound-edges `__ """ # noqa: E501 params: Params = { "vertex": self._get_doc_id(vertex, validate=False), diff --git a/arangoasync/cursor.py b/arangoasync/cursor.py index 5339455..1e3cc6c 100644 --- a/arangoasync/cursor.py +++ b/arangoasync/cursor.py @@ -192,8 +192,8 @@ async def fetch(self, batch_id: Optional[str] = None) -> List[Any]: CursorStateError: If the cursor ID is not set. References: - - `read-the-next-batch-from-a-cursor `__ - - `read-a-batch-from-the-cursor-again `__ + - `read-the-next-batch-from-a-cursor `__ + - `read-a-batch-from-the-cursor-again `__ """ # noqa: E501 if self._id is None: raise CursorStateError("Cursor ID is not set") @@ -229,7 +229,7 @@ async def close(self, ignore_missing: bool = False) -> bool: CursorCloseError: If the cursor failed to close. References: - - `delete-a-cursor `__ + - `delete-a-cursor `__ """ # noqa: E501 if self._id is None: return False diff --git a/arangoasync/database.py b/arangoasync/database.py index 2cbbc68..8e700e5 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -258,7 +258,7 @@ async def properties(self) -> Result[DatabaseProperties]: DatabasePropertiesError: If retrieval fails. References: - - `get-information-about-the-current-database `__ + - `get-information-about-the-current-database `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/database/current") @@ -281,7 +281,7 @@ async def status(self) -> Result[ServerStatusInformation]: ServerSatusError: If retrieval fails. References: - - `get-server-status-information `__ + - `get-server-status-information `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/status") @@ -305,7 +305,7 @@ async def databases(self) -> Result[List[str]]: DatabaseListError: If retrieval fails. References: - - `list-all-databases `__ + - `list-all-databases `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/database") @@ -333,7 +333,7 @@ async def databases_accessible_to_user(self) -> Result[List[str]]: DatabaseListError: If retrieval fails. 
References: - - `list-the-accessible-databases `__ + - `list-the-accessible-databases `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/database/user") @@ -417,7 +417,7 @@ async def create_database( DatabaseCreateError: If creation fails. References: - - `create-a-database `__ + - `create-a-database `__ """ # noqa: E501 data: Json = {"name": name} @@ -478,7 +478,7 @@ async def delete_database( DatabaseDeleteError: If deletion fails. References: - - `drop-a-database `__ + - `drop-a-database `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/database/{name}") @@ -533,7 +533,7 @@ async def collections( CollectionListError: If retrieval fails. References: - - `list-all-collections `__ + - `list-all-collections `__ """ # noqa: E501 params: Params = {} if exclude_system is not None: @@ -661,7 +661,7 @@ async def create_collection( CollectionCreateError: If the operation fails. References: - - `create-a-collection `__ + - `create-a-collection `__ """ # noqa: E501 data: Json = {"name": name} if col_type is not None: @@ -751,7 +751,7 @@ async def delete_collection( CollectionDeleteError: If the operation fails. References: - - `drop-a-collection `__ + - `drop-a-collection `__ """ # noqa: E501 params: Params = {} if is_system is not None: @@ -782,7 +782,7 @@ async def key_generators(self) -> Result[List[str]]: CollectionKeyGeneratorsError: If retrieval fails. References: - - `get-the-available-key-generators `__ + - `get-the-available-key-generators `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/key-generators") @@ -821,7 +821,7 @@ async def has_document( DocumentGetError: If retrieval fails. References: - - `get-a-document-header `__ + - `get-a-document-header `__ """ # noqa: E501 col = Collection.get_col_name(document) return await self.collection(col).has( @@ -858,7 +858,7 @@ async def document( DocumentParseError: If the document is malformed. References: - - `get-a-document `__ + - `get-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -927,7 +927,7 @@ async def insert_document( DocumentParseError: If the document is malformed. References: - - `create-a-document `__ + - `create-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection(collection) return await col.insert( @@ -998,7 +998,7 @@ async def update_document( DocumentUpdateError: If update fails. References: - - `update-a-document `__ + - `update-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -1063,7 +1063,7 @@ async def replace_document( DocumentReplaceError: If replace fails. References: - - `replace-a-document `__ + - `replace-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -1124,7 +1124,7 @@ async def delete_document( DocumentDeleteError: If deletion fails. References: - - `remove-a-document `__ + - `remove-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -1198,7 +1198,7 @@ async def graphs(self) -> Result[List[GraphProperties]]: GraphListError: If the operation fails. References: - - `list-all-graphs `__ + - `list-all-graphs `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/gharial") @@ -1253,7 +1253,7 @@ async def create_graph( GraphCreateError: If the operation fails. 
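Database and collection management from the hunks above fits together as below. A minimal sketch; creating databases is assumed to require a handle on the `_system` database (`sys_db`), while collections are managed on the application database handle (`db`). How those handles are obtained is outside this patch:

    async def provision(sys_db, db):
        # `sys_db` targets the _system database, `db` the application database.
        if "students_db" not in await sys_db.databases():
            await sys_db.create_database("students_db")
        await db.create_collection("students")
        print(await db.collections(exclude_system=True))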
References: - - `create-a-graph `__ + - `create-a-graph `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1315,7 +1315,7 @@ async def delete_graph( GraphDeleteError: If the operation fails. References: - - `drop-a-graph `__ + - `drop-a-graph `__ """ # noqa: E501 params: Params = {} if drop_collections is not None: @@ -1347,8 +1347,8 @@ async def view(self, name: str) -> Result[Json]: ViewGetError: If the operation fails. References: - - `read-properties-of-a-view `__ - - `get-the-properties-of-a-view `__ + - `read-properties-of-a-view `__ + - `get-the-properties-of-a-view `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/view/{name}/properties") @@ -1372,8 +1372,8 @@ async def view_info(self, name: str) -> Result[Json]: ViewGetError: If the operation fails. References: - - `get-information-about-a-view `_ - - `get-information-about-a-view `__ + - `get-information-about-a-view `_ + - `get-information-about-a-view `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/view/{name}") @@ -1394,8 +1394,8 @@ async def views(self) -> Result[Jsons]: ViewListError: If the operation fails. References: - - `list-all-views `__ - - `list-all-views `__ + - `list-all-views `__ + - `list-all-views `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/view") @@ -1427,8 +1427,8 @@ async def create_view( ViewCreateError: If the operation fails. References: - - `create-a-search-alias-view `__ - - `create-an-arangosearch-view `__ + - `create-a-search-alias-view `__ + - `create-an-arangosearch-view `__ """ # noqa: E501 data: Json = {"name": name, "type": view_type} if properties is not None: @@ -1461,8 +1461,8 @@ async def replace_view(self, name: str, properties: Json) -> Result[Json]: ViewReplaceError: If the operation fails. References: - - `replace-the-properties-of-a-search-alias-view `__ - - `replace-the-properties-of-an-arangosearch-view `__ + - `replace-the-properties-of-a-search-alias-view `__ + - `replace-the-properties-of-an-arangosearch-view `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -1491,8 +1491,8 @@ async def update_view(self, name: str, properties: Json) -> Result[Json]: ViewUpdateError: If the operation fails. References: - - `update-the-properties-of-a-search-alias-view `__ - - `update-the-properties-of-an-arangosearch-view `__ + - `update-the-properties-of-a-search-alias-view `__ + - `update-the-properties-of-an-arangosearch-view `__ """ # noqa: E501 request = Request( method=Method.PATCH, @@ -1518,8 +1518,8 @@ async def rename_view(self, name: str, new_name: str) -> None: ViewRenameError: If the operation fails. References: - - `rename-a-view `__ - - `rename-a-view `__ + - `rename-a-view `__ + - `rename-a-view `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -1551,8 +1551,8 @@ async def delete_view( ViewDeleteError: If the operation fails. References: - - `drop-a-view `__ - - `drop-a-view `__ + - `drop-a-view `__ + - `drop-a-view `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/view/{name}") @@ -1575,7 +1575,7 @@ async def analyzers(self) -> Result[Jsons]: AnalyzerListError: If the operation fails. References: - - `list-all-analyzers `__ + - `list-all-analyzers `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/analyzer") @@ -1597,7 +1597,7 @@ async def analyzer(self, name: str) -> Result[Json]: dict: Analyzer properties. 
References: - - `get-an-analyzer-definition `__ + - `get-an-analyzer-definition `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/analyzer/{name}") @@ -1632,7 +1632,7 @@ async def create_analyzer( AnalyzerCreateError: If the operation fails. References: - - `create-an-analyzer `__ + - `create-an-analyzer `__ """ # noqa: E501 data: Json = {"name": name, "type": analyzer_type} if properties is not None: @@ -1671,7 +1671,7 @@ async def delete_analyzer( AnalyzerDeleteError: If the operation fails. References: - - `remove-an-analyzer `__ + - `remove-an-analyzer `__ """ # noqa: E501 params: Params = {} if force is not None: @@ -1728,7 +1728,7 @@ async def user(self, username: str) -> Result[UserInfo]: UserGetError: If the operation fails. References: - - `get-a-user` `__ + - `get-a-user` `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/user/{username}") @@ -1757,7 +1757,7 @@ async def users(self) -> Result[Sequence[UserInfo]]: UserListError: If the operation fails. References: - - `list-available-users `__ + - `list-available-users `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/user") @@ -1792,7 +1792,7 @@ async def create_user(self, user: UserInfo | Json) -> Result[UserInfo]: await db.create_user({user="john", password="secret"}) References: - - `create-a-user `__ + - `create-a-user `__ """ # noqa: E501 if isinstance(user, dict): user = UserInfo(**user) @@ -1832,7 +1832,7 @@ async def replace_user(self, user: UserInfo | Json) -> Result[UserInfo]: UserReplaceError: If the operation fails. References: - - `replace-a-user `__ + - `replace-a-user `__ """ # noqa: E501 if isinstance(user, dict): user = UserInfo(**user) @@ -1872,7 +1872,7 @@ async def update_user(self, user: UserInfo | Json) -> Result[UserInfo]: UserUpdateError: If the operation fails. References: - - `update-a-user `__ + - `update-a-user `__ """ # noqa: E501 if isinstance(user, dict): user = UserInfo(**user) @@ -1917,7 +1917,7 @@ async def delete_user( UserDeleteError: If the operation fails. References: - - `remove-a-user `__ + - `remove-a-user `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/user/{username}") @@ -1945,7 +1945,7 @@ async def permissions(self, username: str, full: bool = True) -> Result[Json]: PermissionListError: If the operation fails. References: - - `list-a-users-accessible-databases `__ + - `list-a-users-accessible-databases `__ """ # noqa: 501 request = Request( method=Method.GET, @@ -1981,8 +1981,8 @@ async def permission( PermissionGetError: If the operation fails. References: - - `get-a-users-database-access-level `__ - - `get-a-users-collection-access-level `__ + - `get-a-users-database-access-level `__ + - `get-a-users-collection-access-level `__ """ # noqa: 501 endpoint = f"/_api/user/{username}/database/{database}" if collection is not None: @@ -2022,8 +2022,8 @@ async def update_permission( is `False`. References: - - `set-a-users-database-access-level `__ - - `set-a-users-collection-access-level `__ + - `set-a-users-database-access-level `__ + - `set-a-users-collection-access-level `__ """ # noqa: E501 endpoint = f"/_api/user/{username}/database/{database}" if collection is not None: @@ -2067,8 +2067,8 @@ async def reset_permission( is `False`. 
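User and permission management from the hunks above can be chained like this. A minimal sketch; the `update_permission` keyword names are assumptions based on the underlying HTTP routes (username/database/permission), the rest follows the signatures in this diff:

    async def onboard_user(db):
        # create_user also accepts a UserInfo instance (see typings.py later in this series).
        await db.create_user({"user": "john", "password": "secret"})
        await db.update_permission(username="john", permission="rw", database="students_db")
        # Full listing of the databases john can access.
        print(await db.permissions("john", full=True))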
References: - - `clear-a-users-database-access-level `__ - - `clear-a-users-collection-access-level `__ + - `clear-a-users-database-access-level `__ + - `clear-a-users-collection-access-level `__ """ # noqa: E501 endpoint = f"/_api/user/{username}/database/{database}" if collection is not None: @@ -2098,7 +2098,7 @@ async def jwt_secrets(self) -> Result[Json]: JWTSecretListError: If the operation fails. References: - - `get-information-about-the-loaded-jwt-secrets `__ + - `get-information-about-the-loaded-jwt-secrets `__ """ # noqa: 501 request = Request(method=Method.GET, endpoint="/_admin/server/jwt") @@ -2120,7 +2120,7 @@ async def reload_jwt_secrets(self) -> Result[Json]: JWTSecretReloadError: If the operation fails. References: - - `hot-reload-the-jwt-secrets-from-disk `__ + - `hot-reload-the-jwt-secrets-from-disk `__ """ # noqa: 501 request = Request( method=Method.POST, endpoint="/_admin/server/jwt", prefix_needed=False @@ -2238,7 +2238,7 @@ async def tls(self) -> Result[Json]: ServerTLSError: If the operation fails. References: - - `get-the-tls-data `__ + - `get-the-tls-data `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/server/tls") @@ -2262,7 +2262,7 @@ async def reload_tls(self) -> Result[Json]: ServerTLSReloadError: If the operation fails. References: - - `reload-the-tls-data `__ + - `reload-the-tls-data `__ """ # noqa: E501 request = Request(method=Method.POST, endpoint="/_admin/server/tls") @@ -2287,7 +2287,7 @@ async def encryption(self) -> Result[Json]: ServerEncryptionError: If the operation fails. References: - - `rotate-the-encryption-keys `__ + - `rotate-the-encryption-keys `__ """ # noqa: E501 request = Request(method=Method.POST, endpoint="/_admin/server/encryption") @@ -2360,7 +2360,7 @@ async def execute_transaction( TransactionExecuteError: If the operation fails on the server side. References: - - `execute-a-javascript-transaction `__ + - `execute-a-javascript-transaction `__ """ # noqa: 501 m = "JavaScript Transactions are deprecated from ArangoDB v3.12.0 onward and will be removed in a future version." # noqa: E501 warn(m, DeprecationWarning, stacklevel=2) @@ -2411,7 +2411,7 @@ async def version(self, details: bool = False) -> Result[Json]: ServerVersionError: If the operation fails on the server side. References: - - `get-the-server-version `__ + - `get-the-server-version `__ """ # noqa: E501 request = Request( method=Method.GET, endpoint="/_api/version", params={"details": details} @@ -2434,7 +2434,7 @@ async def tasks(self) -> Result[Jsons]: TaskListError: If the list cannot be retrieved. References: - - `list-all-tasks `__ + - `list-all-tasks `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/tasks") @@ -2459,7 +2459,7 @@ async def task(self, task_id: str) -> Result[Json]: TaskGetError: If the task details cannot be retrieved. References: - - `get-a-task `__ + - `get-a-task `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/tasks/{task_id}") @@ -2499,8 +2499,8 @@ async def create_task( TaskCreateError: If the task cannot be created. References: - - `create-a-task `__ - - `create-a-task-with-id `__ + - `create-a-task `__ + - `create-a-task-with-id `__ """ # noqa: E501 data: Json = {"command": command} if name is not None: @@ -2553,7 +2553,7 @@ async def delete_task( TaskDeleteError: If the operation fails. 
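Server task management (tasks/task/create_task/delete_task, shown above) is a small lifecycle. A minimal sketch; the `"id"` response key is an assumption taken from the HTTP API, while `command` and `name` appear in the diff:

    async def schedule_tick(db):
        task = await db.create_task(
            command="console.log('tick');",  # JavaScript executed on the server
            name="tick",
        )
        print(await db.tasks())              # all registered tasks
        print(await db.task(task["id"]))     # details for the new task ("id" key assumed)
        await db.delete_task(task["id"])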
References: - - `delete-a-task `__ + - `delete-a-task `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/tasks/{task_id}") @@ -2576,7 +2576,7 @@ async def engine(self) -> Result[Json]: ServerEngineError: If the operation fails. References: - - `get-the-storage-engine-type `__ + - `get-the-storage-engine-type `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/engine") @@ -2598,7 +2598,7 @@ async def time(self) -> Result[datetime]: ServerTimeError: If the operation fails. References: - - `get-the-system-time `__ + - `get-the-system-time `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/time") @@ -2621,7 +2621,7 @@ async def check_availability(self) -> Result[str]: ServerCheckAvailabilityError: If the operation fails. References: - - `check-server-availability `__ + - `check-server-availability `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -2649,7 +2649,7 @@ async def support_info(self) -> Result[Json]: DatabaseSupportInfoError: If the operation fails. References: - - `get-information-about-the-deployment `__ + - `get-information-about-the-deployment `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/support-info") @@ -2672,7 +2672,7 @@ async def options(self) -> Result[Json]: ServerCurrentOptionsGetError: If the operation fails. References: - - `get-the-startup-option-configuration `__ + - `get-the-startup-option-configuration `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/options") @@ -2694,7 +2694,7 @@ async def options_available(self) -> Result[Json]: ServerAvailableOptionsGetError: If the operation fails. References: - - `get-the-available-startup-options `__ + - `get-the-available-startup-options `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/options-description") @@ -2716,7 +2716,7 @@ async def mode(self) -> Result[str]: ServerModeError: If the operation fails. References: - - `return-whether-or-not-a-server-is-in-read-only-mode `__ + - `return-whether-or-not-a-server-is-in-read-only-mode `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/server/mode") @@ -2740,7 +2740,7 @@ async def set_mode(self, mode: str) -> Result[str]: ServerModeSetError: If the operation fails. References: - - `set-the-server-mode-to-read-only-or-default `__ + - `set-the-server-mode-to-read-only-or-default `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -2766,7 +2766,7 @@ async def license(self) -> Result[Json]: ServerLicenseGetError: If the operation fails. References: - - `get-information-about-the-current-license `__ + - `get-information-about-the-current-license `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/license") @@ -2790,7 +2790,7 @@ async def set_license(self, license: str, force: Optional[bool] = False) -> None ServerLicenseSetError: If the operation fails. References: - - `set-a-new-license `__ + - `set-a-new-license `__ """ # noqa: E501 params: Params = {} if force is not None: @@ -2819,7 +2819,7 @@ async def shutdown(self, soft: Optional[bool] = None) -> None: ServerShutdownError: If the operation fails. References: - - `start-the-shutdown-sequence `__ + - `start-the-shutdown-sequence `__ """ # noqa: E501 params: Params = {} if soft is not None: @@ -2847,7 +2847,7 @@ async def shutdown_progress(self) -> Result[Json]: ServerShutdownProgressError: If the operation fails. 
References: - - `query-the-soft-shutdown-progress `__ + - `query-the-soft-shutdown-progress `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/shutdown") @@ -2884,7 +2884,7 @@ async def compact( DatabaseCompactError: If the operation fails. References: - - `compact-all-databases `__ + - `compact-all-databases `__ """ # noqa: E501 data = {} if change_level is not None: @@ -2911,7 +2911,7 @@ async def reload_routing(self) -> None: ServerReloadRoutingError: If the operation fails. References: - - `reload-the-routing-table `__ + - `reload-the-routing-table `__ """ # noqa: E501 request = Request(method=Method.POST, endpoint="/_admin/routing/reload") @@ -2934,7 +2934,7 @@ async def echo(self, body: Optional[Json] = None) -> Result[Json]: ServerEchoError: If the operation fails. References: - - `echo-a-request `__ + - `echo-a-request `__ """ # noqa: E501 data = body if body is not None else {} request = Request(method=Method.POST, endpoint="/_admin/echo", data=data) @@ -2960,7 +2960,7 @@ async def execute(self, command: str) -> Result[Any]: ServerExecuteError: If the execution fails. References: - - `execute-a-script `__ + - `execute-a-script `__ """ # noqa: E501 request = Request( method=Method.POST, endpoint="/_admin/execute", data=command.encode("utf-8") @@ -3002,7 +3002,7 @@ async def metrics(self, server_id: Optional[str] = None) -> Result[str]: ServerMetricsError: If the operation fails. References: - - `metrics-api-v2 `__ + - `metrics-api-v2 `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3058,7 +3058,7 @@ async def read_log_entries( ServerReadLogError: If the operation fails. References: - - `get-the-global-server-logs `__ + - `get-the-global-server-logs `__ """ # noqa: E501 params: Params = {} if upto is not None: @@ -3110,7 +3110,7 @@ async def log_levels( ServerLogLevelError: If the operation fails. References: - - `get-the-server-log-levels `__ + - `get-the-server-log-levels `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3166,7 +3166,7 @@ async def set_log_levels( ServerLogLevelSetError: If the operation fails. References: - - `set-the-structured-log-settings `__ + - `set-the-structured-log-settings `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3207,7 +3207,7 @@ async def reset_log_levels(self, server_id: Optional[str] = None) -> Result[Json ServerLogLevelResetError: If the operation fails. References: - - `reset-the-server-log-levels `__ + - `reset-the-server-log-levels `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3238,7 +3238,7 @@ async def log_settings(self) -> Result[Json]: ServerLogSettingError: If the operation fails. References: - - `get-the-structured-log-settings `__ + - `get-the-structured-log-settings `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -3279,7 +3279,7 @@ async def set_log_settings(self, **kwargs: Dict[str, Any]) -> Result[Json]: ServerLogSettingSetError: If the operation fails. References: - - `set-the-structured-log-settings `__ + - `set-the-structured-log-settings `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -3306,7 +3306,7 @@ async def api_calls(self) -> Result[Json]: ServerApiCallsError: If the operation fails. References: - - `get-recent-api-calls `__ + - `get-recent-api-calls `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -3379,7 +3379,7 @@ async def begin_transaction( TransactionInitError: If the operation fails on the server side. 
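For observability, the metrics and log-level endpoints above pair well. A minimal sketch; passing log topics as keyword arguments to `set_log_levels` is an assumption (the diff only shows the endpoint wiring), the other calls follow the signatures shown:

    async def inspect_server(db):
        print(await db.metrics())     # Prometheus v2 text output
        print(await db.log_levels())  # current log level per topic
        # Topic keyword names below are assumptions, not taken from this patch.
        await db.set_log_levels(queries="debug", requests="info")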
References: - - `begin-a-stream-transaction `__ + - `begin-a-stream-transaction `__ """ # noqa: E501 collections = dict() if read is not None: @@ -3463,7 +3463,7 @@ async def async_jobs( AsyncJobListError: If retrieval fails. References: - - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ + - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ """ # noqa: E501 params: Params = {} if count is not None: @@ -3496,7 +3496,7 @@ async def clear_async_jobs(self, threshold: Optional[float] = None) -> None: AsyncJobClearError: If the operation fails. References: - - `delete-async-job-results `__ + - `delete-async-job-results `__ """ # noqa: E501 if threshold is None: request = Request(method=Method.DELETE, endpoint="/_api/job/all") @@ -3516,7 +3516,7 @@ def response_handler(resp: Response) -> None: class TransactionDatabase(Database): """Database API tailored specifically for - `Stream Transactions `__. + `Stream Transactions `__. It allows you start a transaction, run multiple operations (eg. AQL queries) over a short period of time, and then commit or abort the transaction. @@ -3551,7 +3551,7 @@ async def transaction_status(self) -> str: TransactionStatusError: If the transaction is not found. References: - - `get-the-status-of-a-stream-transaction `__ + - `get-the-status-of-a-stream-transaction `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -3573,7 +3573,7 @@ async def commit_transaction(self) -> None: TransactionCommitError: If the operation fails on the server side. References: - - `commit-a-stream-transaction `__ + - `commit-a-stream-transaction `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -3593,7 +3593,7 @@ async def abort_transaction(self) -> None: TransactionAbortError: If the operation fails on the server side. References: - - `abort-a-stream-transaction `__ + - `abort-a-stream-transaction `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -3620,7 +3620,7 @@ class AsyncDatabase(Database): and no results are stored on server. References: - - `jobs `__ + - `jobs `__ """ # noqa: E501 def __init__(self, connection: Connection, return_result: bool) -> None: diff --git a/arangoasync/foxx.py b/arangoasync/foxx.py index b74d933..fe02b41 100644 --- a/arangoasync/foxx.py +++ b/arangoasync/foxx.py @@ -65,7 +65,7 @@ async def services(self, exclude_system: Optional[bool] = False) -> Result[Jsons FoxxServiceListError: If retrieval fails. References: - - `list-the-installed-services `__ + - `list-the-installed-services `__ """ # noqa: E501 params: Params = {} if exclude_system is not None: @@ -98,7 +98,7 @@ async def service(self, mount: str) -> Result[Json]: FoxxServiceGetError: If retrieval fails. References: - - `get-the-service-description `__ + - `get-the-service-description `__ """ # noqa: E501 params: Params = {"mount": mount} request = Request( @@ -142,7 +142,7 @@ async def create_service( FoxxServiceCreateError: If installation fails. References: - - `install-a-new-service-mode `__ + - `install-a-new-service-mode `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -189,7 +189,7 @@ async def delete_service( FoxxServiceDeleteError: If operations fails. References: - - `uninstall-a-service `__ + - `uninstall-a-service `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -236,7 +236,7 @@ async def replace_service( FoxxServiceReplaceError: If replacement fails. 
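`begin_transaction` (previous hunk) together with `commit_transaction`/`abort_transaction` on the returned `TransactionDatabase` gives the usual stream-transaction pattern. A minimal sketch, assuming an "accounts" collection already exists:

    async def transfer(db):
        txn_db = await db.begin_transaction(write=["accounts"])
        try:
            accounts = txn_db.collection("accounts")
            await accounts.insert({"_key": "alice", "balance": 100})
            await accounts.update({"_key": "alice", "balance": 42})
            await txn_db.commit_transaction()
        except Exception:
            await txn_db.abort_transaction()
            raise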
References: - - `replace-a-service `__ + - `replace-a-service `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -298,7 +298,7 @@ async def update_service( FoxxServiceUpdateError: If upgrade fails. References: - - `upgrade-a-service `__ + - `upgrade-a-service `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -345,7 +345,7 @@ async def config(self, mount: str) -> Result[Json]: FoxxConfigGetError: If retrieval fails. References: - - `get-the-configuration-options `__ + - `get-the-configuration-options `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -375,7 +375,7 @@ async def update_config(self, mount: str, options: Json) -> Result[Json]: FoxxConfigUpdateError: If update fails. References: - - `update-the-configuration-options `__ + - `update-the-configuration-options `__ """ # noqa: E501 request = Request( method=Method.PATCH, @@ -407,7 +407,7 @@ async def replace_config(self, mount: str, options: Json) -> Result[Json]: FoxxConfigReplaceError: If replace fails. References: - - `replace-the-configuration-options `__ + - `replace-the-configuration-options `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -437,7 +437,7 @@ async def dependencies(self, mount: str) -> Result[Json]: FoxxDependencyGetError: If retrieval fails. References: - - `get-the-dependency-options `__ + - `get-the-dependency-options `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -467,7 +467,7 @@ async def update_dependencies(self, mount: str, options: Json) -> Result[Json]: FoxxDependencyUpdateError: If update fails. References: - - `update-the-dependency-options `__ + - `update-the-dependency-options `__ """ # noqa: E501 request = Request( method=Method.PATCH, @@ -498,7 +498,7 @@ async def replace_dependencies(self, mount: str, options: Json) -> Result[Json]: FoxxDependencyReplaceError: If replace fails. References: - - `replace-the-dependency-options `__ + - `replace-the-dependency-options `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -528,7 +528,7 @@ async def scripts(self, mount: str) -> Result[Json]: FoxxScriptListError: If retrieval fails. References: - - `list-the-service-scripts `__ + - `list-the-service-scripts `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -561,7 +561,7 @@ async def run_script( FoxxScriptRunError: If script fails. References: - - `run-a-service-script `__ + - `run-a-service-script `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -612,7 +612,7 @@ async def run_tests( FoxxTestRunError: If test fails. References: - - `run-the-service-tests `__ + - `run-the-service-tests `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -665,7 +665,7 @@ async def enable_development(self, mount: str) -> Result[Json]: FoxxDevModeEnableError: If the operation fails. References: - - `enable-the-development-mode `__ + - `enable-the-development-mode `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -697,7 +697,7 @@ async def disable_development(self, mount: str) -> Result[Json]: FoxxDevModeDisableError: If the operation fails. References: - - `disable-the-development-mode `__ + - `disable-the-development-mode `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -726,7 +726,7 @@ async def readme(self, mount: str) -> Result[str]: FoxxReadmeGetError: If retrieval fails. 
References: - - `get-the-service-readme `__ + - `get-the-service-readme `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -754,7 +754,7 @@ async def swagger(self, mount: str) -> Result[Json]: FoxxSwaggerGetError: If retrieval fails. References: - - `get-the-swagger-description `__ + - `get-the-swagger-description `__ """ # noqa: E501 request = Request( method=Method.GET, endpoint="/_api/foxx/swagger", params={"mount": mount} @@ -785,7 +785,7 @@ async def download(self, mount: str) -> Result[bytes]: FoxxDownloadError: If download fails. References: - - `download-a-service-bundle `__ + - `download-a-service-bundle `__ """ # noqa: E501 request = Request( method=Method.POST, endpoint="/_api/foxx/download", params={"mount": mount} @@ -812,7 +812,7 @@ async def commit(self, replace: Optional[bool] = None) -> None: FoxxCommitError: If commit fails. References: - - `commit-the-local-service-state `__ + - `commit-the-local-service-state `__ """ # noqa: E501 params: Params = {} if replace is not None: diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 059a53e..dbb9732 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -93,7 +93,7 @@ async def properties(self) -> Result[GraphProperties]: GraphProperties: If the operation fails. References: - - `get-a-graph `__ + - `get-a-graph `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") @@ -132,7 +132,7 @@ async def vertex_collections(self) -> Result[List[str]]: VertexCollectionListError: If the operation fails. References: - - `list-vertex-collections `__ + - `list-vertex-collections `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -191,7 +191,7 @@ async def create_vertex_collection( VertexCollectionCreateError: If the operation fails. References: - - `add-a-vertex-collection `__ + - `add-a-vertex-collection `__ """ # noqa: E501 data: Json = {"collection": name} @@ -228,7 +228,7 @@ async def delete_vertex_collection(self, name: str, purge: bool = False) -> None VertexCollectionDeleteError: If the operation fails. References: - - `remove-a-vertex-collection `__ + - `remove-a-vertex-collection `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -300,7 +300,7 @@ async def vertex( DocumentParseError: If the document is malformed. References: - - `get-a-vertex `__ + - `get-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(vertex) return await self.vertex_collection(col).get( @@ -337,7 +337,7 @@ async def insert_vertex( DocumentParseError: If the document is malformed. References: - - `create-a-vertex `__ + - `create-a-vertex `__ """ # noqa: E501 return await self.vertex_collection(collection).insert( vertex, @@ -379,7 +379,7 @@ async def update_vertex( DocumentUpdateError: If update fails. References: - - `update-a-vertex `__ + - `update-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, vertex)) return await self.vertex_collection(col).update( @@ -425,7 +425,7 @@ async def replace_vertex( DocumentReplaceError: If replace fails. References: - - `replace-a-vertex `__ + - `replace-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, vertex)) return await self.vertex_collection(col).replace( @@ -468,7 +468,7 @@ async def delete_vertex( DocumentDeleteError: If deletion fails. 
References: - - `remove-a-vertex `__ + - `remove-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, vertex)) return await self.vertex_collection(col).delete( @@ -551,7 +551,7 @@ async def edge_collections(self) -> Result[List[str]]: EdgeCollectionListError: If the operation fails. References: - - `list-edge-collections `__ + - `list-edge-collections `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -602,7 +602,7 @@ async def create_edge_definition( EdgeDefinitionCreateError: If the operation fails. References: - - `add-an-edge-definition `__ + - `add-an-edge-definition `__ """ # noqa: E501 data: Json = { "collection": edge_collection, @@ -659,7 +659,7 @@ async def replace_edge_definition( EdgeDefinitionReplaceError: If the operation fails. References: - - `replace-an-edge-definition `__ + - `replace-an-edge-definition `__ """ # noqa: E501 data: Json = { "collection": edge_collection, @@ -712,7 +712,7 @@ async def delete_edge_definition( EdgeDefinitionDeleteError: If the operation fails. References: - - `remove-an-edge-definition `__ + - `remove-an-edge-definition `__ """ # noqa: E501 params: Params = {} if drop_collections is not None: @@ -793,7 +793,7 @@ async def edge( DocumentParseError: If the document is malformed. References: - - `get-an-edge `__ + - `get-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(edge) return await self.edge_collection(col).get( @@ -832,7 +832,7 @@ async def insert_edge( DocumentParseError: If the document is malformed. References: - - `create-an-edge `__ + - `create-an-edge `__ """ # noqa: E501 return await self.edge_collection(collection).insert( edge, @@ -875,7 +875,7 @@ async def update_edge( DocumentUpdateError: If update fails. References: - - `update-an-edge `__ + - `update-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, edge)) return await self.edge_collection(col).update( @@ -923,7 +923,7 @@ async def replace_edge( DocumentReplaceError: If replace fails. References: - - `replace-an-edge `__ + - `replace-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, edge)) return await self.edge_collection(col).replace( @@ -967,7 +967,7 @@ async def delete_edge( DocumentDeleteError: If deletion fails. References: - - `remove-an-edge `__ + - `remove-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, edge)) return await self.edge_collection(col).delete( @@ -1001,7 +1001,7 @@ async def edges( EdgeListError: If retrieval fails. References: - - `get-inbound-and-outbound-edges `__ + - `get-inbound-and-outbound-edges `__ """ # noqa: E501 return await self.edge_collection(collection).edges( vertex, diff --git a/arangoasync/job.py b/arangoasync/job.py index 13794fe..9f64764 100644 --- a/arangoasync/job.py +++ b/arangoasync/job.py @@ -27,7 +27,7 @@ class AsyncJob(Generic[T]): response_handler: HTTP response handler References: - - `jobs `__ + - `jobs `__ """ # noqa: E501 def __init__( @@ -68,7 +68,7 @@ async def status(self) -> str: AsyncJobStatusError: If retrieval fails or the job is not found. References: - - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ + - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/job/{self._id}") response = await self._conn.send_request(request) @@ -101,7 +101,7 @@ async def result(self) -> T: is still pending. 
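The graph-level helpers above (edge definitions plus vertex/edge CRUD) combine as follows. A minimal sketch; the `db.graph("school")` accessor and the `from_vertex_collections`/`to_vertex_collections` keyword names are assumptions, the remaining calls mirror the signatures in this diff:

    async def build_school_graph(db):
        graph = db.graph("school")  # accessor name assumed
        await graph.create_edge_definition(
            edge_collection="teaches",
            from_vertex_collections=["teachers"],  # keyword names assumed
            to_vertex_collections=["students"],
        )
        await graph.insert_vertex("teachers", {"_key": "t1", "name": "Prof. X"})
        await graph.insert_vertex("students", {"_key": "s1", "name": "Alice"})
        await graph.insert_edge(
            "teaches", {"_from": "teachers/t1", "_to": "students/s1"}
        )
        print(await graph.edges("teaches", "teachers/t1"))  # inbound and outbound edges of t1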
References: - - `get-the-results-of-an-async-job `__ + - `get-the-results-of-an-async-job `__ """ # noqa: E501 request = Request(method=Method.PUT, endpoint=f"/_api/job/{self._id}") response = await self._conn.send_request(request) @@ -142,7 +142,7 @@ async def cancel(self, ignore_missing: bool = False) -> bool: AsyncJobCancelError: If cancellation fails. References: - - `cancel-an-async-job `__ + - `cancel-an-async-job `__ """ # noqa: E501 request = Request(method=Method.PUT, endpoint=f"/_api/job/{self._id}/cancel") response = await self._conn.send_request(request) @@ -173,7 +173,7 @@ async def clear( AsyncJobClearError: If deletion fails. References: - - `delete-async-job-results `__ + - `delete-async-job-results `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/job/{self._id}") resp = await self._conn.send_request(request) diff --git a/arangoasync/replication.py b/arangoasync/replication.py index 9d96709..436dc94 100644 --- a/arangoasync/replication.py +++ b/arangoasync/replication.py @@ -64,7 +64,7 @@ async def inventory( ReplicationInventoryError: If retrieval fails. References: - - `get-a-replication-inventory `__ + - `get-a-replication-inventory `__ """ # noqa: E501 params: Params = dict() params["batchId"] = batch_id @@ -112,7 +112,7 @@ async def dump( ReplicationDumpError: If retrieval fails. References: - - `get-a-replication-dump `__ + - `get-a-replication-dump `__ """ # noqa: E501 params: Params = dict() params["collection"] = collection @@ -149,7 +149,7 @@ async def cluster_inventory( ReplicationClusterInventoryError: If retrieval fails. References: - - `get-the-cluster-collections-and-indexes `__ + - `get-the-cluster-collections-and-indexes `__ """ # noqa: E501 params: Params = {} if include_system is not None: @@ -179,7 +179,7 @@ async def logger_state(self) -> Result[Json]: ReplicationLoggerStateError: If retrieval fails. References: - - `get-the-replication-logger-state `__ + - `get-the-replication-logger-state `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -204,7 +204,7 @@ async def applier_config(self) -> Result[Json]: ReplicationApplierConfigError: If retrieval fails. References: - - `get-the-replication-applier-configuration `__ + - `get-the-replication-applier-configuration `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -229,7 +229,7 @@ async def applier_state(self) -> Result[Json]: ReplicationApplierStateError: If retrieval fails. References: - - `get-the-replication-applier-state `__ + - `get-the-replication-applier-state `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -254,7 +254,7 @@ async def server_id(self) -> Result[str]: ReplicationServerIDError: If retrieval fails. 
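The replication endpoints above are read-mostly and easy to poll. A minimal sketch; the `db.replication` accessor name is an assumption, mirroring the other per-API wrappers in this driver, while the method names come from the hunks above:

    async def replication_overview(db):
        print(await db.replication.logger_state())    # write-ahead log state
        print(await db.replication.applier_config())  # applier configuration
        print(await db.replication.applier_state())   # applier progress
        print(await db.replication.server_id())       # this server's replication id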
References: - - `get-the-replication-server-id `__ + - `get-the-replication-server-id `__ """ # noqa: E501 request = Request( method=Method.GET, diff --git a/arangoasync/typings.py b/arangoasync/typings.py index 0d85035..d6adb4d 100644 --- a/arangoasync/typings.py +++ b/arangoasync/typings.py @@ -223,7 +223,7 @@ class KeyOptions(JsonWrapper): } References: - - `create-a-collection `__ + - `create-a-collection `__ """ # noqa: E501 def __init__( @@ -310,7 +310,7 @@ class CollectionInfo(JsonWrapper): } References: - - `get-the-collection-information `__ + - `get-the-collection-information `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -384,7 +384,7 @@ class UserInfo(JsonWrapper): } References: - - `create-a-user `__ + - `create-a-user `__ """ # noqa: E501 def __init__( @@ -484,7 +484,7 @@ class ServerStatusInformation(JsonWrapper): } References: - - `get-server-status-information `__ + - `get-server-status-information `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -543,7 +543,7 @@ class DatabaseProperties(JsonWrapper): """Properties of the database. References: - - `get-information-about-the-current-database `__ + - `get-information-about-the-current-database `__ """ # noqa: E501 def __init__(self, data: Json, strip_result: bool = False) -> None: @@ -650,7 +650,7 @@ class CollectionProperties(JsonWrapper): } References: - - `get-the-properties-of-a-collection `__ + - `get-the-properties-of-a-collection `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -870,7 +870,7 @@ class CollectionStatistics(JsonWrapper): } References: - - `get-the-collection-statistics `__ + - `get-the-collection-statistics `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -976,7 +976,7 @@ class IndexProperties(JsonWrapper): } References: - - `get-an-index `__ + - `get-an-index `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1253,7 +1253,7 @@ class QueryProperties(JsonWrapper): } References: - - `create-a-cursor `__ + - `create-a-cursor `__ """ # noqa: E501 def __init__( @@ -1414,7 +1414,7 @@ class QueryExecutionPlan(JsonWrapper): """The execution plan of an AQL query. References: - - `plan `__ + - `plan `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1468,7 +1468,7 @@ class QueryExecutionProfile(JsonWrapper): } References: - - `profile `__ + - `profile `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1536,7 +1536,7 @@ class QueryExecutionStats(JsonWrapper): } References: - - `stats `__ + - `stats `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1615,7 +1615,7 @@ class QueryExecutionExtra(JsonWrapper): """Extra information about the query result. 
References: - - `extra `__ + - `extra `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1659,7 +1659,7 @@ class QueryTrackingConfiguration(JsonWrapper): } References: - - `get-the-aql-query-tracking-configuration `__ + - `get-the-aql-query-tracking-configuration `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1718,7 +1718,7 @@ class QueryExplainOptions(JsonWrapper): } References: - - `explain-an-aql-query `__ + - `explain-an-aql-query `__ """ # noqa: E501 def __init__( @@ -1764,8 +1764,8 @@ class QueryCacheProperties(JsonWrapper): } References: - - `get-the-aql-query-results-cache-configuration `__ - - `set-the-aql-query-results-cache-configuration `__ + - `get-the-aql-query-results-cache-configuration `__ + - `set-the-aql-query-results-cache-configuration `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1818,9 +1818,9 @@ class GraphProperties(JsonWrapper): } References: - - `get-a-graph `__ - - `list-all-graphs `__ - - `create-a-graph `__ + - `get-a-graph `__ + - `list-all-graphs `__ + - `create-a-graph `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1927,7 +1927,7 @@ class GraphOptions(JsonWrapper): graph. References: - - `create-a-graph `__ + - `create-a-graph `__ """ # noqa: E501 def __init__( @@ -1982,7 +1982,7 @@ class VertexCollectionOptions(JsonWrapper): be a string and a valid collection name. References: - - `add-a-vertex-collection `__ + - `add-a-vertex-collection `__ """ # noqa: E501 def __init__( @@ -2009,7 +2009,7 @@ class EdgeDefinitionOptions(JsonWrapper): be a string and a valid collection name. References: - - `add-an-edge-definition `__ + - `add-an-edge-definition `__ """ # noqa: E501 def __init__( diff --git a/docs/analyzer.rst b/docs/analyzer.rst index cd92018..851ab02 100644 --- a/docs/analyzer.rst +++ b/docs/analyzer.rst @@ -3,7 +3,7 @@ Analyzers For more information on analyzers, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai **Example:** diff --git a/docs/aql.rst b/docs/aql.rst index 69a9bf6..97d4f6c 100644 --- a/docs/aql.rst +++ b/docs/aql.rst @@ -7,7 +7,7 @@ operations such as creating or deleting :doc:`databases `, :doc:`collections ` or :doc:`indexes `. For more information, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai AQL Queries =========== diff --git a/docs/backup.rst b/docs/backup.rst index de36041..93085f0 100644 --- a/docs/backup.rst +++ b/docs/backup.rst @@ -5,7 +5,7 @@ Hot Backups are near instantaneous consistent snapshots of an entire ArangoDB de This includes all databases, collections, indexes, Views, graphs, and users at any given time. For more information, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai **Example:** diff --git a/docs/certificates.rst b/docs/certificates.rst index ee49e13..c0ba7af 100644 --- a/docs/certificates.rst +++ b/docs/certificates.rst @@ -129,4 +129,4 @@ See the `ArangoDB Manual`_ for more information on security features. # Reload TLS data tls = await db.reload_tls() -.. _ArangoDB Manual: https://docs.arangodb.com/stable/develop/http-api/security/ +.. 
_ArangoDB Manual: https://docs.arango.ai/stable/develop/http-api/security/ diff --git a/docs/cluster.rst b/docs/cluster.rst index c5e58aa..d5c4908 100644 --- a/docs/cluster.rst +++ b/docs/cluster.rst @@ -6,7 +6,7 @@ cluster nodes and the cluster as a whole, as well as monitor and administrate cluster deployments. For more information on the design and architecture, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai .. code-block:: python diff --git a/docs/document.rst b/docs/document.rst index da6434b..09b87e0 100644 --- a/docs/document.rst +++ b/docs/document.rst @@ -23,7 +23,7 @@ For more information on documents and associated terminologies, refer to `ArangoDB Manual`_. Here is an example of a valid document in "students" collection: -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai .. code-block:: json diff --git a/docs/foxx.rst b/docs/foxx.rst index 818c80e..91e3423 100644 --- a/docs/foxx.rst +++ b/docs/foxx.rst @@ -4,7 +4,7 @@ Foxx **Foxx** is a microservice framework which lets you define custom HTTP endpoints that extend ArangoDB's REST API. For more information, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai **Example:** diff --git a/docs/graph.rst b/docs/graph.rst index 0f0bbbf..b2c2467 100644 --- a/docs/graph.rst +++ b/docs/graph.rst @@ -7,7 +7,7 @@ A **graph** consists of vertices and edges. Vertices are stored as documents in their relations are specified with :ref:`edge definitions `. For more information, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai **Example:** diff --git a/docs/indexes.rst b/docs/indexes.rst index 911efaa..63e2359 100644 --- a/docs/indexes.rst +++ b/docs/indexes.rst @@ -7,7 +7,7 @@ cannot be deleted or modified. Every edge collection has additional indexes on fields ``_from`` and ``_to``. For more information on indexes, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai **Example:** diff --git a/docs/overview.rst b/docs/overview.rst index f723234..77c0fc7 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -64,7 +64,7 @@ You may also use the client without a context manager, but you must ensure to cl Another example with `graphs`_: -.. _graphs: https://docs.arangodb.com/stable/graphs/ +.. _graphs: https://docs.arango.ai/stable/graphs/ .. code-block:: python diff --git a/docs/view.rst b/docs/view.rst index f680b54..3a1ef06 100644 --- a/docs/view.rst +++ b/docs/view.rst @@ -4,7 +4,7 @@ Views All types of views are supported. . For more information on **view** management, refer to `ArangoDB Manual`_. -.. _ArangoDB Manual: https://docs.arangodb.com +.. _ArangoDB Manual: https://docs.arango.ai **Example:** @@ -63,7 +63,7 @@ management, refer to `ArangoDB Manual`_. For more information on the content of view **properties**, see `Search Alias Views`_ and `Arangosearch Views`_. -.. _Search Alias Views: https://docs.arangodb.com/stable/develop/http-api/views/search-alias-views/ -.. _Arangosearch Views: https://docs.arangodb.com/stable/develop/http-api/views/arangosearch-views/ +.. _Search Alias Views: https://docs.arango.ai/stable/develop/http-api/views/search-alias-views/ +.. 
_Arangosearch Views: https://docs.arango.ai/stable/develop/http-api/views/arangosearch-views/ Refer to :class:`arangoasync.database.StandardDatabase` class for API specification. From 4f7bafb05d82d68b97e39b2b22e6b55add835d43 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 20 Dec 2025 23:55:58 +0800 Subject: [PATCH 29/30] Fixing job parameters --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index c7f0fdd..cb02c17 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -87,7 +87,7 @@ jobs: fi if [ << parameters.arangodb_license >> != "enterprise" ]; then - args+=("--skip enterprise") + args+=("--skip" "enterprise") fi echo "Running pytest with args: ${args[@]}" From 6e9b9d587aaacfb0ea39cdd66518d3f4bb725439 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 21 Dec 2025 00:15:31 +0800 Subject: [PATCH 30/30] URL fix --- README.md | 2 +- arangoasync/aql.py | 38 +++---- arangoasync/backup.py | 12 +-- arangoasync/cluster.py | 24 ++--- arangoasync/collection.py | 90 ++++++++--------- arangoasync/cursor.py | 6 +- arangoasync/database.py | 198 ++++++++++++++++++------------------- arangoasync/foxx.py | 42 ++++---- arangoasync/graph.py | 38 +++---- arangoasync/job.py | 10 +- arangoasync/replication.py | 14 +-- arangoasync/typings.py | 48 ++++----- arangoasync/version.py | 2 +- docs/certificates.rst | 2 +- docs/overview.rst | 2 +- docs/view.rst | 4 +- 16 files changed, 266 insertions(+), 266 deletions(-) diff --git a/README.md b/README.md index 1232efa..e35c413 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ async def main(): student_names.append(doc["name"]) ``` -Another example with [graphs](https://docs.arango.ai/stable/graphs/): +Another example with [graphs](https://docs.arango.ai/arangodb/stable/graphs/): ```python async def main(): diff --git a/arangoasync/aql.py b/arangoasync/aql.py index ec8efe4..ea57b75 100644 --- a/arangoasync/aql.py +++ b/arangoasync/aql.py @@ -78,7 +78,7 @@ async def entries(self) -> Result[Jsons]: AQLCacheEntriesError: If retrieval fails. References: - - `list-the-entries-of-the-aql-query-results-cache `__ + - `list-the-entries-of-the-aql-query-results-cache `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query-cache/entries") @@ -99,7 +99,7 @@ async def plan_entries(self) -> Result[Jsons]: AQLCacheEntriesError: If retrieval fails. References: - - `list-the-entries-of-the-aql-query-plan-cache `__ + - `list-the-entries-of-the-aql-query-plan-cache `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query-plan-cache") @@ -117,7 +117,7 @@ async def clear(self) -> Result[None]: AQLCacheClearError: If clearing the cache fails. References: - - `clear-the-aql-query-results-cache `__ + - `clear-the-aql-query-results-cache `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint="/_api/query-cache") @@ -134,7 +134,7 @@ async def clear_plan(self) -> Result[None]: AQLCacheClearError: If clearing the cache fails. References: - - `clear-the-aql-query-plan-cache `__ + - `clear-the-aql-query-plan-cache `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint="/_api/query-plan-cache") @@ -154,7 +154,7 @@ async def properties(self) -> Result[QueryCacheProperties]: AQLCachePropertiesError: If retrieval fails. 
References: - - `get-the-aql-query-results-cache-configuration `__ + - `get-the-aql-query-results-cache-configuration `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query-cache/properties") @@ -193,7 +193,7 @@ async def configure( AQLCacheConfigureError: If setting the configuration fails. References: - - `set-the-aql-query-results-cache-configuration `__ + - `set-the-aql-query-results-cache-configuration `__ """ # noqa: E501 data: Json = dict() if mode is not None: @@ -298,7 +298,7 @@ async def execute( Cursor: Result cursor. References: - - `create-a-cursor `__ + - `create-a-cursor `__ """ # noqa: E501 data: Json = dict(query=query) if count is not None: @@ -353,7 +353,7 @@ async def tracking(self) -> Result[QueryTrackingConfiguration]: AQLQueryTrackingGetError: If retrieval fails. References: - - `get-the-aql-query-tracking-configuration `__ + - `get-the-aql-query-tracking-configuration `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query/properties") @@ -397,7 +397,7 @@ async def set_tracking( AQLQueryTrackingSetError: If setting the configuration fails. References: - - `update-the-aql-query-tracking-configuration `__ + - `update-the-aql-query-tracking-configuration `__ """ # noqa: E501 data: Json = dict() @@ -462,7 +462,7 @@ async def queries(self, all_queries: bool = False) -> Result[Jsons]: AQLQueryListError: If retrieval fails. References: - - `list-the-running-queries `__ + - `list-the-running-queries `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -493,7 +493,7 @@ async def slow_queries(self, all_queries: bool = False) -> Result[Jsons]: AQLQueryListError: If retrieval fails. References: - - `list-the-slow-aql-queries `__ + - `list-the-slow-aql-queries `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -523,7 +523,7 @@ async def clear_slow_queries(self, all_queries: bool = False) -> Result[None]: AQLQueryClearError: If retrieval fails. References: - - `clear-the-list-of-slow-aql-queries `__ + - `clear-the-list-of-slow-aql-queries `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -560,7 +560,7 @@ async def kill( AQLQueryKillError: If killing the query fails. References: - - `kill-a-running-aql-query `__ + - `kill-a-running-aql-query `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -598,7 +598,7 @@ async def explain( AQLQueryExplainError: If retrieval fails. References: - - `explain-an-aql-query `__ + - `explain-an-aql-query `__ """ # noqa: E501 data: Json = dict(query=query) if bind_vars is not None: @@ -634,7 +634,7 @@ async def validate(self, query: str) -> Result[Json]: AQLQueryValidateError: If validation fails. References: - - `parse-an-aql-query `__ + - `parse-an-aql-query `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -659,7 +659,7 @@ async def query_rules(self) -> Result[Jsons]: AQLQueryRulesGetError: If retrieval fails. References: - - `list-all-aql-optimizer-rules `__ + - `list-all-aql-optimizer-rules `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/query/rules") @@ -684,7 +684,7 @@ async def functions(self, namespace: Optional[str] = None) -> Result[Jsons]: AQLFunctionListError: If retrieval fails. References: - - `list-the-registered-user-defined-aql-functions `__ + - `list-the-registered-user-defined-aql-functions `__ """ # noqa: E501 params: Json = dict() if namespace is not None: @@ -726,7 +726,7 @@ async def create_function( AQLFunctionCreateError: If registration fails. 
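The cursor and query-tracking endpoints whose reference links are updated above are reached through `db.aql` in this driver. Below is a minimal sketch of executing a query and iterating its cursor, assuming the `ArangoClient`/`Auth` setup shown in the project README; the host, credentials, collection name, and bind variable are placeholders, not values from the patch:

```python
import asyncio

from arangoasync import ArangoClient
from arangoasync.auth import Auth


async def main():
    # Placeholder host and credentials; adjust for your deployment.
    async with ArangoClient(hosts="http://localhost:8529") as client:
        db = await client.db("_system", auth=Auth(username="root", password="passwd"))

        # Run an AQL query with a bind variable and iterate the result cursor.
        cursor = await db.aql.execute(
            "FOR s IN students FILTER s.gpa >= @gpa RETURN s.name",
            bind_vars={"gpa": 3.5},
        )
        async for name in cursor:
            print(name)


asyncio.run(main())
```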
References: - - `create-a-user-defined-aql-function `__ + - `create-a-user-defined-aql-function `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -765,7 +765,7 @@ async def delete_function( AQLFunctionDeleteError: If removal fails. References: - - `remove-a-user-defined-aql-function `__ + - `remove-a-user-defined-aql-function `__ """ # noqa: E501 params: Json = dict() if group is not None: diff --git a/arangoasync/backup.py b/arangoasync/backup.py index e0847e0..7be69cd 100644 --- a/arangoasync/backup.py +++ b/arangoasync/backup.py @@ -49,7 +49,7 @@ async def get(self, backup_id: Optional[str] = None) -> Result[Json]: BackupGetError: If the operation fails. References: - - `list-backups `__ + - `list-backups `__ """ # noqa: E501 data: Json = {} if backup_id is not None: @@ -97,7 +97,7 @@ async def create( BackupCreateError: If the backup creation fails. References: - - `create-backup `__ + - `create-backup `__ """ # noqa: E501 data: Json = {} if label is not None: @@ -137,7 +137,7 @@ async def restore(self, backup_id: str) -> Result[Json]: BackupRestoreError: If the restore operation fails. References: - - `restore-backup `__ + - `restore-backup `__ """ # noqa: E501 data: Json = {"id": backup_id} request = Request( @@ -165,7 +165,7 @@ async def delete(self, backup_id: str) -> None: BackupDeleteError: If the delete operation fails. References: - - `delete-backup `__ + - `delete-backup `__ """ # noqa: E501 data: Json = {"id": backup_id} request = Request( @@ -209,7 +209,7 @@ async def upload( BackupUploadError: If upload operation fails. References: - - `upload-a-backup-to-a-remote-repository `__ + - `upload-a-backup-to-a-remote-repository `__ """ # noqa: E501 data: Json = {} if upload_id is not None: @@ -265,7 +265,7 @@ async def download( BackupDownloadError: If the download operation fails. References: - - `download-a-backup-from-a-remote-repository `__ + - `download-a-backup-from-a-remote-repository `__ """ # noqa: E501 data: Json = {} if download_id is not None: diff --git a/arangoasync/cluster.py b/arangoasync/cluster.py index 39e3d56..fa42ea3 100644 --- a/arangoasync/cluster.py +++ b/arangoasync/cluster.py @@ -45,7 +45,7 @@ async def health(self) -> Result[Json]: ClusterHealthError: If retrieval fails. References: - - `get-the-cluster-health `__ + - `get-the-cluster-health `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -74,7 +74,7 @@ async def statistics(self, db_server: str) -> Result[Json]: ClusterStatisticsError: If retrieval fails. References: - - `get-the-statistics-of-a-db-server `__ + - `get-the-statistics-of-a-db-server `__ """ # noqa: E501 params: Params = {"DBserver": db_server} @@ -103,7 +103,7 @@ async def endpoints(self) -> Result[List[str]]: ClusterEndpointsError: If retrieval fails. References: - - `list-all-coordinator-endpoints `__ + - `list-all-coordinator-endpoints `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -129,7 +129,7 @@ async def server_id(self) -> Result[str]: ClusterServerIDError: If retrieval fails. References: - - `get-the-server-id `__ + - `get-the-server-id `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -154,7 +154,7 @@ async def server_role(self) -> Result[str]: ClusterServerRoleError: If retrieval fails. References: - - `get-the-server-role `__ + - `get-the-server-role `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -182,7 +182,7 @@ async def toggle_maintenance_mode(self, mode: str) -> Result[Json]: ClusterMaintenanceModeError: If the toggle operation fails. 
References: - - `toggle-cluster-maintenance-mode `__ + - `toggle-cluster-maintenance-mode `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -212,7 +212,7 @@ async def server_maintenance_mode(self, server_id: str) -> Result[Json]: ClusterMaintenanceModeError: If retrieval fails. References: - - `get-the-maintenance-status-of-a-db-server `__ + - `get-the-maintenance-status-of-a-db-server `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -242,7 +242,7 @@ async def toggle_server_maintenance_mode( ClusterMaintenanceModeError: If the operation fails. References: - - `set-the-maintenance-status-of-a-db-server `__ + - `set-the-maintenance-status-of-a-db-server `__ """ # noqa: E501 data: Json = {"mode": mode} if timeout is not None: @@ -271,7 +271,7 @@ async def calculate_imbalance(self) -> Result[Json]: ClusterRebalanceError: If retrieval fails. References: - - `get-the-current-cluster-imbalance `__ + - `get-the-current-cluster-imbalance `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/cluster/rebalance") @@ -315,7 +315,7 @@ async def calculate_rebalance_plan( ClusterRebalanceError: If retrieval fails. References: - - `compute-a-set-of-move-shard-operations-to-improve-balance `__ + - `compute-a-set-of-move-shard-operations-to-improve-balance `__ """ # noqa: E501 data: Json = dict(version=version) if databases_excluded is not None: @@ -380,7 +380,7 @@ async def rebalance( ClusterRebalanceError: If retrieval fails. References: - - `compute-and-execute-a-set-of-move-shard-operations-to-improve-balance `__ + - `compute-and-execute-a-set-of-move-shard-operations-to-improve-balance `__ """ # noqa: E501 data: Json = dict(version=version) if databases_excluded is not None: @@ -431,7 +431,7 @@ async def execute_rebalance_plan( ClusterRebalanceError: If the execution fails. References: - - `execute-a-set-of-move-shard-operations `__ + - `execute-a-set-of-move-shard-operations `__ """ # noqa: E501 data: Json = dict(version=version, moves=moves) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index fae501a..cc372bf 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -333,7 +333,7 @@ async def indexes( IndexListError: If retrieval fails. References: - - `list-all-indexes-of-a-collection `__ + - `list-all-indexes-of-a-collection `__ """ # noqa: E501 params: Params = dict(collection=self._name) if with_stats is not None: @@ -368,7 +368,7 @@ async def get_index(self, id: str | int) -> Result[IndexProperties]: IndexGetError: If retrieval fails. References: - `get-an-index `__ + `get-an-index `__ """ # noqa: E501 if isinstance(id, int): full_id = f"{self._name}/{id}" @@ -408,12 +408,12 @@ async def add_index( IndexCreateError: If index creation fails. References: - - `create-an-index `__ - - `create-a-persistent-index `__ - - `create-an-inverted-index `__ - - `create-a-ttl-index `__ - - `create-a-multi-dimensional-index `__ - - `create-a-geo-spatial-index `__ + - `create-an-index `__ + - `create-a-persistent-index `__ + - `create-an-inverted-index `__ + - `create-a-ttl-index `__ + - `create-a-multi-dimensional-index `__ + - `create-a-geo-spatial-index `__ """ # noqa: E501 options = options or {} request = Request( @@ -447,7 +447,7 @@ async def delete_index( IndexDeleteError: If deletion fails. References: - - `delete-an-index `__ + - `delete-an-index `__ """ # noqa: E501 if isinstance(id, int): full_id = f"{self._name}/{id}" @@ -478,7 +478,7 @@ async def load_indexes(self) -> Result[bool]: IndexLoadError: If loading fails. 
References: - - `load-collection-indexes-into-memory `__ + - `load-collection-indexes-into-memory `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -499,7 +499,7 @@ async def recalculate_count(self) -> None: CollectionRecalculateCountError: If re-calculation fails. References: - - `recalculate-the-document-count-of-a-collection `__ + - `recalculate-the-document-count-of-a-collection `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -522,7 +522,7 @@ async def properties(self) -> Result[CollectionProperties]: CollectionPropertiesError: If retrieval fails. References: - - `get-the-properties-of-a-collection `__ + - `get-the-properties-of-a-collection `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -572,7 +572,7 @@ async def configure( CollectionConfigureError: If configuration fails. References: - - `change-the-properties-of-a-collection `__ + - `change-the-properties-of-a-collection `__ """ # noqa: E501 data: Json = {} if cache_enabled is not None: @@ -617,7 +617,7 @@ async def rename(self, new_name: str) -> None: CollectionRenameError: If rename fails. References: - - `rename-a-collection `__ + - `rename-a-collection `__ """ # noqa: E501 data: Json = {"name": new_name} request = Request( @@ -644,7 +644,7 @@ async def compact(self) -> Result[CollectionInfo]: CollectionCompactError: If compaction fails. References: - - `compact-a-collection `__ + - `compact-a-collection `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -677,7 +677,7 @@ async def truncate( CollectionTruncateError: If truncation fails. References: - - `truncate-a-collection `__ + - `truncate-a-collection `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -707,7 +707,7 @@ async def count(self) -> Result[int]: DocumentCountError: If retrieval fails. References: - - `get-the-document-count-of-a-collection `__ + - `get-the-document-count-of-a-collection `__ """ # noqa: E501 request = Request( method=Method.GET, endpoint=f"/_api/collection/{self.name}/count" @@ -731,7 +731,7 @@ async def statistics(self) -> Result[CollectionStatistics]: CollectionStatisticsError: If retrieval fails. References: - - `get-the-collection-statistics `__ + - `get-the-collection-statistics `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -761,7 +761,7 @@ async def responsible_shard(self, document: Json) -> Result[str]: CollectionResponsibleShardError: If retrieval fails. References: - - `get-the-responsible-shard-for-a-document `__ + - `get-the-responsible-shard-for-a-document `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -793,7 +793,7 @@ async def shards(self, details: Optional[bool] = None) -> Result[Json]: CollectionShardsError: If retrieval fails. References: - - `get-the-shard-ids-of-a-collection `__ + - `get-the-shard-ids-of-a-collection `__ """ # noqa: E501 params: Params = {} if details is not None: @@ -822,7 +822,7 @@ async def revision(self) -> Result[str]: CollectionRevisionError: If retrieval fails. References: - - `get-the-collection-revision-id `__ + - `get-the-collection-revision-id `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -852,7 +852,7 @@ async def checksum( CollectionChecksumError: If retrieval fails. References: - - `get-the-collection-checksum `__ + - `get-the-collection-checksum `__ """ # noqa: E501 params: Params = {} if with_rev is not None: @@ -899,7 +899,7 @@ async def has( DocumentGetError: If retrieval fails. 
References: - - `get-a-document-header `__ + - `get-a-document-header `__ """ # noqa: E501 handle = self._get_doc_id(document) @@ -956,7 +956,7 @@ async def get_many( DocumentGetError: If retrieval fails. References: - - `get-multiple-documents `__ + - `get-multiple-documents `__ """ # noqa: E501 params: Params = {"onlyget": True} if ignore_revs is not None: @@ -1283,7 +1283,7 @@ async def insert_many( DocumentInsertError: If insertion fails. References: - - `create-multiple-documents `__ + - `create-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1373,7 +1373,7 @@ async def replace_many( DocumentReplaceError: If replacing fails. References: - - `replace-multiple-documents `__ + - `replace-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1466,7 +1466,7 @@ async def update_many( DocumentUpdateError: If update fails. References: - - `update-multiple-documents `__ + - `update-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1548,7 +1548,7 @@ async def delete_many( DocumentRemoveError: If removal fails. References: - - `remove-multiple-documents `__ + - `remove-multiple-documents `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1640,7 +1640,7 @@ async def import_bulk( DocumentInsertError: If import fails. References: - - `import-json-data-as-documents `__ + - `import-json-data-as-documents `__ """ # noqa: E501 params: Params = dict() params["collection"] = self.name @@ -1730,7 +1730,7 @@ async def get( DocumentParseError: If the document is malformed. References: - - `get-a-document `__ + - `get-a-document `__ """ # noqa: E501 handle = self._get_doc_id(document) @@ -1818,7 +1818,7 @@ async def insert( DocumentParseError: If the document is malformed. References: - - `create-a-document `__ + - `create-a-document `__ """ # noqa: E501 if isinstance(document, dict): document = cast(T, self._ensure_key_from_id(document)) @@ -1923,7 +1923,7 @@ async def update( DocumentUpdateError: If update fails. References: - - `update-a-document `__ + - `update-a-document `__ """ # noqa: E501 params: Params = {} if ignore_revs is not None: @@ -2017,7 +2017,7 @@ async def replace( DocumentReplaceError: If replace fails. References: - - `replace-a-document `__ + - `replace-a-document `__ """ # noqa: E501 params: Params = {} if ignore_revs is not None: @@ -2105,7 +2105,7 @@ async def delete( DocumentDeleteError: If deletion fails. References: - - `remove-a-document `__ + - `remove-a-document `__ """ # noqa: E501 handle = self._get_doc_id(cast(str | Json, document)) @@ -2232,7 +2232,7 @@ async def get( DocumentParseError: If the document is malformed. References: - - `get-a-vertex `__ + - `get-a-vertex `__ """ # noqa: E501 handle = self._get_doc_id(vertex) @@ -2294,7 +2294,7 @@ async def insert( DocumentParseError: If the document is malformed. References: - - `create-a-vertex `__ + - `create-a-vertex `__ """ # noqa: E501 if isinstance(vertex, dict): vertex = cast(T, self._ensure_key_from_id(vertex)) @@ -2359,7 +2359,7 @@ async def update( DocumentUpdateError: If update fails. References: - - `update-a-vertex `__ + - `update-a-vertex `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2434,7 +2434,7 @@ async def replace( DocumentReplaceError: If replace fails. 
References: - - `replace-a-vertex `__ + - `replace-a-vertex `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2506,7 +2506,7 @@ async def delete( DocumentDeleteError: If deletion fails. References: - - `remove-a-vertex `__ + - `remove-a-vertex `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2631,7 +2631,7 @@ async def get( DocumentParseError: If the document is malformed. References: - - `get-an-edge `__ + - `get-an-edge `__ """ # noqa: E501 handle = self._get_doc_id(edge) @@ -2694,7 +2694,7 @@ async def insert( DocumentParseError: If the document is malformed. References: - - `create-an-edge `__ + - `create-an-edge `__ """ # noqa: E501 if isinstance(edge, dict): edge = cast(T, self._ensure_key_from_id(edge)) @@ -2763,7 +2763,7 @@ async def update( DocumentUpdateError: If update fails. References: - - `update-an-edge `__ + - `update-an-edge `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2842,7 +2842,7 @@ async def replace( DocumentReplaceError: If replace fails. References: - - `replace-an-edge `__ + - `replace-an-edge `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2917,7 +2917,7 @@ async def delete( DocumentDeleteError: If deletion fails. References: - - `remove-an-edge `__ + - `remove-an-edge `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -2978,7 +2978,7 @@ async def edges( EdgeListError: If retrieval fails. References: - - `get-inbound-and-outbound-edges `__ + - `get-inbound-and-outbound-edges `__ """ # noqa: E501 params: Params = { "vertex": self._get_doc_id(vertex, validate=False), diff --git a/arangoasync/cursor.py b/arangoasync/cursor.py index 1e3cc6c..68ecdad 100644 --- a/arangoasync/cursor.py +++ b/arangoasync/cursor.py @@ -192,8 +192,8 @@ async def fetch(self, batch_id: Optional[str] = None) -> List[Any]: CursorStateError: If the cursor ID is not set. References: - - `read-the-next-batch-from-a-cursor `__ - - `read-a-batch-from-the-cursor-again `__ + - `read-the-next-batch-from-a-cursor `__ + - `read-a-batch-from-the-cursor-again `__ """ # noqa: E501 if self._id is None: raise CursorStateError("Cursor ID is not set") @@ -229,7 +229,7 @@ async def close(self, ignore_missing: bool = False) -> bool: CursorCloseError: If the cursor failed to close. References: - - `delete-a-cursor `__ + - `delete-a-cursor `__ """ # noqa: E501 if self._id is None: return False diff --git a/arangoasync/database.py b/arangoasync/database.py index 8e700e5..c1dc1b9 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -258,7 +258,7 @@ async def properties(self) -> Result[DatabaseProperties]: DatabasePropertiesError: If retrieval fails. References: - - `get-information-about-the-current-database `__ + - `get-information-about-the-current-database `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/database/current") @@ -281,7 +281,7 @@ async def status(self) -> Result[ServerStatusInformation]: ServerSatusError: If retrieval fails. References: - - `get-server-status-information `__ + - `get-server-status-information `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/status") @@ -305,7 +305,7 @@ async def databases(self) -> Result[List[str]]: DatabaseListError: If retrieval fails. 
References: - - `list-all-databases `__ + - `list-all-databases `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/database") @@ -333,7 +333,7 @@ async def databases_accessible_to_user(self) -> Result[List[str]]: DatabaseListError: If retrieval fails. References: - - `list-the-accessible-databases `__ + - `list-the-accessible-databases `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/database/user") @@ -417,7 +417,7 @@ async def create_database( DatabaseCreateError: If creation fails. References: - - `create-a-database `__ + - `create-a-database `__ """ # noqa: E501 data: Json = {"name": name} @@ -478,7 +478,7 @@ async def delete_database( DatabaseDeleteError: If deletion fails. References: - - `drop-a-database `__ + - `drop-a-database `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/database/{name}") @@ -533,7 +533,7 @@ async def collections( CollectionListError: If retrieval fails. References: - - `list-all-collections `__ + - `list-all-collections `__ """ # noqa: E501 params: Params = {} if exclude_system is not None: @@ -661,7 +661,7 @@ async def create_collection( CollectionCreateError: If the operation fails. References: - - `create-a-collection `__ + - `create-a-collection `__ """ # noqa: E501 data: Json = {"name": name} if col_type is not None: @@ -751,7 +751,7 @@ async def delete_collection( CollectionDeleteError: If the operation fails. References: - - `drop-a-collection `__ + - `drop-a-collection `__ """ # noqa: E501 params: Params = {} if is_system is not None: @@ -782,7 +782,7 @@ async def key_generators(self) -> Result[List[str]]: CollectionKeyGeneratorsError: If retrieval fails. References: - - `get-the-available-key-generators `__ + - `get-the-available-key-generators `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/key-generators") @@ -821,7 +821,7 @@ async def has_document( DocumentGetError: If retrieval fails. References: - - `get-a-document-header `__ + - `get-a-document-header `__ """ # noqa: E501 col = Collection.get_col_name(document) return await self.collection(col).has( @@ -858,7 +858,7 @@ async def document( DocumentParseError: If the document is malformed. References: - - `get-a-document `__ + - `get-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -927,7 +927,7 @@ async def insert_document( DocumentParseError: If the document is malformed. References: - - `create-a-document `__ + - `create-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection(collection) return await col.insert( @@ -998,7 +998,7 @@ async def update_document( DocumentUpdateError: If update fails. References: - - `update-a-document `__ + - `update-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -1063,7 +1063,7 @@ async def replace_document( DocumentReplaceError: If replace fails. References: - - `replace-a-document `__ + - `replace-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -1124,7 +1124,7 @@ async def delete_document( DocumentDeleteError: If deletion fails. 
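The collection and document endpoints referenced above are wrapped by `Database.create_collection` and the `StandardCollection` CRUD methods. A rough sketch of the round trip, under the assumption that `create_collection` returns the collection wrapper and `insert` returns the document metadata; the collection name and document values are illustrative only:

```python
async def document_example(db):
    # Create a collection, round-trip a document, then clean up.
    students = await db.create_collection("students")

    meta = await students.insert({"_key": "john", "name": "John Doe", "gpa": 3.7})
    doc = await students.get(meta["_key"])        # fetch it back by key
    await students.update({"_key": "john", "gpa": 3.8})
    await students.delete(doc)

    await db.delete_collection("students")
```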
References: - - `remove-a-document `__ + - `remove-a-document `__ """ # noqa: E501 col: StandardCollection[Json, Json, Jsons] = self.collection( Collection.get_col_name(document) @@ -1198,7 +1198,7 @@ async def graphs(self) -> Result[List[GraphProperties]]: GraphListError: If the operation fails. References: - - `list-all-graphs `__ + - `list-all-graphs `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/gharial") @@ -1253,7 +1253,7 @@ async def create_graph( GraphCreateError: If the operation fails. References: - - `create-a-graph `__ + - `create-a-graph `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: @@ -1315,7 +1315,7 @@ async def delete_graph( GraphDeleteError: If the operation fails. References: - - `drop-a-graph `__ + - `drop-a-graph `__ """ # noqa: E501 params: Params = {} if drop_collections is not None: @@ -1347,8 +1347,8 @@ async def view(self, name: str) -> Result[Json]: ViewGetError: If the operation fails. References: - - `read-properties-of-a-view `__ - - `get-the-properties-of-a-view `__ + - `read-properties-of-a-view `__ + - `get-the-properties-of-a-view `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/view/{name}/properties") @@ -1372,8 +1372,8 @@ async def view_info(self, name: str) -> Result[Json]: ViewGetError: If the operation fails. References: - - `get-information-about-a-view `_ - - `get-information-about-a-view `__ + - `get-information-about-a-view `_ + - `get-information-about-a-view `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/view/{name}") @@ -1394,8 +1394,8 @@ async def views(self) -> Result[Jsons]: ViewListError: If the operation fails. References: - - `list-all-views `__ - - `list-all-views `__ + - `list-all-views `__ + - `list-all-views `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/view") @@ -1427,8 +1427,8 @@ async def create_view( ViewCreateError: If the operation fails. References: - - `create-a-search-alias-view `__ - - `create-an-arangosearch-view `__ + - `create-a-search-alias-view `__ + - `create-an-arangosearch-view `__ """ # noqa: E501 data: Json = {"name": name, "type": view_type} if properties is not None: @@ -1461,8 +1461,8 @@ async def replace_view(self, name: str, properties: Json) -> Result[Json]: ViewReplaceError: If the operation fails. References: - - `replace-the-properties-of-a-search-alias-view `__ - - `replace-the-properties-of-an-arangosearch-view `__ + - `replace-the-properties-of-a-search-alias-view `__ + - `replace-the-properties-of-an-arangosearch-view `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -1491,8 +1491,8 @@ async def update_view(self, name: str, properties: Json) -> Result[Json]: ViewUpdateError: If the operation fails. References: - - `update-the-properties-of-a-search-alias-view `__ - - `update-the-properties-of-an-arangosearch-view `__ + - `update-the-properties-of-a-search-alias-view `__ + - `update-the-properties-of-an-arangosearch-view `__ """ # noqa: E501 request = Request( method=Method.PATCH, @@ -1518,8 +1518,8 @@ async def rename_view(self, name: str, new_name: str) -> None: ViewRenameError: If the operation fails. References: - - `rename-a-view `__ - - `rename-a-view `__ + - `rename-a-view `__ + - `rename-a-view `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -1551,8 +1551,8 @@ async def delete_view( ViewDeleteError: If the operation fails. 
References: - - `drop-a-view `__ - - `drop-a-view `__ + - `drop-a-view `__ + - `drop-a-view `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/view/{name}") @@ -1575,7 +1575,7 @@ async def analyzers(self) -> Result[Jsons]: AnalyzerListError: If the operation fails. References: - - `list-all-analyzers `__ + - `list-all-analyzers `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/analyzer") @@ -1597,7 +1597,7 @@ async def analyzer(self, name: str) -> Result[Json]: dict: Analyzer properties. References: - - `get-an-analyzer-definition `__ + - `get-an-analyzer-definition `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/analyzer/{name}") @@ -1632,7 +1632,7 @@ async def create_analyzer( AnalyzerCreateError: If the operation fails. References: - - `create-an-analyzer `__ + - `create-an-analyzer `__ """ # noqa: E501 data: Json = {"name": name, "type": analyzer_type} if properties is not None: @@ -1671,7 +1671,7 @@ async def delete_analyzer( AnalyzerDeleteError: If the operation fails. References: - - `remove-an-analyzer `__ + - `remove-an-analyzer `__ """ # noqa: E501 params: Params = {} if force is not None: @@ -1728,7 +1728,7 @@ async def user(self, username: str) -> Result[UserInfo]: UserGetError: If the operation fails. References: - - `get-a-user` `__ + - `get-a-user` `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/user/{username}") @@ -1757,7 +1757,7 @@ async def users(self) -> Result[Sequence[UserInfo]]: UserListError: If the operation fails. References: - - `list-available-users `__ + - `list-available-users `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/user") @@ -1792,7 +1792,7 @@ async def create_user(self, user: UserInfo | Json) -> Result[UserInfo]: await db.create_user({user="john", password="secret"}) References: - - `create-a-user `__ + - `create-a-user `__ """ # noqa: E501 if isinstance(user, dict): user = UserInfo(**user) @@ -1832,7 +1832,7 @@ async def replace_user(self, user: UserInfo | Json) -> Result[UserInfo]: UserReplaceError: If the operation fails. References: - - `replace-a-user `__ + - `replace-a-user `__ """ # noqa: E501 if isinstance(user, dict): user = UserInfo(**user) @@ -1872,7 +1872,7 @@ async def update_user(self, user: UserInfo | Json) -> Result[UserInfo]: UserUpdateError: If the operation fails. References: - - `update-a-user `__ + - `update-a-user `__ """ # noqa: E501 if isinstance(user, dict): user = UserInfo(**user) @@ -1917,7 +1917,7 @@ async def delete_user( UserDeleteError: If the operation fails. References: - - `remove-a-user `__ + - `remove-a-user `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/user/{username}") @@ -1945,7 +1945,7 @@ async def permissions(self, username: str, full: bool = True) -> Result[Json]: PermissionListError: If the operation fails. References: - - `list-a-users-accessible-databases `__ + - `list-a-users-accessible-databases `__ """ # noqa: 501 request = Request( method=Method.GET, @@ -1981,8 +1981,8 @@ async def permission( PermissionGetError: If the operation fails. References: - - `get-a-users-database-access-level `__ - - `get-a-users-collection-access-level `__ + - `get-a-users-database-access-level `__ + - `get-a-users-collection-access-level `__ """ # noqa: 501 endpoint = f"/_api/user/{username}/database/{database}" if collection is not None: @@ -2022,8 +2022,8 @@ async def update_permission( is `False`. 
References: - - `set-a-users-database-access-level `__ - - `set-a-users-collection-access-level `__ + - `set-a-users-database-access-level `__ + - `set-a-users-collection-access-level `__ """ # noqa: E501 endpoint = f"/_api/user/{username}/database/{database}" if collection is not None: @@ -2067,8 +2067,8 @@ async def reset_permission( is `False`. References: - - `clear-a-users-database-access-level `__ - - `clear-a-users-collection-access-level `__ + - `clear-a-users-database-access-level `__ + - `clear-a-users-collection-access-level `__ """ # noqa: E501 endpoint = f"/_api/user/{username}/database/{database}" if collection is not None: @@ -2098,7 +2098,7 @@ async def jwt_secrets(self) -> Result[Json]: JWTSecretListError: If the operation fails. References: - - `get-information-about-the-loaded-jwt-secrets `__ + - `get-information-about-the-loaded-jwt-secrets `__ """ # noqa: 501 request = Request(method=Method.GET, endpoint="/_admin/server/jwt") @@ -2120,7 +2120,7 @@ async def reload_jwt_secrets(self) -> Result[Json]: JWTSecretReloadError: If the operation fails. References: - - `hot-reload-the-jwt-secrets-from-disk `__ + - `hot-reload-the-jwt-secrets-from-disk `__ """ # noqa: 501 request = Request( method=Method.POST, endpoint="/_admin/server/jwt", prefix_needed=False @@ -2155,7 +2155,7 @@ async def create_access_token( AccessTokenCreateError: If the operation fails. References: - - `create-an-access-token `__ + - `create-an-access-token `__ """ # noqa: E501 data: Json = { "name": name, @@ -2187,7 +2187,7 @@ async def delete_access_token(self, user: str, token_id: int) -> None: AccessTokenDeleteError: If the operation fails. References: - - `delete-an-access-token `__ + - `delete-an-access-token `__ """ # noqa: E501 request = Request( method=Method.DELETE, endpoint=f"/_api/token/{user}/{token_id}" @@ -2212,7 +2212,7 @@ async def list_access_tokens(self, user: str) -> Result[Jsons]: AccessTokenListError: If the operation fails. References: - - `list-all-access-tokens `__ + - `list-all-access-tokens `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/token/{user}") @@ -2238,7 +2238,7 @@ async def tls(self) -> Result[Json]: ServerTLSError: If the operation fails. References: - - `get-the-tls-data `__ + - `get-the-tls-data `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/server/tls") @@ -2262,7 +2262,7 @@ async def reload_tls(self) -> Result[Json]: ServerTLSReloadError: If the operation fails. References: - - `reload-the-tls-data `__ + - `reload-the-tls-data `__ """ # noqa: E501 request = Request(method=Method.POST, endpoint="/_admin/server/tls") @@ -2287,7 +2287,7 @@ async def encryption(self) -> Result[Json]: ServerEncryptionError: If the operation fails. References: - - `rotate-the-encryption-keys `__ + - `rotate-the-encryption-keys `__ """ # noqa: E501 request = Request(method=Method.POST, endpoint="/_admin/server/encryption") @@ -2360,7 +2360,7 @@ async def execute_transaction( TransactionExecuteError: If the operation fails on the server side. References: - - `execute-a-javascript-transaction `__ + - `execute-a-javascript-transaction `__ """ # noqa: 501 m = "JavaScript Transactions are deprecated from ArangoDB v3.12.0 onward and will be removed in a future version." # noqa: E501 warn(m, DeprecationWarning, stacklevel=2) @@ -2411,7 +2411,7 @@ async def version(self, details: bool = False) -> Result[Json]: ServerVersionError: If the operation fails on the server side. 
References: - - `get-the-server-version `__ + - `get-the-server-version `__ """ # noqa: E501 request = Request( method=Method.GET, endpoint="/_api/version", params={"details": details} @@ -2434,7 +2434,7 @@ async def tasks(self) -> Result[Jsons]: TaskListError: If the list cannot be retrieved. References: - - `list-all-tasks `__ + - `list-all-tasks `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/tasks") @@ -2459,7 +2459,7 @@ async def task(self, task_id: str) -> Result[Json]: TaskGetError: If the task details cannot be retrieved. References: - - `get-a-task `__ + - `get-a-task `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/tasks/{task_id}") @@ -2499,8 +2499,8 @@ async def create_task( TaskCreateError: If the task cannot be created. References: - - `create-a-task `__ - - `create-a-task-with-id `__ + - `create-a-task `__ + - `create-a-task-with-id `__ """ # noqa: E501 data: Json = {"command": command} if name is not None: @@ -2553,7 +2553,7 @@ async def delete_task( TaskDeleteError: If the operation fails. References: - - `delete-a-task `__ + - `delete-a-task `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/tasks/{task_id}") @@ -2576,7 +2576,7 @@ async def engine(self) -> Result[Json]: ServerEngineError: If the operation fails. References: - - `get-the-storage-engine-type `__ + - `get-the-storage-engine-type `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_api/engine") @@ -2598,7 +2598,7 @@ async def time(self) -> Result[datetime]: ServerTimeError: If the operation fails. References: - - `get-the-system-time `__ + - `get-the-system-time `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/time") @@ -2621,7 +2621,7 @@ async def check_availability(self) -> Result[str]: ServerCheckAvailabilityError: If the operation fails. References: - - `check-server-availability `__ + - `check-server-availability `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -2649,7 +2649,7 @@ async def support_info(self) -> Result[Json]: DatabaseSupportInfoError: If the operation fails. References: - - `get-information-about-the-deployment `__ + - `get-information-about-the-deployment `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/support-info") @@ -2672,7 +2672,7 @@ async def options(self) -> Result[Json]: ServerCurrentOptionsGetError: If the operation fails. References: - - `get-the-startup-option-configuration `__ + - `get-the-startup-option-configuration `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/options") @@ -2694,7 +2694,7 @@ async def options_available(self) -> Result[Json]: ServerAvailableOptionsGetError: If the operation fails. References: - - `get-the-available-startup-options `__ + - `get-the-available-startup-options `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/options-description") @@ -2716,7 +2716,7 @@ async def mode(self) -> Result[str]: ServerModeError: If the operation fails. References: - - `return-whether-or-not-a-server-is-in-read-only-mode `__ + - `return-whether-or-not-a-server-is-in-read-only-mode `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/server/mode") @@ -2740,7 +2740,7 @@ async def set_mode(self, mode: str) -> Result[str]: ServerModeSetError: If the operation fails. 
References: - - `set-the-server-mode-to-read-only-or-default `__ + - `set-the-server-mode-to-read-only-or-default `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -2766,7 +2766,7 @@ async def license(self) -> Result[Json]: ServerLicenseGetError: If the operation fails. References: - - `get-information-about-the-current-license `__ + - `get-information-about-the-current-license `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/license") @@ -2790,7 +2790,7 @@ async def set_license(self, license: str, force: Optional[bool] = False) -> None ServerLicenseSetError: If the operation fails. References: - - `set-a-new-license `__ + - `set-a-new-license `__ """ # noqa: E501 params: Params = {} if force is not None: @@ -2819,7 +2819,7 @@ async def shutdown(self, soft: Optional[bool] = None) -> None: ServerShutdownError: If the operation fails. References: - - `start-the-shutdown-sequence `__ + - `start-the-shutdown-sequence `__ """ # noqa: E501 params: Params = {} if soft is not None: @@ -2847,7 +2847,7 @@ async def shutdown_progress(self) -> Result[Json]: ServerShutdownProgressError: If the operation fails. References: - - `query-the-soft-shutdown-progress `__ + - `query-the-soft-shutdown-progress `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint="/_admin/shutdown") @@ -2884,7 +2884,7 @@ async def compact( DatabaseCompactError: If the operation fails. References: - - `compact-all-databases `__ + - `compact-all-databases `__ """ # noqa: E501 data = {} if change_level is not None: @@ -2911,7 +2911,7 @@ async def reload_routing(self) -> None: ServerReloadRoutingError: If the operation fails. References: - - `reload-the-routing-table `__ + - `reload-the-routing-table `__ """ # noqa: E501 request = Request(method=Method.POST, endpoint="/_admin/routing/reload") @@ -2934,7 +2934,7 @@ async def echo(self, body: Optional[Json] = None) -> Result[Json]: ServerEchoError: If the operation fails. References: - - `echo-a-request `__ + - `echo-a-request `__ """ # noqa: E501 data = body if body is not None else {} request = Request(method=Method.POST, endpoint="/_admin/echo", data=data) @@ -2960,7 +2960,7 @@ async def execute(self, command: str) -> Result[Any]: ServerExecuteError: If the execution fails. References: - - `execute-a-script `__ + - `execute-a-script `__ """ # noqa: E501 request = Request( method=Method.POST, endpoint="/_admin/execute", data=command.encode("utf-8") @@ -3002,7 +3002,7 @@ async def metrics(self, server_id: Optional[str] = None) -> Result[str]: ServerMetricsError: If the operation fails. References: - - `metrics-api-v2 `__ + - `metrics-api-v2 `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3058,7 +3058,7 @@ async def read_log_entries( ServerReadLogError: If the operation fails. References: - - `get-the-global-server-logs `__ + - `get-the-global-server-logs `__ """ # noqa: E501 params: Params = {} if upto is not None: @@ -3110,7 +3110,7 @@ async def log_levels( ServerLogLevelError: If the operation fails. References: - - `get-the-server-log-levels `__ + - `get-the-server-log-levels `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3166,7 +3166,7 @@ async def set_log_levels( ServerLogLevelSetError: If the operation fails. 
References: - - `set-the-structured-log-settings `__ + - `set-the-structured-log-settings `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3207,7 +3207,7 @@ async def reset_log_levels(self, server_id: Optional[str] = None) -> Result[Json ServerLogLevelResetError: If the operation fails. References: - - `reset-the-server-log-levels `__ + - `reset-the-server-log-levels `__ """ # noqa: E501 params: Params = {} if server_id is not None: @@ -3238,7 +3238,7 @@ async def log_settings(self) -> Result[Json]: ServerLogSettingError: If the operation fails. References: - - `get-the-structured-log-settings `__ + - `get-the-structured-log-settings `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -3279,7 +3279,7 @@ async def set_log_settings(self, **kwargs: Dict[str, Any]) -> Result[Json]: ServerLogSettingSetError: If the operation fails. References: - - `set-the-structured-log-settings `__ + - `set-the-structured-log-settings `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -3306,7 +3306,7 @@ async def api_calls(self) -> Result[Json]: ServerApiCallsError: If the operation fails. References: - - `get-recent-api-calls `__ + - `get-recent-api-calls `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -3379,7 +3379,7 @@ async def begin_transaction( TransactionInitError: If the operation fails on the server side. References: - - `begin-a-stream-transaction `__ + - `begin-a-stream-transaction `__ """ # noqa: E501 collections = dict() if read is not None: @@ -3463,7 +3463,7 @@ async def async_jobs( AsyncJobListError: If retrieval fails. References: - - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ + - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ """ # noqa: E501 params: Params = {} if count is not None: @@ -3496,7 +3496,7 @@ async def clear_async_jobs(self, threshold: Optional[float] = None) -> None: AsyncJobClearError: If the operation fails. References: - - `delete-async-job-results `__ + - `delete-async-job-results `__ """ # noqa: E501 if threshold is None: request = Request(method=Method.DELETE, endpoint="/_api/job/all") @@ -3516,7 +3516,7 @@ def response_handler(resp: Response) -> None: class TransactionDatabase(Database): """Database API tailored specifically for - `Stream Transactions `__. + `Stream Transactions `__. It allows you start a transaction, run multiple operations (eg. AQL queries) over a short period of time, and then commit or abort the transaction. @@ -3551,7 +3551,7 @@ async def transaction_status(self) -> str: TransactionStatusError: If the transaction is not found. References: - - `get-the-status-of-a-stream-transaction `__ + - `get-the-status-of-a-stream-transaction `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -3573,7 +3573,7 @@ async def commit_transaction(self) -> None: TransactionCommitError: If the operation fails on the server side. References: - - `commit-a-stream-transaction `__ + - `commit-a-stream-transaction `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -3593,7 +3593,7 @@ async def abort_transaction(self) -> None: TransactionAbortError: If the operation fails on the server side. References: - - `abort-a-stream-transaction `__ + - `abort-a-stream-transaction `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -3620,7 +3620,7 @@ class AsyncDatabase(Database): and no results are stored on server. 
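The stream-transaction references above belong to `begin_transaction`, `commit_transaction`, and `abort_transaction`. A sketch of the intended flow, assuming `begin_transaction` accepts a collection name for `write` and returns a `TransactionDatabase`; the collection name and document are placeholders:

```python
async def transaction_example(db):
    # Begin a stream transaction that declares write access to "students",
    # operate through the returned TransactionDatabase, then commit or abort.
    txn_db = await db.begin_transaction(write="students")
    try:
        await txn_db.collection("students").insert({"name": "Jane Doe"})
        await txn_db.commit_transaction()
    except Exception:
        await txn_db.abort_transaction()
        raise
```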
References: - - `jobs `__ + - `jobs `__ """ # noqa: E501 def __init__(self, connection: Connection, return_result: bool) -> None: diff --git a/arangoasync/foxx.py b/arangoasync/foxx.py index fe02b41..0b068da 100644 --- a/arangoasync/foxx.py +++ b/arangoasync/foxx.py @@ -65,7 +65,7 @@ async def services(self, exclude_system: Optional[bool] = False) -> Result[Jsons FoxxServiceListError: If retrieval fails. References: - - `list-the-installed-services `__ + - `list-the-installed-services `__ """ # noqa: E501 params: Params = {} if exclude_system is not None: @@ -98,7 +98,7 @@ async def service(self, mount: str) -> Result[Json]: FoxxServiceGetError: If retrieval fails. References: - - `get-the-service-description `__ + - `get-the-service-description `__ """ # noqa: E501 params: Params = {"mount": mount} request = Request( @@ -142,7 +142,7 @@ async def create_service( FoxxServiceCreateError: If installation fails. References: - - `install-a-new-service-mode `__ + - `install-a-new-service-mode `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -189,7 +189,7 @@ async def delete_service( FoxxServiceDeleteError: If operations fails. References: - - `uninstall-a-service `__ + - `uninstall-a-service `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -236,7 +236,7 @@ async def replace_service( FoxxServiceReplaceError: If replacement fails. References: - - `replace-a-service `__ + - `replace-a-service `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -298,7 +298,7 @@ async def update_service( FoxxServiceUpdateError: If upgrade fails. References: - - `upgrade-a-service `__ + - `upgrade-a-service `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -345,7 +345,7 @@ async def config(self, mount: str) -> Result[Json]: FoxxConfigGetError: If retrieval fails. References: - - `get-the-configuration-options `__ + - `get-the-configuration-options `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -375,7 +375,7 @@ async def update_config(self, mount: str, options: Json) -> Result[Json]: FoxxConfigUpdateError: If update fails. References: - - `update-the-configuration-options `__ + - `update-the-configuration-options `__ """ # noqa: E501 request = Request( method=Method.PATCH, @@ -407,7 +407,7 @@ async def replace_config(self, mount: str, options: Json) -> Result[Json]: FoxxConfigReplaceError: If replace fails. References: - - `replace-the-configuration-options `__ + - `replace-the-configuration-options `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -437,7 +437,7 @@ async def dependencies(self, mount: str) -> Result[Json]: FoxxDependencyGetError: If retrieval fails. References: - - `get-the-dependency-options `__ + - `get-the-dependency-options `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -467,7 +467,7 @@ async def update_dependencies(self, mount: str, options: Json) -> Result[Json]: FoxxDependencyUpdateError: If update fails. References: - - `update-the-dependency-options `__ + - `update-the-dependency-options `__ """ # noqa: E501 request = Request( method=Method.PATCH, @@ -498,7 +498,7 @@ async def replace_dependencies(self, mount: str, options: Json) -> Result[Json]: FoxxDependencyReplaceError: If replace fails. References: - - `replace-the-dependency-options `__ + - `replace-the-dependency-options `__ """ # noqa: E501 request = Request( method=Method.PUT, @@ -528,7 +528,7 @@ async def scripts(self, mount: str) -> Result[Json]: FoxxScriptListError: If retrieval fails. 
References: - - `list-the-service-scripts `__ + - `list-the-service-scripts `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -561,7 +561,7 @@ async def run_script( FoxxScriptRunError: If script fails. References: - - `run-a-service-script `__ + - `run-a-service-script `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -612,7 +612,7 @@ async def run_tests( FoxxTestRunError: If test fails. References: - - `run-the-service-tests `__ + - `run-the-service-tests `__ """ # noqa: E501 params: Params = dict() params["mount"] = mount @@ -665,7 +665,7 @@ async def enable_development(self, mount: str) -> Result[Json]: FoxxDevModeEnableError: If the operation fails. References: - - `enable-the-development-mode `__ + - `enable-the-development-mode `__ """ # noqa: E501 request = Request( method=Method.POST, @@ -697,7 +697,7 @@ async def disable_development(self, mount: str) -> Result[Json]: FoxxDevModeDisableError: If the operation fails. References: - - `disable-the-development-mode `__ + - `disable-the-development-mode `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -726,7 +726,7 @@ async def readme(self, mount: str) -> Result[str]: FoxxReadmeGetError: If retrieval fails. References: - - `get-the-service-readme `__ + - `get-the-service-readme `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -754,7 +754,7 @@ async def swagger(self, mount: str) -> Result[Json]: FoxxSwaggerGetError: If retrieval fails. References: - - `get-the-swagger-description `__ + - `get-the-swagger-description `__ """ # noqa: E501 request = Request( method=Method.GET, endpoint="/_api/foxx/swagger", params={"mount": mount} @@ -785,7 +785,7 @@ async def download(self, mount: str) -> Result[bytes]: FoxxDownloadError: If download fails. References: - - `download-a-service-bundle `__ + - `download-a-service-bundle `__ """ # noqa: E501 request = Request( method=Method.POST, endpoint="/_api/foxx/download", params={"mount": mount} @@ -812,7 +812,7 @@ async def commit(self, replace: Optional[bool] = None) -> None: FoxxCommitError: If commit fails. References: - - `commit-the-local-service-state `__ + - `commit-the-local-service-state `__ """ # noqa: E501 params: Params = {} if replace is not None: diff --git a/arangoasync/graph.py b/arangoasync/graph.py index dbb9732..1fba982 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -93,7 +93,7 @@ async def properties(self) -> Result[GraphProperties]: GraphProperties: If the operation fails. References: - - `get-a-graph `__ + - `get-a-graph `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") @@ -132,7 +132,7 @@ async def vertex_collections(self) -> Result[List[str]]: VertexCollectionListError: If the operation fails. References: - - `list-vertex-collections `__ + - `list-vertex-collections `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -191,7 +191,7 @@ async def create_vertex_collection( VertexCollectionCreateError: If the operation fails. References: - - `add-a-vertex-collection `__ + - `add-a-vertex-collection `__ """ # noqa: E501 data: Json = {"collection": name} @@ -228,7 +228,7 @@ async def delete_vertex_collection(self, name: str, purge: bool = False) -> None VertexCollectionDeleteError: If the operation fails. References: - - `remove-a-vertex-collection `__ + - `remove-a-vertex-collection `__ """ # noqa: E501 request = Request( method=Method.DELETE, @@ -300,7 +300,7 @@ async def vertex( DocumentParseError: If the document is malformed. 
References: - - `get-a-vertex `__ + - `get-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(vertex) return await self.vertex_collection(col).get( @@ -337,7 +337,7 @@ async def insert_vertex( DocumentParseError: If the document is malformed. References: - - `create-a-vertex `__ + - `create-a-vertex `__ """ # noqa: E501 return await self.vertex_collection(collection).insert( vertex, @@ -379,7 +379,7 @@ async def update_vertex( DocumentUpdateError: If update fails. References: - - `update-a-vertex `__ + - `update-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, vertex)) return await self.vertex_collection(col).update( @@ -425,7 +425,7 @@ async def replace_vertex( DocumentReplaceError: If replace fails. References: - - `replace-a-vertex `__ + - `replace-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, vertex)) return await self.vertex_collection(col).replace( @@ -468,7 +468,7 @@ async def delete_vertex( DocumentDeleteError: If deletion fails. References: - - `remove-a-vertex `__ + - `remove-a-vertex `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, vertex)) return await self.vertex_collection(col).delete( @@ -551,7 +551,7 @@ async def edge_collections(self) -> Result[List[str]]: EdgeCollectionListError: If the operation fails. References: - - `list-edge-collections `__ + - `list-edge-collections `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -602,7 +602,7 @@ async def create_edge_definition( EdgeDefinitionCreateError: If the operation fails. References: - - `add-an-edge-definition `__ + - `add-an-edge-definition `__ """ # noqa: E501 data: Json = { "collection": edge_collection, @@ -659,7 +659,7 @@ async def replace_edge_definition( EdgeDefinitionReplaceError: If the operation fails. References: - - `replace-an-edge-definition `__ + - `replace-an-edge-definition `__ """ # noqa: E501 data: Json = { "collection": edge_collection, @@ -712,7 +712,7 @@ async def delete_edge_definition( EdgeDefinitionDeleteError: If the operation fails. References: - - `remove-an-edge-definition `__ + - `remove-an-edge-definition `__ """ # noqa: E501 params: Params = {} if drop_collections is not None: @@ -793,7 +793,7 @@ async def edge( DocumentParseError: If the document is malformed. References: - - `get-an-edge `__ + - `get-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(edge) return await self.edge_collection(col).get( @@ -832,7 +832,7 @@ async def insert_edge( DocumentParseError: If the document is malformed. References: - - `create-an-edge `__ + - `create-an-edge `__ """ # noqa: E501 return await self.edge_collection(collection).insert( edge, @@ -875,7 +875,7 @@ async def update_edge( DocumentUpdateError: If update fails. References: - - `update-an-edge `__ + - `update-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, edge)) return await self.edge_collection(col).update( @@ -923,7 +923,7 @@ async def replace_edge( DocumentReplaceError: If replace fails. References: - - `replace-an-edge `__ + - `replace-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, edge)) return await self.edge_collection(col).replace( @@ -967,7 +967,7 @@ async def delete_edge( DocumentDeleteError: If deletion fails. References: - - `remove-an-edge `__ + - `remove-an-edge `__ """ # noqa: E501 col = Collection.get_col_name(cast(Json | str, edge)) return await self.edge_collection(col).delete( @@ -1001,7 +1001,7 @@ async def edges( EdgeListError: If retrieval fails. 
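The graph endpoints referenced above map onto `create_graph`, `create_vertex_collection`, `create_edge_definition`, `insert_vertex`, and `insert_edge`. A sketch along the lines of the README's graph example; the graph, collection, and document names are illustrative, and the keyword names `from_vertex_collections`/`to_vertex_collections` are assumptions:

```python
async def graph_example(db):
    graph = await db.create_graph("school")

    # Vertex collections and the edge definition (relation) of the graph.
    await graph.create_vertex_collection("students")
    await graph.create_vertex_collection("lectures")
    await graph.create_edge_definition(
        "register",
        from_vertex_collections=["students"],
        to_vertex_collections=["lectures"],
    )

    # Insert two vertices and connect them with an edge.
    await graph.insert_vertex("students", {"_key": "01", "name": "Anna Smith"})
    await graph.insert_vertex("lectures", {"_key": "MAT101", "title": "Calculus"})
    await graph.insert_edge(
        "register",
        {"_from": "students/01", "_to": "lectures/MAT101"},
    )
```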
References: - - `get-inbound-and-outbound-edges `__ + - `get-inbound-and-outbound-edges `__ """ # noqa: E501 return await self.edge_collection(collection).edges( vertex, diff --git a/arangoasync/job.py b/arangoasync/job.py index 9f64764..bec3c6a 100644 --- a/arangoasync/job.py +++ b/arangoasync/job.py @@ -27,7 +27,7 @@ class AsyncJob(Generic[T]): response_handler: HTTP response handler References: - - `jobs `__ + - `jobs `__ """ # noqa: E501 def __init__( @@ -68,7 +68,7 @@ async def status(self) -> str: AsyncJobStatusError: If retrieval fails or the job is not found. References: - - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ + - `list-async-jobs-by-status-or-get-the-status-of-specific-job `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/job/{self._id}") response = await self._conn.send_request(request) @@ -101,7 +101,7 @@ async def result(self) -> T: is still pending. References: - - `get-the-results-of-an-async-job `__ + - `get-the-results-of-an-async-job `__ """ # noqa: E501 request = Request(method=Method.PUT, endpoint=f"/_api/job/{self._id}") response = await self._conn.send_request(request) @@ -142,7 +142,7 @@ async def cancel(self, ignore_missing: bool = False) -> bool: AsyncJobCancelError: If cancellation fails. References: - - `cancel-an-async-job `__ + - `cancel-an-async-job `__ """ # noqa: E501 request = Request(method=Method.PUT, endpoint=f"/_api/job/{self._id}/cancel") response = await self._conn.send_request(request) @@ -173,7 +173,7 @@ async def clear( AsyncJobClearError: If deletion fails. References: - - `delete-async-job-results `__ + - `delete-async-job-results `__ """ # noqa: E501 request = Request(method=Method.DELETE, endpoint=f"/_api/job/{self._id}") resp = await self._conn.send_request(request) diff --git a/arangoasync/replication.py b/arangoasync/replication.py index 436dc94..e495e89 100644 --- a/arangoasync/replication.py +++ b/arangoasync/replication.py @@ -64,7 +64,7 @@ async def inventory( ReplicationInventoryError: If retrieval fails. References: - - `get-a-replication-inventory `__ + - `get-a-replication-inventory `__ """ # noqa: E501 params: Params = dict() params["batchId"] = batch_id @@ -112,7 +112,7 @@ async def dump( ReplicationDumpError: If retrieval fails. References: - - `get-a-replication-dump `__ + - `get-a-replication-dump `__ """ # noqa: E501 params: Params = dict() params["collection"] = collection @@ -149,7 +149,7 @@ async def cluster_inventory( ReplicationClusterInventoryError: If retrieval fails. References: - - `get-the-cluster-collections-and-indexes `__ + - `get-the-cluster-collections-and-indexes `__ """ # noqa: E501 params: Params = {} if include_system is not None: @@ -179,7 +179,7 @@ async def logger_state(self) -> Result[Json]: ReplicationLoggerStateError: If retrieval fails. References: - - `get-the-replication-logger-state `__ + - `get-the-replication-logger-state `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -204,7 +204,7 @@ async def applier_config(self) -> Result[Json]: ReplicationApplierConfigError: If retrieval fails. References: - - `get-the-replication-applier-configuration `__ + - `get-the-replication-applier-configuration `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -229,7 +229,7 @@ async def applier_state(self) -> Result[Json]: ReplicationApplierStateError: If retrieval fails. 
References: - - `get-the-replication-applier-state `__ + - `get-the-replication-applier-state `__ """ # noqa: E501 request = Request( method=Method.GET, @@ -254,7 +254,7 @@ async def server_id(self) -> Result[str]: ReplicationServerIDError: If retrieval fails. References: - - `get-the-replication-server-id `__ + - `get-the-replication-server-id `__ """ # noqa: E501 request = Request( method=Method.GET, diff --git a/arangoasync/typings.py b/arangoasync/typings.py index d6adb4d..cd1c472 100644 --- a/arangoasync/typings.py +++ b/arangoasync/typings.py @@ -223,7 +223,7 @@ class KeyOptions(JsonWrapper): } References: - - `create-a-collection `__ + - `create-a-collection `__ """ # noqa: E501 def __init__( @@ -310,7 +310,7 @@ class CollectionInfo(JsonWrapper): } References: - - `get-the-collection-information `__ + - `get-the-collection-information `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -384,7 +384,7 @@ class UserInfo(JsonWrapper): } References: - - `create-a-user `__ + - `create-a-user `__ """ # noqa: E501 def __init__( @@ -484,7 +484,7 @@ class ServerStatusInformation(JsonWrapper): } References: - - `get-server-status-information `__ + - `get-server-status-information `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -543,7 +543,7 @@ class DatabaseProperties(JsonWrapper): """Properties of the database. References: - - `get-information-about-the-current-database `__ + - `get-information-about-the-current-database `__ """ # noqa: E501 def __init__(self, data: Json, strip_result: bool = False) -> None: @@ -650,7 +650,7 @@ class CollectionProperties(JsonWrapper): } References: - - `get-the-properties-of-a-collection `__ + - `get-the-properties-of-a-collection `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -870,7 +870,7 @@ class CollectionStatistics(JsonWrapper): } References: - - `get-the-collection-statistics `__ + - `get-the-collection-statistics `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -976,7 +976,7 @@ class IndexProperties(JsonWrapper): } References: - - `get-an-index `__ + - `get-an-index `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1253,7 +1253,7 @@ class QueryProperties(JsonWrapper): } References: - - `create-a-cursor `__ + - `create-a-cursor `__ """ # noqa: E501 def __init__( @@ -1414,7 +1414,7 @@ class QueryExecutionPlan(JsonWrapper): """The execution plan of an AQL query. References: - - `plan `__ + - `plan `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1468,7 +1468,7 @@ class QueryExecutionProfile(JsonWrapper): } References: - - `profile `__ + - `profile `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1536,7 +1536,7 @@ class QueryExecutionStats(JsonWrapper): } References: - - `stats `__ + - `stats `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1615,7 +1615,7 @@ class QueryExecutionExtra(JsonWrapper): """Extra information about the query result. 
References: - - `extra `__ + - `extra `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1659,7 +1659,7 @@ class QueryTrackingConfiguration(JsonWrapper): } References: - - `get-the-aql-query-tracking-configuration `__ + - `get-the-aql-query-tracking-configuration `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1718,7 +1718,7 @@ class QueryExplainOptions(JsonWrapper): } References: - - `explain-an-aql-query `__ + - `explain-an-aql-query `__ """ # noqa: E501 def __init__( @@ -1764,8 +1764,8 @@ class QueryCacheProperties(JsonWrapper): } References: - - `get-the-aql-query-results-cache-configuration `__ - - `set-the-aql-query-results-cache-configuration `__ + - `get-the-aql-query-results-cache-configuration `__ + - `set-the-aql-query-results-cache-configuration `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1818,9 +1818,9 @@ class GraphProperties(JsonWrapper): } References: - - `get-a-graph `__ - - `list-all-graphs `__ - - `create-a-graph `__ + - `get-a-graph `__ + - `list-all-graphs `__ + - `create-a-graph `__ """ # noqa: E501 def __init__(self, data: Json) -> None: @@ -1927,7 +1927,7 @@ class GraphOptions(JsonWrapper): graph. References: - - `create-a-graph `__ + - `create-a-graph `__ """ # noqa: E501 def __init__( @@ -1982,7 +1982,7 @@ class VertexCollectionOptions(JsonWrapper): be a string and a valid collection name. References: - - `add-a-vertex-collection `__ + - `add-a-vertex-collection `__ """ # noqa: E501 def __init__( @@ -2009,7 +2009,7 @@ class EdgeDefinitionOptions(JsonWrapper): be a string and a valid collection name. References: - - `add-an-edge-definition `__ + - `add-an-edge-definition `__ """ # noqa: E501 def __init__( @@ -2043,7 +2043,7 @@ class AccessToken(JsonWrapper): } References: - - `create-an-access-token `__ + - `create-an-access-token `__ """ # noqa: E501 def __init__(self, data: Json) -> None: diff --git a/arangoasync/version.py b/arangoasync/version.py index 92192ee..68cdeee 100644 --- a/arangoasync/version.py +++ b/arangoasync/version.py @@ -1 +1 @@ -__version__ = "1.0.4" +__version__ = "1.0.5" diff --git a/docs/certificates.rst b/docs/certificates.rst index c0ba7af..f8fa1e5 100644 --- a/docs/certificates.rst +++ b/docs/certificates.rst @@ -129,4 +129,4 @@ See the `ArangoDB Manual`_ for more information on security features. # Reload TLS data tls = await db.reload_tls() -.. _ArangoDB Manual: https://docs.arango.ai/stable/develop/http-api/security/ +.. _ArangoDB Manual: https://docs.arango.ai/arangodb/arangodb/stable/develop/http-api/security/ diff --git a/docs/overview.rst b/docs/overview.rst index 77c0fc7..38ecfd7 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -64,7 +64,7 @@ You may also use the client without a context manager, but you must ensure to cl Another example with `graphs`_: -.. _graphs: https://docs.arango.ai/stable/graphs/ +.. _graphs: https://docs.arango.ai/arangodb/stable/graphs/ .. code-block:: python diff --git a/docs/view.rst b/docs/view.rst index 3a1ef06..5ab61e9 100644 --- a/docs/view.rst +++ b/docs/view.rst @@ -63,7 +63,7 @@ management, refer to `ArangoDB Manual`_. For more information on the content of view **properties**, see `Search Alias Views`_ and `Arangosearch Views`_. -.. _Search Alias Views: https://docs.arango.ai/stable/develop/http-api/views/search-alias-views/ -.. _Arangosearch Views: https://docs.arango.ai/stable/develop/http-api/views/arangosearch-views/ +.. _Search Alias Views: https://docs.arango.ai/arangodb/stable/develop/http-api/views/search-alias-views/ +.. 
_Arangosearch Views: https://docs.arango.ai/arangodb/stable/develop/http-api/views/arangosearch-views/ Refer to :class:`arangoasync.database.StandardDatabase` class for API specification.