From 741e3a3048fa628ee1737671de14c61ee374facd Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 11 May 2025 19:39:20 +0000 Subject: [PATCH 1/3] Basic graph support (#51) --- arangoasync/database.py | 175 ++++++++++++++++++++++++++++++++++++++ arangoasync/exceptions.py | 12 +++ arangoasync/graph.py | 21 +++++ arangoasync/typings.py | 128 ++++++++++++++++++++++++++-- tests/test_graph.py | 37 ++++++++ tests/test_typings.py | 38 +++++++++ 6 files changed, 406 insertions(+), 5 deletions(-) create mode 100644 arangoasync/graph.py create mode 100644 tests/test_graph.py diff --git a/arangoasync/database.py b/arangoasync/database.py index e1200df..60f6ee9 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -23,6 +23,9 @@ DatabaseDeleteError, DatabaseListError, DatabasePropertiesError, + GraphCreateError, + GraphDeleteError, + GraphListError, JWTSecretListError, JWTSecretReloadError, PermissionGetError, @@ -50,6 +53,7 @@ DefaultApiExecutor, TransactionApiExecutor, ) +from arangoasync.graph import Graph from arangoasync.request import Method, Request from arangoasync.response import Response from arangoasync.result import Result @@ -58,6 +62,8 @@ CollectionInfo, CollectionType, DatabaseProperties, + GraphOptions, + GraphProperties, Json, Jsons, KeyOptions, @@ -655,6 +661,175 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) + def graph(self, name: str) -> Graph: + """Return the graph API wrapper. + + Args: + name (str): Graph name. + + Returns: + Graph: Graph API wrapper. + """ + return Graph(self._executor, name) + + async def has_graph(self, name: str) -> Result[bool]: + """Check if a graph exists in the database. + + Args: + name (str): Graph name. + + Returns: + bool: True if the graph exists, False otherwise. + + Raises: + GraphListError: If the operation fails. + """ + request = Request(method=Method.GET, endpoint=f"/_api/gharial/{name}") + + def response_handler(resp: Response) -> bool: + if resp.is_success: + return True + if resp.status_code == HTTP_NOT_FOUND: + return False + raise GraphListError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def graphs(self) -> Result[List[GraphProperties]]: + """List all graphs stored in the database. + + Returns: + list: Graph properties. + + Raises: + GraphListError: If the operation fails. + + References: + - `list-all-graphs `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_api/gharial") + + def response_handler(resp: Response) -> List[GraphProperties]: + if not resp.is_success: + raise GraphListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return [GraphProperties(u) for u in body["graphs"]] + + return await self._executor.execute(request, response_handler) + + async def create_graph( + self, + name: str, + edge_definitions: Optional[Sequence[Json]] = None, + is_disjoint: Optional[bool] = None, + is_smart: Optional[bool] = None, + options: Optional[GraphOptions | Json] = None, + orphan_collections: Optional[Sequence[str]] = None, + wait_for_sync: Optional[bool] = None, + ) -> Result[Graph]: + """Create a new graph. + + Args: + name (str): Graph name. + edge_definitions (list | None): List of edge definitions, where each edge + definition entry is a dictionary with fields "collection" (name of the + edge collection), "from" (list of vertex collection names) and "to" + (list of vertex collection names). 
+ is_disjoint (bool | None): Whether to create a Disjoint SmartGraph + instead of a regular SmartGraph (Enterprise Edition only). + is_smart (bool | None): Define if the created graph should be smart + (Enterprise Edition only). + options (GraphOptions | dict | None): Options for creating collections + within this graph. + orphan_collections (list | None): An array of additional vertex + collections. Documents in these collections do not have edges + within this graph. + wait_for_sync (bool | None): If `True`, wait until everything is + synced to disk. + + Returns: + Graph: Graph API wrapper. + + Raises: + GraphCreateError: If the operation fails. + + References: + - `create-a-graph `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + + data: Json = {"name": name} + if edge_definitions is not None: + data["edgeDefinitions"] = edge_definitions + if is_disjoint is not None: + data["isDisjoint"] = is_disjoint + if is_smart is not None: + data["isSmart"] = is_smart + if options is not None: + if isinstance(options, GraphOptions): + data["options"] = options.to_dict() + else: + data["options"] = options + if orphan_collections is not None: + data["orphanCollections"] = orphan_collections + + request = Request( + method=Method.POST, + endpoint="/_api/gharial", + data=self.serializer.dumps(data), + params=params, + ) + + def response_handler(resp: Response) -> Graph: + if resp.is_success: + return Graph(self._executor, name) + raise GraphCreateError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def delete_graph( + self, + name: str, + drop_collections: Optional[bool] = None, + ignore_missing: bool = False, + ) -> Result[bool]: + """Drops an existing graph object by name. + + Args: + name (str): Graph name. + drop_collections (bool | None): Optionally all collections not used by + other graphs can be dropped as well. + ignore_missing (bool): Do not raise an exception on missing graph. + + Returns: + bool: True if the graph was deleted successfully, `False` if the + graph was not found but **ignore_missing** was set to `True`. + + Raises: + GraphDeleteError: If the operation fails. + + References: + - `drop-a-graph `__ + """ # noqa: E501 + params: Params = {} + if drop_collections is not None: + params["dropCollections"] = drop_collections + + request = Request( + method=Method.DELETE, endpoint=f"/_api/gharial/{name}", params=params + ) + + def response_handler(resp: Response) -> bool: + if not resp.is_success: + if resp.status_code == HTTP_NOT_FOUND and ignore_missing: + return False + raise GraphDeleteError(resp, request) + return True + + return await self._executor.execute(request, response_handler) + async def has_user(self, username: str) -> Result[bool]: """Check if a user exists. 
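For a quick end-to-end picture of the graph management API added to `database.py` above, here is a usage sketch (the connection details, credentials and the "test" database are illustrative, not part of the patch):

```python
from arangoasync import ArangoClient
from arangoasync.auth import Auth


async def main():
    async with ArangoClient(hosts="http://localhost:8529") as client:
        db = await client.db("test", auth=Auth(username="root", password="passwd"))

        # Create the graph only if it does not exist yet.
        if await db.has_graph("school"):
            graph = db.graph("school")
        else:
            graph = await db.create_graph("school", wait_for_sync=True)

        # List the properties of all graphs in the database.
        print([g.name for g in await db.graphs()])

        # Drop the graph again; ignore_missing suppresses the not-found error.
        await db.delete_graph("school", drop_collections=True, ignore_missing=True)
```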
diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py
index 1274df2..a62e64e 100644
--- a/arangoasync/exceptions.py
+++ b/arangoasync/exceptions.py
@@ -263,6 +263,18 @@ class DocumentUpdateError(ArangoServerError):
     """Failed to update document."""
 
 
+class GraphCreateError(ArangoServerError):
+    """Failed to create the graph."""
+
+
+class GraphDeleteError(ArangoServerError):
+    """Failed to delete the graph."""
+
+
+class GraphListError(ArangoServerError):
+    """Failed to retrieve graphs."""
+
+
 class IndexCreateError(ArangoServerError):
     """Failed to create collection index."""
 
diff --git a/arangoasync/graph.py b/arangoasync/graph.py
new file mode 100644
index 0000000..2047d96
--- /dev/null
+++ b/arangoasync/graph.py
@@ -0,0 +1,21 @@
+from arangoasync.executor import ApiExecutor
+
+
+class Graph:
+    """Graph API wrapper, representing a graph in ArangoDB.
+
+    Args:
+        executor: API executor. Required to execute the API requests.
+    """
+
+    def __init__(self, executor: ApiExecutor, name: str) -> None:
+        self._executor = executor
+        self._name = name
+
+    def __repr__(self) -> str:
+        return f"<Graph {self._name}>"
+
+    @property
+    def name(self) -> str:
+        """Name of the graph."""
+        return self._name
diff --git a/arangoasync/typings.py b/arangoasync/typings.py
index 44631f8..86c32fd 100644
--- a/arangoasync/typings.py
+++ b/arangoasync/typings.py
@@ -167,6 +167,14 @@ def items(self) -> Iterator[Tuple[str, Any]]:
         """Return an iterator over the dictionary’s key-value pairs."""
         return iter(self._data.items())
 
+    def keys(self) -> Iterator[str]:
+        """Return an iterator over the dictionary’s keys."""
+        return iter(self._data.keys())
+
+    def values(self) -> Iterator[Any]:
+        """Return an iterator over the dictionary’s values."""
+        return iter(self._data.values())
+
     def to_dict(self) -> Json:
         """Return the dictionary."""
         return self._data
@@ -227,15 +235,15 @@ def __init__(
         data: Optional[Json] = None,
     ) -> None:
         if data is None:
-            data = {
+            data: Json = {  # type: ignore[no-redef]
                 "allowUserKeys": allow_user_keys,
                 "type": generator_type,
             }
             if increment is not None:
-                data["increment"] = increment
+                data["increment"] = increment  # type: ignore[index]
             if offset is not None:
-                data["offset"] = offset
-        super().__init__(data)
+                data["offset"] = offset  # type: ignore[index]
+        super().__init__(cast(Json, data))
 
     def validate(self) -> None:
         """Validate key options."""
@@ -386,7 +394,7 @@ def __init__(
         active: bool = True,
         extra: Optional[Json] = None,
     ) -> None:
-        data = {"user": user, "active": active}
+        data: Json = {"user": user, "active": active}
         if password is not None:
             data["password"] = password
         if extra is not None:
@@ -1644,3 +1652,113 @@ def max_entry_size(self) -> int:
     @property
     def include_system(self) -> bool:
         return cast(bool, self._data.get("includeSystem", False))
+
+
+class GraphProperties(JsonWrapper):
+    """Graph properties.
+
+    Example:
+        .. code-block:: json
+
+            {
+                "_key" : "myGraph",
+                "edgeDefinitions" : [
+                    {
+                        "collection" : "edges",
+                        "from" : [
+                            "startVertices"
+                        ],
+                        "to" : [
+                            "endVertices"
+                        ]
+                    }
+                ],
+                "orphanCollections" : [ ],
+                "_rev" : "_jJdpHEy--_",
+                "_id" : "_graphs/myGraph",
+                "name" : "myGraph"
+            }
+
+    References:
+        - `get-a-graph <https://docs.arangodb.com/stable/develop/http-api/graphs/named-graphs/#get-a-graph>`__
+        - `list-all-graphs <https://docs.arangodb.com/stable/develop/http-api/graphs/named-graphs/#list-all-graphs>`__
+        - `create-a-graph <https://docs.arangodb.com/stable/develop/http-api/graphs/named-graphs/#create-a-graph>`__
+    """  # noqa: E501
+
+    def __init__(self, data: Json) -> None:
+        super().__init__(data)
+
+    @property
+    def name(self) -> str:
+        return cast(str, self._data["name"])
+
+    @property
+    def edge_definitions(self) -> Jsons:
+        return cast(Jsons, self._data.get("edgeDefinitions", list()))
+
+    @property
+    def orphan_collections(self) -> List[str]:
+        return cast(List[str], self._data.get("orphanCollections", list()))
+
+
+class GraphOptions(JsonWrapper):
+    """Special options for graph creation.
+
+    Args:
+        number_of_shards (int): The number of shards that is used for every
+            collection within this graph. Cannot be modified later.
+        replication_factor (int | str): The replication factor used when initially
+            creating collections for this graph. Can be set to "satellite" to create
+            a SatelliteGraph, which then ignores `numberOfShards`,
+            `minReplicationFactor`, and `writeConcern` (Enterprise Edition only).
+        satellites (list[str] | None): An array of collection names that is used to
+            create SatelliteCollections for a (Disjoint) SmartGraph using
+            SatelliteCollections (Enterprise Edition only). Each array element must
+            be a string and a valid collection name.
+        smart_graph_attribute (str | None): The attribute name that is used to
+            smartly shard the vertices of a graph. Only available in
+            Enterprise Edition.
+        write_concern (int | None): The write concern for new collections in the
+            graph.
+    """  # noqa: E501
+
+    def __init__(
+        self,
+        number_of_shards: Optional[int],
+        replication_factor: Optional[int | str],
+        satellites: Optional[List[str]],
+        smart_graph_attribute: Optional[str],
+        write_concern: Optional[int],
+    ) -> None:
+        data: Json = dict()
+        if number_of_shards is not None:
+            data["numberOfShards"] = number_of_shards
+        if replication_factor is not None:
+            data["replicationFactor"] = replication_factor
+        if satellites is not None:
+            data["satellites"] = satellites
+        if smart_graph_attribute is not None:
+            data["smartGraphAttribute"] = smart_graph_attribute
+        if write_concern is not None:
+            data["writeConcern"] = write_concern
+        super().__init__(data)
+
+    @property
+    def number_of_shards(self) -> Optional[int]:
+        return cast(int, self._data.get("numberOfShards"))
+
+    @property
+    def replication_factor(self) -> Optional[int | str]:
+        return cast(int | str, self._data.get("replicationFactor"))
+
+    @property
+    def satellites(self) -> Optional[List[str]]:
+        return cast(Optional[List[str]], self._data.get("satellites"))
+
+    @property
+    def smart_graph_attribute(self) -> Optional[str]:
+        return cast(Optional[str], self._data.get("smartGraphAttribute"))
+
+    @property
+    def write_concern(self) -> Optional[int]:
+        return cast(Optional[int], self._data.get("writeConcern"))
diff --git a/tests/test_graph.py b/tests/test_graph.py
new file mode 100644
index 0000000..0967ff9
--- /dev/null
+++ b/tests/test_graph.py
@@ -0,0 +1,37 @@
+import pytest
+
+from arangoasync.exceptions import GraphCreateError, GraphDeleteError, GraphListError
+
+
+@pytest.mark.asyncio
+async def test_graph_basic(db, bad_db):
+    # Test the graph representation
+    graph = db.graph("test_graph")
+    assert graph.name == "test_graph"
+    assert "test_graph" in repr(graph)
+
+ # Cannot find any graph + assert await db.graphs() == [] + assert await db.has_graph("fake_graph") is False + with pytest.raises(GraphListError): + await bad_db.has_graph("fake_graph") + with pytest.raises(GraphListError): + await bad_db.graphs() + + # Create a graph + graph = await db.create_graph("test_graph", wait_for_sync=True) + assert graph.name == "test_graph" + with pytest.raises(GraphCreateError): + await bad_db.create_graph("test_graph") + + # Check if the graph exists + assert await db.has_graph("test_graph") is True + graphs = await db.graphs() + assert len(graphs) == 1 + assert graphs[0].name == "test_graph" + + # Delete the graph + await db.delete_graph("test_graph") + assert await db.has_graph("test_graph") is False + with pytest.raises(GraphDeleteError): + await bad_db.delete_graph("test_graph") diff --git a/tests/test_typings.py b/tests/test_typings.py index 9d8e2d5..7a40c33 100644 --- a/tests/test_typings.py +++ b/tests/test_typings.py @@ -4,6 +4,8 @@ CollectionInfo, CollectionStatus, CollectionType, + GraphOptions, + GraphProperties, JsonWrapper, KeyOptions, QueryCacheProperties, @@ -23,6 +25,9 @@ def test_basic_wrapper(): assert wrapper["a"] == 1 assert wrapper["b"] == 2 + assert list(wrapper.keys()) == ["a", "b"] + assert list(wrapper.values()) == [1, 2] + wrapper["c"] = 3 assert wrapper["c"] == 3 @@ -330,3 +335,36 @@ def test_QueryCacheProperties(): assert cache_properties._data["maxResults"] == 128 assert cache_properties._data["maxEntrySize"] == 1024 assert cache_properties._data["includeSystem"] is False + + +def test_GraphProperties(): + data = { + "name": "myGraph", + "edgeDefinitions": [ + {"collection": "edges", "from": ["vertices1"], "to": ["vertices2"]} + ], + "orphanCollections": ["orphan1", "orphan2"], + } + graph_properties = GraphProperties(data) + + assert graph_properties.name == "myGraph" + assert graph_properties.edge_definitions == [ + {"collection": "edges", "from": ["vertices1"], "to": ["vertices2"]} + ] + assert graph_properties.orphan_collections == ["orphan1", "orphan2"] + + +def test_GraphOptions(): + graph_options = GraphOptions( + number_of_shards=3, + replication_factor=2, + satellites=["satellite1", "satellite2"], + smart_graph_attribute="region", + write_concern=1, + ) + + assert graph_options.number_of_shards == 3 + assert graph_options.replication_factor == 2 + assert graph_options.satellites == ["satellite1", "satellite2"] + assert graph_options.smart_graph_attribute == "region" + assert graph_options.write_concern == 1 From db0a397e873fdf683da505199d9f61d771bed964 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Jun 2025 12:26:48 +0300 Subject: [PATCH 2/3] Graph Collections (#52) * Highlighting boolean values * Adding vertex and edge collection skeleton * Refactoring serializers * Using randomized graph name * Improving helper types * Facilitating edge and vertex collection creation * Vertex collection management * Edge collection management * Adding cluster testcase * Adding note about dictionary-like indexing * Inserting and retrieving vertex documents * Moving methods from StandardCollection to base Collection so they are available to other subclasses * Adding CRUD for vertex collections * Adding "has" for vertex collections * Marking tests as asyncio * Inserting and retrieving edges * Event loop scope * Event loop scope again * Updating edge * Edges CRUD * Extra edge methods * Fixing lint * Added github gist example * Adding graph docs * Adding graphs example in the readme --- README.md | 60 + arangoasync/collection.py 
| 2507 +++++++++++++++++++++++++------
 arangoasync/database.py   |  102 +-
 arangoasync/exceptions.py |   40 +
 arangoasync/graph.py      | 1035 ++++++++++++++++-
 arangoasync/typings.py    |  134 +-
 docs/collection.rst       |    6 +-
 docs/document.rst         |   20 +
 docs/graph.rst            |  415 ++++++
 docs/index.rst            |    1 +
 docs/overview.rst         |   64 +-
 docs/serialization.rst    |    6 +
 docs/specs.rst            |    3 +
 tests/conftest.py         |   16 +-
 tests/helpers.py          |    9 +
 tests/test_graph.py       |  395 +++++-
 tests/test_typings.py     |   18 +
 17 files changed, 3986 insertions(+), 845 deletions(-)
 create mode 100644 docs/graph.rst

diff --git a/README.md b/README.md
index 4f6cd2b..507c3e9 100644
--- a/README.md
+++ b/README.md
@@ -73,7 +73,67 @@ async def main():
         student_names = []
         async for doc in cursor:
             student_names.append(doc["name"])
+```
+
+Another example with [graphs](https://docs.arangodb.com/stable/graphs/):
+```python
+async def main():
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # Get the API wrapper for graph "school".
+        if await db.has_graph("school"):
+            graph = db.graph("school")
+        else:
+            graph = await db.create_graph("school")
+
+        # Create vertex collections for the graph.
+        students = await graph.create_vertex_collection("students")
+        lectures = await graph.create_vertex_collection("lectures")
+
+        # Create an edge definition (relation) for the graph.
+        edges = await graph.create_edge_definition(
+            edge_collection="register",
+            from_vertex_collections=["students"],
+            to_vertex_collections=["lectures"]
+        )
+
+        # Insert vertex documents into "students" (from) vertex collection.
+        await students.insert({"_key": "01", "full_name": "Anna Smith"})
+        await students.insert({"_key": "02", "full_name": "Jake Clark"})
+        await students.insert({"_key": "03", "full_name": "Lisa Jones"})
+
+        # Insert vertex documents into "lectures" (to) vertex collection.
+        await lectures.insert({"_key": "MAT101", "title": "Calculus"})
+        await lectures.insert({"_key": "STA101", "title": "Statistics"})
+        await lectures.insert({"_key": "CSC101", "title": "Algorithms"})
+
+        # Insert edge documents into "register" edge collection.
+        await edges.insert({"_from": "students/01", "_to": "lectures/MAT101"})
+        await edges.insert({"_from": "students/01", "_to": "lectures/STA101"})
+        await edges.insert({"_from": "students/01", "_to": "lectures/CSC101"})
+        await edges.insert({"_from": "students/02", "_to": "lectures/MAT101"})
+        await edges.insert({"_from": "students/02", "_to": "lectures/STA101"})
+        await edges.insert({"_from": "students/03", "_to": "lectures/CSC101"})
+
+        # Traverse the graph in outbound direction, breadth-first.
+        query = """
+            FOR v, e, p IN 1..3 OUTBOUND 'students/01' GRAPH 'school'
+            OPTIONS { bfs: true, uniqueVertices: 'global' }
+            RETURN {vertex: v, edge: e, path: p}
+        """
+
+        async with await db.aql.execute(query) as cursor:
+            async for doc in cursor:
+                print(doc)
 ```
 
 Please see the [documentation](https://python-arango-async.readthedocs.io/en/latest/) for more details.
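As the commit notes above mention, the `collection.py` rework below moves shared document CRUD out of `StandardCollection` and into the base `Collection` class, so the new vertex and edge collection wrappers inherit it. A rough sketch of the bulk helpers this enables (the `db` handle and the "students" collection are illustrative):

```python
async def demo(db):
    students = db.collection("students")

    # Bulk insert: per-document failures come back as error objects in the
    # result list instead of being raised.
    results = await students.insert_many(
        [{"_key": "01", "name": "Anna"}, {"_key": "02", "name": "Jake"}]
    )
    errors = [r for r in results if r.get("error")]

    # Bulk fetch by key: missing documents are simply omitted.
    docs = await students.get_many(["01", "02", "does-not-exist"])

    # Filtered retrieval through the new find() helper.
    cursor = await students.find({"name": "Anna"}, limit=10)
    async for doc in cursor:
        print(doc["_key"])
```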
diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 3b4e5a9..c742714 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -1,7 +1,12 @@ -__all__ = ["Collection", "StandardCollection"] +__all__ = [ + "Collection", + "EdgeCollection", + "StandardCollection", + "VertexCollection", +] -from typing import Any, Generic, List, Optional, Sequence, Tuple, TypeVar, cast +from typing import Any, Generic, List, Literal, Optional, Sequence, TypeVar, cast from arangoasync.cursor import Cursor from arangoasync.errno import ( @@ -21,6 +26,7 @@ DocumentReplaceError, DocumentRevisionError, DocumentUpdateError, + EdgeListError, IndexCreateError, IndexDeleteError, IndexGetError, @@ -70,6 +76,26 @@ def __init__( self._doc_deserializer = doc_deserializer self._id_prefix = f"{self._name}/" + @staticmethod + def get_col_name(doc: str | Json) -> str: + """Extract the collection name from the document. + + Args: + doc (str | dict): Document ID or body with "_id" field. + + Returns: + str: Collection name. + + Raises: + DocumentParseError: If document ID is missing. + """ + try: + doc_id: str = doc if isinstance(doc, str) else doc["_id"] + except KeyError: + raise DocumentParseError('field "_id" required') + else: + return doc_id.split("/", 1)[0] + def _validate_id(self, doc_id: str) -> str: """Check the collection name in the document ID. @@ -86,11 +112,13 @@ def _validate_id(self, doc_id: str) -> str: raise DocumentParseError(f'Bad collection name in document ID "{doc_id}"') return doc_id - def _extract_id(self, body: Json) -> str: + def _extract_id(self, body: Json, validate: bool = True) -> str: """Extract the document ID from document body. Args: body (dict): Document body. + validate (bool): Whether to validate the document ID, + checking if it belongs to the current collection. Returns: str: Document ID. @@ -100,7 +128,10 @@ def _extract_id(self, body: Json) -> str: """ try: if "_id" in body: - return self._validate_id(body["_id"]) + if validate: + return self._validate_id(body["_id"]) + else: + return cast(str, body["_id"]) else: key: str = body["_key"] return self._id_prefix + key @@ -115,6 +146,9 @@ def _ensure_key_from_id(self, body: Json) -> Json: Returns: dict: Document body with "_key" field if it has "_id" field. + + Raises: + DocumentParseError: If document is malformed. """ if "_id" in body and "_key" not in body: doc_id = self._validate_id(body["_id"]) @@ -122,41 +156,32 @@ def _ensure_key_from_id(self, body: Json) -> Json: body["_key"] = doc_id[len(self._id_prefix) :] return body - def _prep_from_doc( - self, - document: str | Json, - rev: Optional[str] = None, - check_rev: bool = False, - ) -> Tuple[str, Json]: - """Prepare document ID, body and request headers before a query. + def _get_doc_id(self, document: str | Json, validate: bool = True) -> str: + """Prepare document ID before a query. Args: document (str | dict): Document ID, key or body. - rev (str | None): Document revision. - check_rev (bool): Whether to check the revision. + validate (bool): Whether to validate the document ID, + checking if it belongs to the current collection. Returns: Document ID and request headers. Raises: DocumentParseError: On missing ID and key. - TypeError: On bad document type. 
""" - if isinstance(document, dict): - doc_id = self._extract_id(document) - rev = rev or document.get("_rev") - elif isinstance(document, str): + if isinstance(document, str): if "/" in document: - doc_id = self._validate_id(document) + if validate: + doc_id = self._validate_id(document) + else: + doc_id = document else: doc_id = self._id_prefix + document else: - raise TypeError("Document must be str or a dict") + doc_id = self._extract_id(document, validate) - if not check_rev or rev is None: - return doc_id, {} - else: - return doc_id, {"If-Match": rev} + return doc_id def _build_filter_conditions(self, filters: Optional[Json]) -> str: """Build filter conditions for an AQL query. @@ -456,29 +481,6 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) - -class StandardCollection(Collection[T, U, V]): - """Standard collection API wrapper. - - Args: - executor (ApiExecutor): API executor. - name (str): Collection name - doc_serializer (Serializer): Document serializer. - doc_deserializer (Deserializer): Document deserializer. - """ - - def __init__( - self, - executor: ApiExecutor, - name: str, - doc_serializer: Serializer[T], - doc_deserializer: Deserializer[U, V], - ) -> None: - super().__init__(executor, name, doc_serializer, doc_deserializer) - - def __repr__(self) -> str: - return f"" - async def properties(self) -> Result[CollectionProperties]: """Return the full properties of the current collection. @@ -563,14 +565,14 @@ def response_handler(resp: Response) -> int: return await self._executor.execute(request, response_handler) - async def get( + async def has( self, document: str | Json, allow_dirty_read: bool = False, if_match: Optional[str] = None, if_none_match: Optional[str] = None, - ) -> Result[Optional[U]]: - """Return a document. + ) -> Result[bool]: + """Check if a document exists in the collection. Args: document (str | dict): Document ID, key or body. @@ -582,17 +584,16 @@ async def get( different revision than the given ETag. Returns: - Document or `None` if not found. + `True` if the document exists, `False` otherwise. Raises: DocumentRevisionError: If the revision is incorrect. DocumentGetError: If retrieval fails. - DocumentParseError: If the document is malformed. References: - - `get-a-document `__ + - `get-a-document-header `__ """ # noqa: E501 - handle, _ = self._prep_from_doc(document) + handle = self._get_doc_id(document) headers: RequestHeaders = {} if allow_dirty_read: @@ -603,19 +604,16 @@ async def get( headers["If-None-Match"] = if_none_match request = Request( - method=Method.GET, + method=Method.HEAD, endpoint=f"/_api/document/{handle}", headers=headers, ) - def response_handler(resp: Response) -> Optional[U]: + def response_handler(resp: Response) -> bool: if resp.is_success: - return self._doc_deserializer.loads(resp.raw_body) + return True elif resp.status_code == HTTP_NOT_FOUND: - if resp.error_code == DOCUMENT_NOT_FOUND: - return None - else: - raise DocumentGetError(resp, request) + return False elif resp.status_code == HTTP_PRECONDITION_FAILED: raise DocumentRevisionError(resp, request) else: @@ -623,255 +621,1103 @@ def response_handler(resp: Response) -> Optional[U]: return await self._executor.execute(request, response_handler) - async def has( + async def get_many( self, - document: str | Json, - allow_dirty_read: bool = False, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - ) -> Result[bool]: - """Check if a document exists in the collection. 
+ documents: Sequence[str | T], + allow_dirty_read: Optional[bool] = None, + ignore_revs: Optional[bool] = None, + ) -> Result[V]: + """Return multiple documents ignoring any missing ones. Args: - document (str | dict): Document ID, key or body. - Document body must contain the "_id" or "_key" field. - allow_dirty_read (bool): Allow reads from followers in a cluster. - if_match (str | None): The document is returned, if it has the same - revision as the given ETag. - if_none_match (str | None): The document is returned, if it has a - different revision than the given ETag. + documents (list): List of document IDs, keys or bodies. A search document + must contain at least a value for the `_key` field. A value for `_rev` + may be specified to verify whether the document has the same revision + value, unless `ignoreRevs` is set to false. + allow_dirty_read (bool | None): Allow reads from followers in a cluster. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only replaced if the current revision is the one + specified. Returns: - `True` if the document exists, `False` otherwise. + list: List of documents. Missing ones are not included. Raises: - DocumentRevisionError: If the revision is incorrect. DocumentGetError: If retrieval fails. References: - - `get-a-document-header `__ + - `get-multiple-documents `__ """ # noqa: E501 - handle, _ = self._prep_from_doc(document) + params: Params = {"onlyget": True} + if ignore_revs is not None: + params["ignoreRevs"] = ignore_revs headers: RequestHeaders = {} - if allow_dirty_read: - headers["x-arango-allow-dirty-read"] = "true" - if if_match is not None: - headers["If-Match"] = if_match - if if_none_match is not None: - headers["If-None-Match"] = if_none_match + if allow_dirty_read is not None: + if allow_dirty_read is True: + headers["x-arango-allow-dirty-read"] = "true" + else: + headers["x-arango-allow-dirty-read"] = "false" request = Request( - method=Method.HEAD, - endpoint=f"/_api/document/{handle}", + method=Method.PUT, + endpoint=f"/_api/document/{self.name}", + params=params, headers=headers, + data=self._doc_serializer.dumps(documents), ) - def response_handler(resp: Response) -> bool: - if resp.is_success: - return True - elif resp.status_code == HTTP_NOT_FOUND: - return False - elif resp.status_code == HTTP_PRECONDITION_FAILED: - raise DocumentRevisionError(resp, request) - else: + def response_handler(resp: Response) -> V: + if not resp.is_success: raise DocumentGetError(resp, request) + return self._doc_deserializer.loads_many(resp.raw_body) return await self._executor.execute(request, response_handler) - async def insert( + async def find( self, - document: T, - wait_for_sync: Optional[bool] = None, - return_new: Optional[bool] = None, - return_old: Optional[bool] = None, - silent: Optional[bool] = None, - overwrite: Optional[bool] = None, - overwrite_mode: Optional[str] = None, - keep_null: Optional[bool] = None, - merge_objects: Optional[bool] = None, - refill_index_caches: Optional[bool] = None, - version_attribute: Optional[str] = None, - ) -> Result[bool | Json]: - """Insert a new document. + filters: Optional[Json] = None, + skip: Optional[int] = None, + limit: Optional[int | str] = None, + allow_dirty_read: Optional[bool] = False, + sort: Optional[Jsons] = None, + ) -> Result[Cursor]: + """Return all documents that match the given filters. 
Args: - document (dict): Document to insert. If it contains the "_key" or "_id" - field, the value is used as the key of the new document (otherwise - it is auto-generated). Any "_rev" field is ignored. - wait_for_sync (bool | None): Wait until document has been synced to disk. - return_new (bool | None): Additionally return the complete new document - under the attribute `new` in the result. - return_old (bool | None): Additionally return the complete old document - under the attribute `old` in the result. Only available if the - `overwrite` option is used. - silent (bool | None): If set to `True`, no document metadata is returned. - This can be used to save resources. - overwrite (bool | None): If set to `True`, operation does not fail on - duplicate key and existing document is overwritten (replace-insert). - overwrite_mode (str | None): Overwrite mode. Supersedes **overwrite** - option. May be one of "ignore", "replace", "update" or "conflict". - keep_null (bool | None): If set to `True`, fields with value None are - retained in the document. Otherwise, they are removed completely. - Applies only when **overwrite_mode** is set to "update" - (update-insert). - merge_objects (bool | None): If set to `True`, sub-dictionaries are merged - instead of the new one overwriting the old one. Applies only when - **overwrite_mode** is set to "update" (update-insert). - refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document insertions affect the edge index - or cache-enabled persistent indexes. - version_attribute (str | None): Support for simple external versioning to - document operations. Only applicable if **overwrite** is set to `True` - or **overwrite_mode** is set to "update" or "replace". + filters (dict | None): Query filters. + skip (int | None): Number of documents to skip. + limit (int | str | None): Maximum number of documents to return. + allow_dirty_read (bool): Allow reads from followers in a cluster. + sort (list | None): Document sort parameters. Returns: - bool | dict: Document metadata (e.g. document id, key, revision) or `True` - if **silent** is set to `True`. + Cursor: Document cursor. Raises: - DocumentInsertError: If insertion fails. - DocumentParseError: If the document is malformed. - - References: - - `create-a-document `__ - """ # noqa: E501 - if isinstance(document, dict): - # We assume that the document deserializer works with dictionaries. - document = cast(T, self._ensure_key_from_id(document)) + DocumentGetError: If retrieval fails. + SortValidationError: If sort parameters are invalid. 
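+
+        Example:
+            .. code-block:: python
+
+                # Usage sketch: "collection" stands for any collection API
+                # wrapper; the filter fields are illustrative.
+                cursor = await collection.find({"status": "active"}, limit=10)
+                async for doc in cursor:
+                    print(doc["_key"])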
+ """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + self._validate_sort_parameters(sort) + if not self._is_none_or_int(skip): + raise ValueError("skip parameter must be a non-negative int") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") - params: Params = {} - if wait_for_sync is not None: - params["waitForSync"] = wait_for_sync - if return_new is not None: - params["returnNew"] = return_new - if return_old is not None: - params["returnOld"] = return_old - if silent is not None: - params["silent"] = silent - if overwrite is not None: - params["overwrite"] = overwrite - if overwrite_mode is not None: - params["overwriteMode"] = overwrite_mode - if keep_null is not None: - params["keepNull"] = keep_null - if merge_objects is not None: - params["mergeObjects"] = merge_objects - if refill_index_caches is not None: - params["refillIndexCaches"] = refill_index_caches - if version_attribute is not None: - params["versionAttribute"] = version_attribute + skip = skip if skip is not None else 0 + limit = limit if limit is not None else "null" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + LIMIT {skip}, {limit} + {self._build_sort_expression(sort)} + RETURN doc + """ + bind_vars = {"@collection": self.name} + data: Json = {"query": query, "bindVars": bind_vars, "count": True} + headers: RequestHeaders = {} + if allow_dirty_read is not None: + if allow_dirty_read is True: + headers["x-arango-allow-dirty-read"] = "true" + else: + headers["x-arango-allow-dirty-read"] = "false" request = Request( method=Method.POST, - endpoint=f"/_api/document/{self._name}", - params=params, - data=self._doc_serializer.dumps(document), + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + headers=headers, ) - def response_handler(resp: Response) -> bool | Json: - if resp.is_success: + def response_handler(resp: Response) -> Cursor: + if not resp.is_success: + raise DocumentGetError(resp, request) + if self._executor.context == "async": + # We cannot have a cursor giving back async jobs + executor: NonAsyncExecutor = DefaultApiExecutor( + self._executor.connection + ) + else: + executor = cast(NonAsyncExecutor, self._executor) + return Cursor(executor, self.deserializer.loads(resp.raw_body)) + + return await self._executor.execute(request, response_handler) + + async def update_match( + self, + filters: Json, + body: T, + limit: Optional[int | str] = None, + keep_none: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, + merge_objects: Optional[bool] = None, + ) -> Result[int]: + """Update matching documents. + + Args: + filters (dict | None): Query filters. + body (dict): Full or partial document body with the updates. + limit (int | str | None): Maximum number of documents to update. + keep_none (bool | None): If set to `True`, fields with value `None` are + retained in the document. Otherwise, they are removed completely. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + merge_objects (bool | None): If set to `True`, sub-dictionaries are merged + instead of the new one overwriting the old one. + + Returns: + int: Number of documents that got updated. + + Raises: + DocumentUpdateError: If update fails. 
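+
+        Example:
+            .. code-block:: python
+
+                # Usage sketch (field names are illustrative): flag every
+                # matching document and get back the number of updates.
+                count = await collection.update_match(
+                    filters={"status": "inactive"},
+                    body={"flagged": True},
+                )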
+ """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") + + sync = f", waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + {f"LIMIT {limit}" if limit is not None else ""} + UPDATE doc WITH @body IN @@collection + OPTIONS {{ keepNull: @keep_none, mergeObjects: @merge {sync} }} + """ # noqa: E201 E202 + bind_vars = { + "@collection": self.name, + "body": body, + "keep_none": keep_none, + "merge": merge_objects, + } + data = {"query": query, "bindVars": bind_vars} + + request = Request( + method=Method.POST, + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> int: + if resp.is_success: + result = self.deserializer.loads(resp.raw_body) + return cast(int, result["extra"]["stats"]["writesExecuted"]) + raise DocumentUpdateError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def replace_match( + self, + filters: Json, + body: T, + limit: Optional[int | str] = None, + wait_for_sync: Optional[bool] = None, + ) -> Result[int]: + """Replace matching documents. + + Args: + filters (dict | None): Query filters. + body (dict): New document body. + limit (int | str | None): Maximum number of documents to replace. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + + Returns: + int: Number of documents that got replaced. + + Raises: + DocumentReplaceError: If replace fails. + """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") + + sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + {f"LIMIT {limit}" if limit is not None else ""} + REPLACE doc WITH @body IN @@collection + {f"OPTIONS {{ {sync} }}" if sync else ""} + """ # noqa: E201 E202 + bind_vars = { + "@collection": self.name, + "body": body, + } + data = {"query": query, "bindVars": bind_vars} + + request = Request( + method=Method.POST, + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> int: + if resp.is_success: + result = self.deserializer.loads(resp.raw_body) + return cast(int, result["extra"]["stats"]["writesExecuted"]) + raise DocumentReplaceError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def delete_match( + self, + filters: Json, + limit: Optional[int | str] = None, + wait_for_sync: Optional[bool] = None, + ) -> Result[int]: + """Delete matching documents. + + Args: + filters (dict | None): Query filters. + limit (int | str | None): Maximum number of documents to delete. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + + Returns: + int: Number of documents that got deleted. + + Raises: + DocumentDeleteError: If delete fails. 
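+
+        Example:
+            .. code-block:: python
+
+                # Usage sketch (field names are illustrative): delete at most
+                # 100 matching documents.
+                deleted = await collection.delete_match(
+                    filters={"expired": True},
+                    limit=100,
+                )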
+ """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") + + sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + {f"LIMIT {limit}" if limit is not None else ""} + REMOVE doc IN @@collection + {f"OPTIONS {{ {sync} }}" if sync else ""} + """ # noqa: E201 E202 + bind_vars = {"@collection": self.name} + data = {"query": query, "bindVars": bind_vars} + + request = Request( + method=Method.POST, + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> int: + if resp.is_success: + result = self.deserializer.loads(resp.raw_body) + return cast(int, result["extra"]["stats"]["writesExecuted"]) + raise DocumentDeleteError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def insert_many( + self, + documents: Sequence[T], + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + overwrite: Optional[bool] = None, + overwrite_mode: Optional[str] = None, + keep_null: Optional[bool] = None, + merge_objects: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + version_attribute: Optional[str] = None, + ) -> Result[Jsons]: + """Insert multiple documents. + + Note: + If inserting a document fails, the exception is not raised but + returned as an object in the "errors" list. It is up to you to + inspect the list to determine which documents were inserted + successfully (returns document metadata) and which were not + (returns exception object). + + Args: + documents (list): Documents to insert. If an item contains the "_key" or + "_id" field, the value is used as the key of the new document + (otherwise it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until documents have been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. Only available if the + `overwrite` option is used. + silent (bool | None): If set to `True`, an empty object is returned as + response if all document operations succeed. No meta-data is returned + for the created documents. If any of the operations raises an error, + an array with the error object(s) is returned. + overwrite (bool | None): If set to `True`, operation does not fail on + duplicate key and existing document is overwritten (replace-insert). + overwrite_mode (str | None): Overwrite mode. Supersedes **overwrite** + option. May be one of "ignore", "replace", "update" or "conflict". + keep_null (bool | None): If set to `True`, fields with value None are + retained in the document. Otherwise, they are removed completely. + Applies only when **overwrite_mode** is set to "update" + (update-insert). + merge_objects (bool | None): If set to `True`, sub-dictionaries are merged + instead of the new one overwriting the old one. Applies only when + **overwrite_mode** is set to "update" (update-insert). + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document operations affect the edge index + or cache-enabled persistent indexes. 
+            version_attribute (str | None): Support for simple external versioning to
+                document operations. Only applicable if **overwrite** is set to `True`
+                or **overwrite_mode** is set to "update" or "replace".
+
+        Returns:
+            list: Documents metadata (e.g. document id, key, revision) and
+                errors or just errors if **silent** is set to `True`.
+
+        Raises:
+            DocumentInsertError: If insertion fails.
+
+        References:
+            - `create-multiple-documents <https://docs.arangodb.com/stable/develop/http-api/documents/#create-multiple-documents>`__
+        """  # noqa: E501
+        params: Params = {}
+        if wait_for_sync is not None:
+            params["waitForSync"] = wait_for_sync
+        if return_new is not None:
+            params["returnNew"] = return_new
+        if return_old is not None:
+            params["returnOld"] = return_old
+        if silent is not None:
+            params["silent"] = silent
+        if overwrite is not None:
+            params["overwrite"] = overwrite
+        if overwrite_mode is not None:
+            params["overwriteMode"] = overwrite_mode
+        if keep_null is not None:
+            params["keepNull"] = keep_null
+        if merge_objects is not None:
+            params["mergeObjects"] = merge_objects
+        if refill_index_caches is not None:
+            params["refillIndexCaches"] = refill_index_caches
+        if version_attribute is not None:
+            params["versionAttribute"] = version_attribute
+
+        request = Request(
+            method=Method.POST,
+            endpoint=f"/_api/document/{self.name}",
+            data=self._doc_serializer.dumps(documents),
+            params=params,
+        )
+
+        def response_handler(
+            resp: Response,
+        ) -> Jsons:
+            if not resp.is_success:
+                raise DocumentInsertError(resp, request)
+            return self.deserializer.loads_many(resp.raw_body)
+
+        return await self._executor.execute(request, response_handler)
+
+    async def replace_many(
+        self,
+        documents: Sequence[T],
+        wait_for_sync: Optional[bool] = None,
+        ignore_revs: Optional[bool] = None,
+        return_new: Optional[bool] = None,
+        return_old: Optional[bool] = None,
+        silent: Optional[bool] = None,
+        refill_index_caches: Optional[bool] = None,
+        version_attribute: Optional[str] = None,
+    ) -> Result[Jsons]:
+        """Replace multiple documents.
+
+        Note:
+            If replacing a document fails, the exception is not raised but
+            returned as an object in the "errors" list. It is up to you to
+            inspect the list to determine which documents were replaced
+            successfully (returns document metadata) and which were not
+            (returns exception object).
+
+        Args:
+            documents (list): New documents to replace the old ones. An item must
+                contain the "_key" or "_id" field.
+            wait_for_sync (bool | None): Wait until documents have been synced to disk.
+            ignore_revs (bool | None): If this is set to `False`, then any `_rev`
+                attribute given in a body document is taken as a precondition. The
+                document is only replaced if the current revision is the one
+                specified.
+            return_new (bool | None): Additionally return the complete new document
+                under the attribute `new` in the result.
+            return_old (bool | None): Additionally return the complete old document
+                under the attribute `old` in the result.
+            silent (bool | None): If set to `True`, an empty object is returned as
+                response if all document operations succeed. No meta-data is returned
+                for the created documents. If any of the operations raises an error,
+                an array with the error object(s) is returned.
+            refill_index_caches (bool | None): Whether to add new entries to
+                in-memory index caches if document operations affect the edge index
+                or cache-enabled persistent indexes.
+            version_attribute (str | None): Support for simple external versioning to
+                document operations.
+
+        Returns:
+            list: Documents metadata (e.g. document id, key, revision) and
+                errors or just errors if **silent** is set to `True`.
+
+        Raises:
+            DocumentReplaceError: If replacing fails.
+
+        References:
+            - `replace-multiple-documents <https://docs.arangodb.com/stable/develop/http-api/documents/#replace-multiple-documents>`__
+        """  # noqa: E501
+        params: Params = {}
+        if wait_for_sync is not None:
+            params["waitForSync"] = wait_for_sync
+        if ignore_revs is not None:
+            params["ignoreRevs"] = ignore_revs
+        if return_new is not None:
+            params["returnNew"] = return_new
+        if return_old is not None:
+            params["returnOld"] = return_old
+        if silent is not None:
+            params["silent"] = silent
+        if refill_index_caches is not None:
+            params["refillIndexCaches"] = refill_index_caches
+        if version_attribute is not None:
+            params["versionAttribute"] = version_attribute
+
+        request = Request(
+            method=Method.PUT,
+            endpoint=f"/_api/document/{self.name}",
+            data=self._doc_serializer.dumps(documents),
+            params=params,
+        )
+
+        def response_handler(
+            resp: Response,
+        ) -> Jsons:
+            if not resp.is_success:
+                raise DocumentReplaceError(resp, request)
+            return self.deserializer.loads_many(resp.raw_body)
+
+        return await self._executor.execute(request, response_handler)
+
+    async def update_many(
+        self,
+        documents: Sequence[T],
+        wait_for_sync: Optional[bool] = None,
+        ignore_revs: Optional[bool] = None,
+        return_new: Optional[bool] = None,
+        return_old: Optional[bool] = None,
+        silent: Optional[bool] = None,
+        keep_null: Optional[bool] = None,
+        merge_objects: Optional[bool] = None,
+        refill_index_caches: Optional[bool] = None,
+        version_attribute: Optional[str] = None,
+    ) -> Result[Jsons]:
+        """Update multiple documents.
+
+        Note:
+            If updating a document fails, the exception is not raised but
+            returned as an object in the "errors" list. It is up to you to
+            inspect the list to determine which documents were updated
+            successfully (returned as document metadata) and which were not
+            (returned as exception object).
+
+        Args:
+            documents (list): Documents to update. An item must contain the "_key" or
+                "_id" field.
+            wait_for_sync (bool | None): Wait until documents have been synced to disk.
+            ignore_revs (bool | None): If this is set to `False`, then any `_rev`
+                attribute given in a body document is taken as a precondition. The
+                document is only updated if the current revision is the one
+                specified.
+            return_new (bool | None): Additionally return the complete new document
+                under the attribute `new` in the result.
+            return_old (bool | None): Additionally return the complete old document
+                under the attribute `old` in the result.
+            silent (bool | None): If set to `True`, an empty object is returned as
+                response if all document operations succeed. No meta-data is returned
+                for the created documents. If any of the operations raises an error,
+                an array with the error object(s) is returned.
+            keep_null (bool | None): If set to `True`, fields with value None are
+                retained in the document. Otherwise, they are removed completely.
+                Applies only when **overwrite_mode** is set to "update"
+                (update-insert).
+            merge_objects (bool | None): If set to `True`, sub-dictionaries are merged
+                instead of the new one overwriting the old one. Applies only when
+                **overwrite_mode** is set to "update" (update-insert).
+            refill_index_caches (bool | None): Whether to add new entries to
+                in-memory index caches if document operations affect the edge index
+                or cache-enabled persistent indexes.
+            version_attribute (str | None): Support for simple external versioning to
+                document operations.
+
+        Returns:
+            list: Documents metadata (e.g. document id, key, revision) and
+                errors or just errors if **silent** is set to `True`.
+
+        Raises:
+            DocumentUpdateError: If update fails.
+
+        References:
+            - `update-multiple-documents <https://docs.arangodb.com/stable/develop/http-api/documents/#update-multiple-documents>`__
+        """  # noqa: E501
+        params: Params = {}
+        if wait_for_sync is not None:
+            params["waitForSync"] = wait_for_sync
+        if ignore_revs is not None:
+            params["ignoreRevs"] = ignore_revs
+        if return_new is not None:
+            params["returnNew"] = return_new
+        if return_old is not None:
+            params["returnOld"] = return_old
+        if silent is not None:
+            params["silent"] = silent
+        if keep_null is not None:
+            params["keepNull"] = keep_null
+        if merge_objects is not None:
+            params["mergeObjects"] = merge_objects
+        if refill_index_caches is not None:
+            params["refillIndexCaches"] = refill_index_caches
+        if version_attribute is not None:
+            params["versionAttribute"] = version_attribute
+
+        request = Request(
+            method=Method.PATCH,
+            endpoint=f"/_api/document/{self.name}",
+            data=self._doc_serializer.dumps(documents),
+            params=params,
+        )
+
+        def response_handler(
+            resp: Response,
+        ) -> Jsons:
+            if not resp.is_success:
+                raise DocumentUpdateError(resp, request)
+            return self.deserializer.loads_many(resp.raw_body)
+
+        return await self._executor.execute(request, response_handler)
+
+    async def delete_many(
+        self,
+        documents: Sequence[T],
+        wait_for_sync: Optional[bool] = None,
+        ignore_revs: Optional[bool] = None,
+        return_old: Optional[bool] = None,
+        silent: Optional[bool] = None,
+        refill_index_caches: Optional[bool] = None,
+    ) -> Result[Jsons]:
+        """Delete multiple documents.
+
+        Note:
+            If deleting a document fails, the exception is not raised but
+            returned as an object in the "errors" list. It is up to you to
+            inspect the list to determine which documents were deleted
+            successfully (returned as document metadata) and which were not
+            (returned as exception object).
+
+        Args:
+            documents (list): Documents to delete. An item must contain the "_key" or
+                "_id" field.
+            wait_for_sync (bool | None): Wait until documents have been synced to disk.
+            ignore_revs (bool | None): If this is set to `False`, then any `_rev`
+                attribute given in a body document is taken as a precondition. The
+                document is only deleted if the current revision is the one
+                specified.
+            return_old (bool | None): Additionally return the complete old document
+                under the attribute `old` in the result.
+            silent (bool | None): If set to `True`, an empty object is returned as
+                response if all document operations succeed. No meta-data is returned
+                for the created documents. If any of the operations raises an error,
+                an array with the error object(s) is returned.
+            refill_index_caches (bool | None): Whether to add new entries to
+                in-memory index caches if document operations affect the edge index
+                or cache-enabled persistent indexes.
+
+        Returns:
+            list: Documents metadata (e.g. document id, key, revision) and
+                errors or just errors if **silent** is set to `True`.
+
+        Raises:
+            DocumentDeleteError: If removal fails.
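+
+        Example:
+            .. code-block:: python
+
+                # Usage sketch: bulk delete by key; failures are reported per
+                # item rather than raised ("collection" is illustrative).
+                results = await collection.delete_many(
+                    [{"_key": "01"}, {"_key": "02"}]
+                )
+                errors = [r for r in results if r.get("error")]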
+
+        References:
+            - `remove-multiple-documents <https://docs.arangodb.com/stable/develop/http-api/documents/#remove-multiple-documents>`__
+        """  # noqa: E501
+        params: Params = {}
+        if wait_for_sync is not None:
+            params["waitForSync"] = wait_for_sync
+        if ignore_revs is not None:
+            params["ignoreRevs"] = ignore_revs
+        if return_old is not None:
+            params["returnOld"] = return_old
+        if silent is not None:
+            params["silent"] = silent
+        if refill_index_caches is not None:
+            params["refillIndexCaches"] = refill_index_caches
+
+        request = Request(
+            method=Method.DELETE,
+            endpoint=f"/_api/document/{self.name}",
+            data=self._doc_serializer.dumps(documents),
+            params=params,
+        )
+
+        def response_handler(
+            resp: Response,
+        ) -> Jsons:
+            if not resp.is_success:
+                raise DocumentDeleteError(resp, request)
+            return self.deserializer.loads_many(resp.raw_body)
+
+        return await self._executor.execute(request, response_handler)
+
+
+class StandardCollection(Collection[T, U, V]):
+    """Standard collection API wrapper.
+
+    Args:
+        executor (ApiExecutor): API executor.
+        name (str): Collection name
+        doc_serializer (Serializer): Document serializer.
+        doc_deserializer (Deserializer): Document deserializer.
+    """
+
+    def __init__(
+        self,
+        executor: ApiExecutor,
+        name: str,
+        doc_serializer: Serializer[T],
+        doc_deserializer: Deserializer[U, V],
+    ) -> None:
+        super().__init__(executor, name, doc_serializer, doc_deserializer)
+
+    def __repr__(self) -> str:
+        return f"<StandardCollection {self.name}>"
+
+    async def get(
+        self,
+        document: str | Json,
+        allow_dirty_read: bool = False,
+        if_match: Optional[str] = None,
+        if_none_match: Optional[str] = None,
+    ) -> Result[Optional[U]]:
+        """Return a document.
+
+        Args:
+            document (str | dict): Document ID, key or body.
+                Document body must contain the "_id" or "_key" field.
+            allow_dirty_read (bool): Allow reads from followers in a cluster.
+            if_match (str | None): The document is returned, if it has the same
+                revision as the given ETag.
+            if_none_match (str | None): The document is returned, if it has a
+                different revision than the given ETag.
+
+        Returns:
+            Document or `None` if not found.
+
+        Raises:
+            DocumentRevisionError: If the revision is incorrect.
+            DocumentGetError: If retrieval fails.
+            DocumentParseError: If the document is malformed.
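+
+        Example:
+            .. code-block:: python
+
+                # Usage sketch: fetch by key; None signals a missing document
+                # ("collection" is illustrative).
+                doc = await collection.get("01")
+                if doc is None:
+                    print("document not found")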
+ + References: + - `get-a-document `__ + """ # noqa: E501 + handle = self._get_doc_id(document) + + headers: RequestHeaders = {} + if allow_dirty_read: + headers["x-arango-allow-dirty-read"] = "true" + if if_match is not None: + headers["If-Match"] = if_match + if if_none_match is not None: + headers["If-None-Match"] = if_none_match + + request = Request( + method=Method.GET, + endpoint=f"/_api/document/{handle}", + headers=headers, + ) + + def response_handler(resp: Response) -> Optional[U]: + if resp.is_success: + return self._doc_deserializer.loads(resp.raw_body) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND: + return None + else: + raise DocumentGetError(resp, request) + elif resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + else: + raise DocumentGetError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def insert( + self, + document: T, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + overwrite: Optional[bool] = None, + overwrite_mode: Optional[str] = None, + keep_null: Optional[bool] = None, + merge_objects: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + version_attribute: Optional[str] = None, + ) -> Result[bool | Json]: + """Insert a new document. + + Args: + document (dict): Document to insert. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. Only available if the + `overwrite` option is used. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + overwrite (bool | None): If set to `True`, operation does not fail on + duplicate key and existing document is overwritten (replace-insert). + overwrite_mode (str | None): Overwrite mode. Supersedes **overwrite** + option. May be one of "ignore", "replace", "update" or "conflict". + keep_null (bool | None): If set to `True`, fields with value None are + retained in the document. Otherwise, they are removed completely. + Applies only when **overwrite_mode** is set to "update" + (update-insert). + merge_objects (bool | None): If set to `True`, sub-dictionaries are merged + instead of the new one overwriting the old one. Applies only when + **overwrite_mode** is set to "update" (update-insert). + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document insertions affect the edge index + or cache-enabled persistent indexes. + version_attribute (str | None): Support for simple external versioning to + document operations. Only applicable if **overwrite** is set to `True` + or **overwrite_mode** is set to "update" or "replace". + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. 
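+
+        Example:
+            .. code-block:: python
+
+                # Usage sketch ("collection" is illustrative): insert a
+                # document and read back the server-generated metadata.
+                meta = await collection.insert({"_key": "01", "name": "Anna"})
+                print(meta["_id"], meta["_rev"])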
+ + References: + - `create-a-document `__ + """ # noqa: E501 + if isinstance(document, dict): + document = cast(T, self._ensure_key_from_id(document)) + + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if return_new is not None: + params["returnNew"] = return_new + if return_old is not None: + params["returnOld"] = return_old + if silent is not None: + params["silent"] = silent + if overwrite is not None: + params["overwrite"] = overwrite + if overwrite_mode is not None: + params["overwriteMode"] = overwrite_mode + if keep_null is not None: + params["keepNull"] = keep_null + if merge_objects is not None: + params["mergeObjects"] = merge_objects + if refill_index_caches is not None: + params["refillIndexCaches"] = refill_index_caches + if version_attribute is not None: + params["versionAttribute"] = version_attribute + + request = Request( + method=Method.POST, + endpoint=f"/_api/document/{self._name}", + params=params, + data=self._doc_serializer.dumps(document), + ) + + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + if silent is True: + return True + return self._executor.deserialize(resp.raw_body) + msg: Optional[str] = None + if resp.status_code == HTTP_BAD_PARAMETER: + msg = ( + "Body does not contain a valid JSON representation of " + "one document." + ) + elif resp.status_code == HTTP_NOT_FOUND: + msg = "Collection not found." + raise DocumentInsertError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + + async def update( + self, + document: T, + ignore_revs: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + keep_null: Optional[bool] = None, + merge_objects: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + version_attribute: Optional[str] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Update a document. + + Args: + document (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only updated if the current revision is the one + specified. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + merge_objects (bool | None): Controls whether objects (not arrays) are + merged if present in both the existing and the patch document. + If set to `False`, the value in the patch document overwrites the + existing document’s value. If set to `True`, objects are merged. + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document updates affect the edge index + or cache-enabled persistent indexes. + version_attribute (str | None): Support for simple external versioning to + document operations. 
+ if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentUpdateError: If update fails. + + References: + - `update-a-document `__ + """ # noqa: E501 + params: Params = {} + if ignore_revs is not None: + params["ignoreRevs"] = ignore_revs + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if return_new is not None: + params["returnNew"] = return_new + if return_old is not None: + params["returnOld"] = return_old + if silent is not None: + params["silent"] = silent + if keep_null is not None: + params["keepNull"] = keep_null + if merge_objects is not None: + params["mergeObjects"] = merge_objects + if refill_index_caches is not None: + params["refillIndexCaches"] = refill_index_caches + if version_attribute is not None: + params["versionAttribute"] = version_attribute + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.PATCH, + endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", + params=params, + headers=headers, + data=self._doc_serializer.dumps(document), + ) + + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + if silent is True: + return True + return self._executor.deserialize(resp.raw_body) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = "Document, collection or transaction not found." + raise DocumentUpdateError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + + async def replace( + self, + document: T, + ignore_revs: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + version_attribute: Optional[str] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Replace a document. + + Args: + document (dict): New document. It must contain the "_key" or "_id" field. + Edge document must also have "_from" and "_to" fields. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only replaced if the current revision is the one + specified. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document updates affect the edge index + or cache-enabled persistent indexes. + version_attribute (str | None): Support for simple external versioning to + document operations. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. 
+
+        Returns:
+            bool | dict: Document metadata (e.g. document id, key, revision) or `True`
+                if **silent** is set to `True`.
+
+        Raises:
+            DocumentRevisionError: If precondition was violated.
+            DocumentReplaceError: If replace fails.
+
+        References:
+            - `replace-a-document `__
+        """  # noqa: E501
+        params: Params = {}
+        if ignore_revs is not None:
+            params["ignoreRevs"] = ignore_revs
+        if wait_for_sync is not None:
+            params["waitForSync"] = wait_for_sync
+        if return_new is not None:
+            params["returnNew"] = return_new
+        if return_old is not None:
+            params["returnOld"] = return_old
+        if silent is not None:
+            params["silent"] = silent
+        if refill_index_caches is not None:
+            params["refillIndexCaches"] = refill_index_caches
+        if version_attribute is not None:
+            params["versionAttribute"] = version_attribute
+
+        headers: RequestHeaders = {}
+        if if_match is not None:
+            headers["If-Match"] = if_match
+
+        request = Request(
+            method=Method.PUT,
+            endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}",
+            params=params,
+            headers=headers,
+            data=self._doc_serializer.dumps(document),
+        )
+
+        def response_handler(resp: Response) -> bool | Json:
+            if resp.is_success:
                 if silent is True:
                     return True
                 return self._executor.deserialize(resp.raw_body)
             msg: Optional[str] = None
-            if resp.status_code == HTTP_BAD_PARAMETER:
-                msg = (
-                    "Body does not contain a valid JSON representation of "
-                    "one document."
-                )
+            if resp.status_code == HTTP_PRECONDITION_FAILED:
+                raise DocumentRevisionError(resp, request)
             elif resp.status_code == HTTP_NOT_FOUND:
-                msg = "Collection not found."
-                raise DocumentInsertError(resp, request, msg)
+                msg = "Document, collection or transaction not found."
+                raise DocumentReplaceError(resp, request, msg)

         return await self._executor.execute(request, response_handler)

-    async def update(
+    async def delete(
         self,
         document: T,
         ignore_revs: Optional[bool] = None,
+        ignore_missing: bool = False,
         wait_for_sync: Optional[bool] = None,
-        return_new: Optional[bool] = None,
         return_old: Optional[bool] = None,
         silent: Optional[bool] = None,
-        keep_null: Optional[bool] = None,
-        merge_objects: Optional[bool] = None,
         refill_index_caches: Optional[bool] = None,
-        version_attribute: Optional[str] = None,
         if_match: Optional[str] = None,
     ) -> Result[bool | Json]:
-        """Insert a new document.
+        """Delete a document.

         Args:
-            document (dict): Partial or full document with the updated values.
-                It must contain the "_key" or "_id" field.
+            document (dict): Document ID, key or body. The body must contain the
+                "_key" or "_id" field.
             ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the
                 document is ignored. If this is set to `False`, then the `_rev`
                 attribute given in the body document is taken as a precondition.
-                The document is only updated if the current revision is the one
+                The document is only removed if the current revision is the one
                 specified.
-            wait_for_sync (bool | None): Wait until document has been synced to disk.
-            return_new (bool | None): Additionally return the complete new document
-                under the attribute `new` in the result.
+            ignore_missing (bool): Do not raise an exception on missing document.
+                This parameter has no effect in transactions where an exception is
+                always raised on failures.
+            wait_for_sync (bool | None): Wait until operation has been synced to disk.
             return_old (bool | None): Additionally return the complete old document
                 under the attribute `old` in the result.
             silent (bool | None): If set to `True`, no document metadata is returned.
                This can be used to save resources.
-            keep_null (bool | None): If the intention is to delete existing attributes
-                with the patch command, set this parameter to `False`.
-            merge_objects (bool | None): Controls whether objects (not arrays) are
-                merged if present in both the existing and the patch document.
-                If set to `False`, the value in the patch document overwrites the
-                existing document’s value. If set to `True`, objects are merged.
             refill_index_caches (bool | None): Whether to add new entries to
                 in-memory index caches if document updates affect the edge index
                 or cache-enabled persistent indexes.
-            version_attribute (str | None): Support for simple external versioning to
-                document operations.
-            if_match (str | None): You can conditionally update a document based on a
-                target revision id by using the "if-match" HTTP header.
+            if_match (str | None): You can conditionally remove a document based
+                on a target revision id by using the "if-match" HTTP header.

         Returns:
             bool | dict: Document metadata (e.g. document id, key, revision) or `True`
-                if **silent** is set to `True`.
+                if **silent** is set to `True` and the document was found.

         Raises:
             DocumentRevisionError: If precondition was violated.
-            DocumentUpdateError: If update fails.
+            DocumentDeleteError: If deletion fails.

         References:
-            - `update-a-document `__
+            - `remove-a-document `__
         """  # noqa: E501
         params: Params = {}
         if ignore_revs is not None:
             params["ignoreRevs"] = ignore_revs
         if wait_for_sync is not None:
             params["waitForSync"] = wait_for_sync
-        if return_new is not None:
-            params["returnNew"] = return_new
         if return_old is not None:
             params["returnOld"] = return_old
         if silent is not None:
             params["silent"] = silent
-        if keep_null is not None:
-            params["keepNull"] = keep_null
-        if merge_objects is not None:
-            params["mergeObjects"] = merge_objects
         if refill_index_caches is not None:
             params["refillIndexCaches"] = refill_index_caches
-        if version_attribute is not None:
-            params["versionAttribute"] = version_attribute

         headers: RequestHeaders = {}
         if if_match is not None:
             headers["If-Match"] = if_match

         request = Request(
-            method=Method.PATCH,
+            method=Method.DELETE,
             endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}",
             params=params,
             headers=headers,
-            data=self._doc_serializer.dumps(document),
         )

         def response_handler(resp: Response) -> bool | Json:
@@ -883,74 +1729,310 @@ def response_handler(resp: Response) -> bool | Json:
             if resp.status_code == HTTP_PRECONDITION_FAILED:
                 raise DocumentRevisionError(resp, request)
             elif resp.status_code == HTTP_NOT_FOUND:
+                if resp.error_code == DOCUMENT_NOT_FOUND and ignore_missing:
+                    return False
                 msg = "Document, collection or transaction not found."
-                raise DocumentUpdateError(resp, request, msg)
+                raise DocumentDeleteError(resp, request, msg)

         return await self._executor.execute(request, response_handler)

-    async def replace(
+
+class VertexCollection(Collection[T, U, V]):
+    """Vertex collection API wrapper.
+
+    Args:
+        executor (ApiExecutor): API executor.
+        name (str): Collection name.
+        graph (str): Graph name.
+        doc_serializer (Serializer): Document serializer.
+        doc_deserializer (Deserializer): Document deserializer.
+    """
+
+    def __init__(
         self,
-        document: T,
-        ignore_revs: Optional[bool] = None,
+        executor: ApiExecutor,
+        graph: str,
+        name: str,
+        doc_serializer: Serializer[T],
+        doc_deserializer: Deserializer[U, V],
+    ) -> None:
+        super().__init__(executor, name, doc_serializer, doc_deserializer)
+        self._graph = graph
+
+    def __repr__(self) -> str:
+        return f"<VertexCollection {self.name}>"
+
+    @staticmethod
+    def _parse_result(data: Json) -> Json:
+        """Parse the result from the response.
+
+        Args:
+            data (dict): Response data.
+
+        Returns:
+            dict: Parsed result.
+        """
+        result: Json = {}
+        if "new" in data or "old" in data:
+            result["vertex"] = data["vertex"]
+            if "new" in data:
+                result["new"] = data["new"]
+            if "old" in data:
+                result["old"] = data["old"]
+        else:
+            result = data["vertex"]
+        return result
+
+    @property
+    def graph(self) -> str:
+        """Return the graph name.
+
+        Returns:
+            str: Graph name.
+        """
+        return self._graph
+
+    async def get(
+        self,
+        vertex: str | Json,
+        rev: Optional[str] = None,
+        if_match: Optional[str] = None,
+        if_none_match: Optional[str] = None,
+    ) -> Result[Optional[Json]]:
+        """Return a vertex from the graph.
+
+        Args:
+            vertex (str | dict): Document ID, key or body.
+                Document body must contain the "_id" or "_key" field.
+            rev (str | None): If this is set a document is only returned if it
+                has exactly this revision.
+            if_match (str | None): The document is returned, if it has the same
+                revision as the given ETag.
+            if_none_match (str | None): The document is returned, if it has a
+                different revision than the given ETag.
+
+        Returns:
+            dict | None: Document or `None` if not found.
+
+        Raises:
+            DocumentRevisionError: If the revision is incorrect.
+            DocumentGetError: If retrieval fails.
+            DocumentParseError: If the document is malformed.
+
+        References:
+            - `get-a-vertex `__
+        """  # noqa: E501
+        handle = self._get_doc_id(vertex)
+
+        headers: RequestHeaders = {}
+        if if_match is not None:
+            headers["If-Match"] = if_match
+        if if_none_match is not None:
+            headers["If-None-Match"] = if_none_match
+
+        params: Params = {}
+        if rev is not None:
+            params["rev"] = rev
+
+        request = Request(
+            method=Method.GET,
+            endpoint=f"/_api/gharial/{self._graph}/vertex/{handle}",
+            headers=headers,
+            params=params,
+        )
+
+        def response_handler(resp: Response) -> Optional[Json]:
+            if resp.is_success:
+                return self._parse_result(self.deserializer.loads(resp.raw_body))
+            elif resp.status_code == HTTP_NOT_FOUND:
+                if resp.error_code == DOCUMENT_NOT_FOUND:
+                    return None
+                else:
+                    raise DocumentGetError(resp, request)
+            elif resp.status_code == HTTP_PRECONDITION_FAILED:
+                raise DocumentRevisionError(resp, request)
+            else:
+                raise DocumentGetError(resp, request)
+
+        return await self._executor.execute(request, response_handler)
+
+    async def insert(
+        self,
+        vertex: T,
+        wait_for_sync: Optional[bool] = None,
+        return_new: Optional[bool] = None,
+    ) -> Result[Json]:
+        """Insert a new vertex document.
+
+        Args:
+            vertex (dict): Document to insert. If it contains the "_key" or "_id"
+                field, the value is used as the key of the new document (otherwise
+                it is auto-generated). Any "_rev" field is ignored.
+            wait_for_sync (bool | None): Wait until document has been synced to disk.
+            return_new (bool | None): Additionally return the complete new document
+                under the attribute `new` in the result.
+
+        Returns:
+            dict: Document metadata (e.g. document id, key, revision).
+                If `return_new` is specified, the result contains the document
+                metadata in the "vertex" field and the new document in the "new" field.
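+
+        Example:
+            A minimal usage sketch, inside an async function; how the
+            VertexCollection wrapper (here called `students`) is obtained is
+            outside the scope of this method:
+
+                meta = await students.insert({"_key": "john"})
+                result = await students.insert({"_key": "jane"}, return_new=True)
+                new_doc = result["new"]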
+ + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. + + References: + - `create-a-vertex `__ + """ # noqa: E501 + if isinstance(vertex, dict): + vertex = cast(T, self._ensure_key_from_id(vertex)) + + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if return_new is not None: + params["returnNew"] = return_new + + request = Request( + method=Method.POST, + endpoint=f"/_api/gharial/{self._graph}/vertex/{self.name}", + params=params, + data=self._doc_serializer.dumps(vertex), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_NOT_FOUND: + msg = ( + "The graph cannot be found or the collection is not " + "part of the graph." + ) + raise DocumentInsertError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + + async def update( + self, + vertex: T, wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Update a vertex in the graph. + + Args: + vertex (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). + + Raises: + DocumentUpdateError: If update fails. + + References: + - `update-a-vertex `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if keep_null is not None: + params["keepNull"] = keep_null + if return_new is not None: + params["returnNew"] = return_new + if return_old is not None: + params["returnOld"] = return_old + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.PATCH, + endpoint=f"/_api/gharial/{self._graph}/vertex/" + f"{self._get_doc_id(cast(Json, vertex))}", + params=params, + headers=headers, + data=self._doc_serializer.dumps(vertex), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = ( + "Vertex or graph not found, or the collection is not part of " + "this graph. Error may also occur if the transaction ID is " + "unknown." 
+ ) + raise DocumentUpdateError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + + async def replace( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, - silent: Optional[bool] = None, - refill_index_caches: Optional[bool] = None, - version_attribute: Optional[str] = None, if_match: Optional[str] = None, - ) -> Result[bool | Json]: - """Replace a document. + ) -> Result[Json]: + """Replace a vertex in the graph. Args: - document (dict): New document. It must contain the "_key" or "_id" field. - Edge document must also have "_from" and "_to" fields. - ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the - document is ignored. If this is set to `False`, then the `_rev` - attribute given in the body document is taken as a precondition. - The document is only replaced if the current revision is the one - specified. + vertex (dict): New document. It must contain the "_key" or "_id" field. wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, no document metadata is returned. - This can be used to save resources. - refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document updates affect the edge index - or cache-enabled persistent indexes. - version_attribute (str | None): Support for simple external versioning to - document operations. if_match (str | None): You can conditionally replace a document based on a target revision id by using the "if-match" HTTP header. Returns: - bool | dict: Document metadata (e.g. document id, key, revision) or `True` - if **silent** is set to `True`. + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). Raises: DocumentRevisionError: If precondition was violated. DocumentReplaceError: If replace fails. 
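+
+        Example:
+            A minimal usage sketch, inside an async function; `students` is
+            assumed to be this vertex collection's API wrapper:
+
+                result = await students.replace(
+                    {"_key": "john", "name": "John", "age": 31},
+                    return_old=True,
+                )
+                old_doc = result["old"]
+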
References: - - `replace-a-document `__ + - `replace-a-vertex `__ """ # noqa: E501 params: Params = {} - if ignore_revs is not None: - params["ignoreRevs"] = ignore_revs if wait_for_sync is not None: params["waitForSync"] = wait_for_sync + if keep_null is not None: + params["keepNull"] = keep_null if return_new is not None: params["returnNew"] = return_new if return_old is not None: params["returnOld"] = return_old - if silent is not None: - params["silent"] = silent - if refill_index_caches is not None: - params["refillIndexCaches"] = refill_index_caches - if version_attribute is not None: - params["versionAttribute"] = version_attribute headers: RequestHeaders = {} if if_match is not None: @@ -958,83 +2040,67 @@ async def replace( request = Request( method=Method.PUT, - endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", + endpoint=f"/_api/gharial/{self._graph}/vertex/" + f"{self._get_doc_id(cast(Json, vertex))}", params=params, headers=headers, - data=self._doc_serializer.dumps(document), + data=self._doc_serializer.dumps(vertex), ) - def response_handler(resp: Response) -> bool | Json: + def response_handler(resp: Response) -> Json: if resp.is_success: - if silent is True: - return True - return self._executor.deserialize(resp.raw_body) + return self._parse_result(self.deserializer.loads(resp.raw_body)) msg: Optional[str] = None if resp.status_code == HTTP_PRECONDITION_FAILED: raise DocumentRevisionError(resp, request) elif resp.status_code == HTTP_NOT_FOUND: - msg = "Document, collection or transaction not found." + msg = ( + "Vertex or graph not found, or the collection is not part of " + "this graph. Error may also occur if the transaction ID is " + "unknown." + ) raise DocumentReplaceError(resp, request, msg) return await self._executor.execute(request, response_handler) async def delete( self, - document: T, - ignore_revs: Optional[bool] = None, + vertex: T, ignore_missing: bool = False, wait_for_sync: Optional[bool] = None, return_old: Optional[bool] = None, - silent: Optional[bool] = None, - refill_index_caches: Optional[bool] = None, if_match: Optional[str] = None, ) -> Result[bool | Json]: - """Delete a document. + """Delete a vertex from the graph. Args: - document (dict): Document ID, key or body. The body must contain the + vertex (dict): Document ID, key or body. The body must contain the "_key" or "_id" field. - ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the - document is ignored. If this is set to `False`, then the `_rev` - attribute given in the body document is taken as a precondition. - The document is only replaced if the current revision is the one - specified. ignore_missing (bool): Do not raise an exception on missing document. - This parameter has no effect in transactions where an exception is - always raised on failures. wait_for_sync (bool | None): Wait until operation has been synced to disk. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, no document metadata is returned. - This can be used to save resources. - refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document updates affect the edge index - or cache-enabled persistent indexes. - if_match (bool | None): You can conditionally remove a document based - on a target revision id by using the "if-match" HTTP header. 
+            if_match (str | None): You can conditionally remove a document based on a
+                target revision id by using the "if-match" HTTP header.

         Returns:
-            bool | dict: Document metadata (e.g. document id, key, revision) or `True`
-                if **silent** is set to `True` and the document was found.
+            bool | dict: `True` if vertex was deleted successfully, `False` if vertex
+                was not found and **ignore_missing** was set to `True` (does not apply
+                in transactions). Old document is returned if **return_old** is set
+                to `True`.

         Raises:
             DocumentRevisionError: If precondition was violated.
             DocumentDeleteError: If deletion fails.

         References:
-            - `remove-a-document `__
+            - `remove-a-vertex `__
         """  # noqa: E501
         params: Params = {}
-        if ignore_revs is not None:
-            params["ignoreRevs"] = ignore_revs
         if wait_for_sync is not None:
             params["waitForSync"] = wait_for_sync
         if return_old is not None:
             params["returnOld"] = return_old
-        if silent is not None:
-            params["silent"] = silent
-        if refill_index_caches is not None:
-            params["refillIndexCaches"] = refill_index_caches

         headers: RequestHeaders = {}
         if if_match is not None:
@@ -1042,672 +2108,531 @@ async def delete(

         request = Request(
             method=Method.DELETE,
-            endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}",
+            endpoint=f"/_api/gharial/{self._graph}/vertex/"
+            f"{self._get_doc_id(cast(Json, vertex))}",
             params=params,
             headers=headers,
         )

         def response_handler(resp: Response) -> bool | Json:
             if resp.is_success:
-                if silent is True:
-                    return True
-                return self._executor.deserialize(resp.raw_body)
+                data: Json = self.deserializer.loads(resp.raw_body)
+                if "old" in data:
+                    return cast(Json, data["old"])
+                return True
             msg: Optional[str] = None
             if resp.status_code == HTTP_PRECONDITION_FAILED:
                 raise DocumentRevisionError(resp, request)
             elif resp.status_code == HTTP_NOT_FOUND:
                 if resp.error_code == DOCUMENT_NOT_FOUND and ignore_missing:
                     return False
-                msg = "Document, collection or transaction not found."
-                raise DocumentDeleteError(resp, request, msg)
-
-        return await self._executor.execute(request, response_handler)
-
-    async def get_many(
-        self,
-        documents: Sequence[str | T],
-        allow_dirty_read: Optional[bool] = None,
-        ignore_revs: Optional[bool] = None,
-    ) -> Result[V]:
-        """Return multiple documents ignoring any missing ones.
-
-        Args:
-            documents (list): List of document IDs, keys or bodies. A search document
-                must contain at least a value for the `_key` field. A value for `_rev`
-                may be specified to verify whether the document has the same revision
-                value, unless `ignoreRevs` is set to false.
-            allow_dirty_read (bool | None): Allow reads from followers in a cluster.
-            ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the
-                document is ignored. If this is set to `False`, then the `_rev`
-                attribute given in the body document is taken as a precondition.
-                The document is only replaced if the current revision is the one
-                specified.
-
-        Returns:
-            list: List of documents. Missing ones are not included.
-
-        Raises:
-            DocumentGetError: If retrieval fails.
- - References: - - `get-multiple-documents `__ - """ # noqa: E501 - params: Params = {"onlyget": True} - if ignore_revs is not None: - params["ignoreRevs"] = ignore_revs - - headers: RequestHeaders = {} - if allow_dirty_read is not None: - if allow_dirty_read is True: - headers["x-arango-allow-dirty-read"] = "true" - else: - headers["x-arango-allow-dirty-read"] = "false" - - request = Request( - method=Method.PUT, - endpoint=f"/_api/document/{self.name}", - params=params, - headers=headers, - data=self._doc_serializer.dumps(documents), - ) - - def response_handler(resp: Response) -> V: - if not resp.is_success: - raise DocumentGetError(resp, request) - return self._doc_deserializer.loads_many(resp.raw_body) - - return await self._executor.execute(request, response_handler) - - async def find( - self, - filters: Optional[Json] = None, - skip: Optional[int] = None, - limit: Optional[int | str] = None, - allow_dirty_read: Optional[bool] = False, - sort: Optional[Jsons] = None, - ) -> Result[Cursor]: - """Return all documents that match the given filters. - - Args: - filters (dict | None): Query filters. - skip (int | None): Number of documents to skip. - limit (int | str | None): Maximum number of documents to return. - allow_dirty_read (bool): Allow reads from followers in a cluster. - sort (list | None): Document sort parameters. - - Returns: - Cursor: Document cursor. - - Raises: - DocumentGetError: If retrieval fails. - SortValidationError: If sort parameters are invalid. - """ - if not self._is_none_or_dict(filters): - raise ValueError("filters parameter must be a dict") - self._validate_sort_parameters(sort) - if not self._is_none_or_int(skip): - raise ValueError("skip parameter must be a non-negative int") - if not (self._is_none_or_int(limit) or limit == "null"): - raise ValueError("limit parameter must be a non-negative int") - - skip = skip if skip is not None else 0 - limit = limit if limit is not None else "null" - query = f""" - FOR doc IN @@collection - {self._build_filter_conditions(filters)} - LIMIT {skip}, {limit} - {self._build_sort_expression(sort)} - RETURN doc - """ - bind_vars = {"@collection": self.name} - data: Json = {"query": query, "bindVars": bind_vars, "count": True} - headers: RequestHeaders = {} - if allow_dirty_read is not None: - if allow_dirty_read is True: - headers["x-arango-allow-dirty-read"] = "true" - else: - headers["x-arango-allow-dirty-read"] = "false" - - request = Request( - method=Method.POST, - endpoint="/_api/cursor", - data=self.serializer.dumps(data), - headers=headers, - ) - - def response_handler(resp: Response) -> Cursor: - if not resp.is_success: - raise DocumentGetError(resp, request) - if self._executor.context == "async": - # We cannot have a cursor giving back async jobs - executor: NonAsyncExecutor = DefaultApiExecutor( - self._executor.connection + msg = ( + "Vertex or graph not found, or the collection is not part of " + "this graph. Error may also occur if the transaction ID is " + "unknown." ) - else: - executor = cast(NonAsyncExecutor, self._executor) - return Cursor(executor, self.deserializer.loads(resp.raw_body)) - - return await self._executor.execute(request, response_handler) - - async def update_match( - self, - filters: Json, - body: T, - limit: Optional[int | str] = None, - keep_none: Optional[bool] = None, - wait_for_sync: Optional[bool] = None, - merge_objects: Optional[bool] = None, - ) -> Result[int]: - """Update matching documents. - - Args: - filters (dict | None): Query filters. 
-            body (dict): Full or partial document body with the updates.
-            limit (int | str | None): Maximum number of documents to update.
-            keep_none (bool | None): If set to `True`, fields with value `None` are
-                retained in the document. Otherwise, they are removed completely.
-            wait_for_sync (bool | None): Wait until operation has been synced to disk.
-            merge_objects (bool | None): If set to `True`, sub-dictionaries are merged
-                instead of the new one overwriting the old one.
-
-        Returns:
-            int: Number of documents that got updated.
-
-        Raises:
-            DocumentUpdateError: If update fails.
-        """
-        if not self._is_none_or_dict(filters):
-            raise ValueError("filters parameter must be a dict")
-        if not (self._is_none_or_int(limit) or limit == "null"):
-            raise ValueError("limit parameter must be a non-negative int")
+                raise DocumentDeleteError(resp, request, msg)

-        sync = f", waitForSync: {wait_for_sync}" if wait_for_sync is not None else ""
-        query = f"""
-            FOR doc IN @@collection
-                {self._build_filter_conditions(filters)}
-                {f"LIMIT {limit}" if limit is not None else ""}
-                UPDATE doc WITH @body IN @@collection
-                OPTIONS {{ keepNull: @keep_none, mergeObjects: @merge {sync} }}
-        """  # noqa: E201 E202
-        bind_vars = {
-            "@collection": self.name,
-            "body": body,
-            "keep_none": keep_none,
-            "merge": merge_objects,
-        }
-        data = {"query": query, "bindVars": bind_vars}
+        return await self._executor.execute(request, response_handler)

-        request = Request(
-            method=Method.POST,
-            endpoint="/_api/cursor",
-            data=self.serializer.dumps(data),
-        )

-        def response_handler(resp: Response) -> int:
-            if resp.is_success:
-                result = self.deserializer.loads(resp.raw_body)
-                return cast(int, result["extra"]["stats"]["writesExecuted"])
-            raise DocumentUpdateError(resp, request)
+class EdgeCollection(Collection[T, U, V]):
+    """Edge collection API wrapper.

-        return await self._executor.execute(request, response_handler)
+    Args:
+        executor (ApiExecutor): API executor.
+        name (str): Collection name.
+        graph (str): Graph name.
+        doc_serializer (Serializer): Document serializer.
+        doc_deserializer (Deserializer): Document deserializer.
+    """

-    async def replace_match(
+    def __init__(
         self,
-        filters: Json,
-        body: T,
-        limit: Optional[int | str] = None,
-        wait_for_sync: Optional[bool] = None,
-    ) -> Result[int]:
-        """Replace matching documents.
+        executor: ApiExecutor,
+        graph: str,
+        name: str,
+        doc_serializer: Serializer[T],
+        doc_deserializer: Deserializer[U, V],
+    ) -> None:
+        super().__init__(executor, name, doc_serializer, doc_deserializer)
+        self._graph = graph
+
+    def __repr__(self) -> str:
+        return f"<EdgeCollection {self.name}>"
+
+    @staticmethod
+    def _parse_result(data: Json) -> Json:
+        """Parse the result from the response.

         Args:
-            filters (dict | None): Query filters.
-            body (dict): New document body.
-            limit (int | str | None): Maximum number of documents to replace.
-            wait_for_sync (bool | None): Wait until operation has been synced to disk.
+            data (dict): Response data.

         Returns:
-            int: Number of documents that got replaced.
-
-        Raises:
-            DocumentReplaceError: If replace fails.
""" - if not self._is_none_or_dict(filters): - raise ValueError("filters parameter must be a dict") - if not (self._is_none_or_int(limit) or limit == "null"): - raise ValueError("limit parameter must be a non-negative int") - - sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" - query = f""" - FOR doc IN @@collection - {self._build_filter_conditions(filters)} - {f"LIMIT {limit}" if limit is not None else ""} - REPLACE doc WITH @body IN @@collection - {f"OPTIONS {{ {sync} }}" if sync else ""} - """ # noqa: E201 E202 - bind_vars = { - "@collection": self.name, - "body": body, - } - data = {"query": query, "bindVars": bind_vars} - - request = Request( - method=Method.POST, - endpoint="/_api/cursor", - data=self.serializer.dumps(data), - ) + result: Json = {} + if "new" in data or "old" in data: + result["edge"] = data["edge"] + if "new" in data: + result["new"] = data["new"] + if "old" in data: + result["old"] = data["old"] + else: + result = data["edge"] + return result - def response_handler(resp: Response) -> int: - if resp.is_success: - result = self.deserializer.loads(resp.raw_body) - return cast(int, result["extra"]["stats"]["writesExecuted"]) - raise DocumentReplaceError(resp, request) + @property + def graph(self) -> str: + """Return the graph name. - return await self._executor.execute(request, response_handler) + Returns: + str: Graph name. + """ + return self._graph - async def delete_match( + async def get( self, - filters: Json, - limit: Optional[int | str] = None, - wait_for_sync: Optional[bool] = None, - ) -> Result[int]: - """Delete matching documents. + edge: str | Json, + rev: Optional[str] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[Json]]: + """Return an edge from the graph. Args: - filters (dict | None): Query filters. - limit (int | str | None): Maximum number of documents to delete. - wait_for_sync (bool | None): Wait until operation has been synced to disk. + edge (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + rev (str | None): If this is set a document is only returned if it + has exactly this revision. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. Returns: - int: Number of documents that got deleted. + dict | None: Document or `None` if not found. Raises: - DocumentDeleteError: If delete fails. - """ - if not self._is_none_or_dict(filters): - raise ValueError("filters parameter must be a dict") - if not (self._is_none_or_int(limit) or limit == "null"): - raise ValueError("limit parameter must be a non-negative int") + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. 
- sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" - query = f""" - FOR doc IN @@collection - {self._build_filter_conditions(filters)} - {f"LIMIT {limit}" if limit is not None else ""} - REMOVE doc IN @@collection - {f"OPTIONS {{ {sync} }}" if sync else ""} - """ # noqa: E201 E202 - bind_vars = {"@collection": self.name} - data = {"query": query, "bindVars": bind_vars} + References: + - `get-an-edge `__ + """ # noqa: E501 + handle = self._get_doc_id(edge) + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + if if_none_match is not None: + headers["If-None-Match"] = if_none_match + + params: Params = {} + if rev is not None: + params["rev"] = rev request = Request( - method=Method.POST, - endpoint="/_api/cursor", - data=self.serializer.dumps(data), + method=Method.GET, + endpoint=f"/_api/gharial/{self._graph}/edge/{handle}", + headers=headers, + params=params, ) - def response_handler(resp: Response) -> int: + def response_handler(resp: Response) -> Optional[Json]: if resp.is_success: - result = self.deserializer.loads(resp.raw_body) - return cast(int, result["extra"]["stats"]["writesExecuted"]) - raise DocumentDeleteError(resp, request) + return self._parse_result(self.deserializer.loads(resp.raw_body)) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND: + return None + else: + raise DocumentGetError(resp, request) + elif resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + else: + raise DocumentGetError(resp, request) return await self._executor.execute(request, response_handler) - async def insert_many( + async def insert( self, - documents: Sequence[T], + edge: T, wait_for_sync: Optional[bool] = None, return_new: Optional[bool] = None, - return_old: Optional[bool] = None, - silent: Optional[bool] = None, - overwrite: Optional[bool] = None, - overwrite_mode: Optional[str] = None, - keep_null: Optional[bool] = None, - merge_objects: Optional[bool] = None, - refill_index_caches: Optional[bool] = None, - version_attribute: Optional[str] = None, - ) -> Result[Jsons]: - """Insert multiple documents. - - Note: - If inserting a document fails, the exception is not raised but - returned as an object in the "errors" list. It is up to you to - inspect the list to determine which documents were inserted - successfully (returns document metadata) and which were not - (returns exception object). + ) -> Result[Json]: + """Insert a new edge document. Args: - documents (list): Documents to insert. If an item contains the "_key" or - "_id" field, the value is used as the key of the new document - (otherwise it is auto-generated). Any "_rev" field is ignored. - wait_for_sync (bool | None): Wait until documents have been synced to disk. + edge (dict): Document to insert. It must contain "_from" and + "_to" fields. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. - return_old (bool | None): Additionally return the complete old document - under the attribute `old` in the result. Only available if the - `overwrite` option is used. - silent (bool | None): If set to `True`, an empty object is returned as - response if all document operations succeed. 
No meta-data is returned - for the created documents. If any of the operations raises an error, - an array with the error object(s) is returned. - overwrite (bool | None): If set to `True`, operation does not fail on - duplicate key and existing document is overwritten (replace-insert). - overwrite_mode (str | None): Overwrite mode. Supersedes **overwrite** - option. May be one of "ignore", "replace", "update" or "conflict". - keep_null (bool | None): If set to `True`, fields with value None are - retained in the document. Otherwise, they are removed completely. - Applies only when **overwrite_mode** is set to "update" - (update-insert). - merge_objects (bool | None): If set to `True`, sub-dictionaries are merged - instead of the new one overwriting the old one. Applies only when - **overwrite_mode** is set to "update" (update-insert). - refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document operations affect the edge index - or cache-enabled persistent indexes. - version_attribute (str | None): Support for simple external versioning to - document operations. Only applicable if **overwrite** is set to `True` - or **overwrite_mode** is set to "update" or "replace". Returns: - list: Documents metadata (e.g. document id, key, revision) and - errors or just errors if **silent** is set to `True`. + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "edge" field and the new document in the "new" field. Raises: DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. References: - - `create-multiple-documents `__ + - `create-an-edge `__ """ # noqa: E501 + if isinstance(edge, dict): + edge = cast(T, self._ensure_key_from_id(edge)) + params: Params = {} if wait_for_sync is not None: params["waitForSync"] = wait_for_sync if return_new is not None: params["returnNew"] = return_new - if return_old is not None: - params["returnOld"] = return_old - if silent is not None: - params["silent"] = silent - if overwrite is not None: - params["overwrite"] = overwrite - if overwrite_mode is not None: - params["overwriteMode"] = overwrite_mode - if keep_null is not None: - params["keepNull"] = keep_null - if merge_objects is not None: - params["mergeObjects"] = merge_objects - if refill_index_caches is not None: - params["refillIndexCaches"] = refill_index_caches - if version_attribute is not None: - params["versionAttribute"] = version_attribute request = Request( method=Method.POST, - endpoint=f"/_api/document/{self.name}", - data=self._doc_serializer.dumps(documents), + endpoint=f"/_api/gharial/{self._graph}/edge/{self.name}", params=params, + data=self._doc_serializer.dumps(edge), ) - def response_handler( - resp: Response, - ) -> Jsons: - if not resp.is_success: - raise DocumentInsertError(resp, request) - return self.deserializer.loads_many(resp.raw_body) + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_NOT_FOUND: + msg = ( + "The graph cannot be found or the edge collection is not " + "part of the graph. It is also possible that the vertex " + "collection referenced in the _from or _to attribute is not part " + "of the graph or the vertex collection is part of the graph, but " + "does not exist. Finally check that _from or _to vertex do exist." 
+ ) + raise DocumentInsertError(resp, request, msg) return await self._executor.execute(request, response_handler) - async def replace_many( + async def update( self, - documents: Sequence[T], + edge: T, wait_for_sync: Optional[bool] = None, - ignore_revs: Optional[bool] = None, + keep_null: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, - silent: Optional[bool] = None, - refill_index_caches: Optional[bool] = None, - version_attribute: Optional[str] = None, - ) -> Result[Jsons]: - """Insert multiple documents. - - Note: - If replacing a document fails, the exception is not raised but - returned as an object in the "errors" list. It is up to you to - inspect the list to determine which documents were replaced - successfully (returns document metadata) and which were not - (returns exception object). + if_match: Optional[str] = None, + ) -> Result[Json]: + """Update an edge in the graph. Args: - documents (list): New documents to replace the old ones. An item must - contain the "_key" or "_id" field. - wait_for_sync (bool | None): Wait until documents have been synced to disk. - ignore_revs (bool | None): If this is set to `False`, then any `_rev` - attribute given in a body document is taken as a precondition. The - document is only replaced if the current revision is the one - specified. + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, an empty object is returned as - response if all document operations succeed. No meta-data is returned - for the created documents. If any of the operations raises an error, - an array with the error object(s) is returned. - refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document operations affect the edge index - or cache-enabled persistent indexes. - version_attribute (str | None): Support for simple external versioning to - document operations. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. Returns: - list: Documents metadata (e.g. document id, key, revision) and - errors or just errors if **silent** is set to `True`. + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "edge" field and two additional fields + ("new" and "old"). Raises: - DocumentReplaceError: If replacing fails. + DocumentUpdateError: If update fails. 
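+
+        Example:
+            A minimal usage sketch, inside an async function; `knows` and the
+            vertex IDs are illustrative assumptions:
+
+                await knows.update(
+                    {
+                        "_key": "123",
+                        "_from": "students/john",
+                        "_to": "students/jane",
+                        "weight": 2,
+                    }
+                )
+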
References: - - `replace-multiple-documents `__ + - `update-an-edge `__ """ # noqa: E501 params: Params = {} if wait_for_sync is not None: params["waitForSync"] = wait_for_sync - if ignore_revs is not None: - params["ignoreRevs"] = ignore_revs + if keep_null is not None: + params["keepNull"] = keep_null if return_new is not None: params["returnNew"] = return_new if return_old is not None: params["returnOld"] = return_old - if silent is not None: - params["silent"] = silent - if refill_index_caches is not None: - params["refillIndexCaches"] = refill_index_caches - if version_attribute is not None: - params["versionAttribute"] = version_attribute + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match request = Request( - method=Method.PUT, - endpoint=f"/_api/document/{self.name}", - data=self._doc_serializer.dumps(documents), + method=Method.PATCH, + endpoint=f"/_api/gharial/{self._graph}/edge/" + f"{self._get_doc_id(cast(Json, edge))}", params=params, + headers=headers, + data=self._doc_serializer.dumps(edge), ) - def response_handler( - resp: Response, - ) -> Jsons: - if not resp.is_success: - raise DocumentReplaceError(resp, request) - return self.deserializer.loads_many(resp.raw_body) + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = ( + "The graph cannot be found or the edge collection is not " + "part of the graph. It is also possible that the vertex " + "collection referenced in the _from or _to attribute is not part " + "of the graph or the vertex collection is part of the graph, but " + "does not exist. Finally check that _from or _to vertex do exist." + ) + raise DocumentUpdateError(resp, request, msg) return await self._executor.execute(request, response_handler) - async def update_many( + async def replace( self, - documents: Sequence[T], + edge: T, wait_for_sync: Optional[bool] = None, - ignore_revs: Optional[bool] = None, + keep_null: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, - silent: Optional[bool] = None, - keep_null: Optional[bool] = None, - merge_objects: Optional[bool] = None, - refill_index_caches: Optional[bool] = None, - version_attribute: Optional[str] = None, - ) -> Result[Jsons]: - """Insert multiple documents. - - Note: - If updating a document fails, the exception is not raised but - returned as an object in the "errors" list. It is up to you to - inspect the list to determine which documents were updated - successfully (returned as document metadata) and which were not - (returned as exception object). + if_match: Optional[str] = None, + ) -> Result[Json]: + """Replace an edge in the graph. Args: - documents (list): Documents to update. An item must contain the "_key" or - "_id" field. - wait_for_sync (bool | None): Wait until documents have been synced to disk. - ignore_revs (bool | None): If this is set to `False`, then any `_rev` - attribute given in a body document is taken as a precondition. The - document is only updated if the current revision is the one - specified. + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + wait_for_sync (bool | None): Wait until document has been synced to disk. 
+ keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, an empty object is returned as - response if all document operations succeed. No meta-data is returned - for the created documents. If any of the operations raises an error, - an array with the error object(s) is returned. - keep_null (bool | None): If set to `True`, fields with value None are - retained in the document. Otherwise, they are removed completely. - Applies only when **overwrite_mode** is set to "update" - (update-insert). - merge_objects (bool | None): If set to `True`, sub-dictionaries are merged - instead of the new one overwriting the old one. Applies only when - **overwrite_mode** is set to "update" (update-insert). - refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document operations affect the edge index - or cache-enabled persistent indexes. - version_attribute (str | None): Support for simple external versioning to - document operations. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. Returns: - list: Documents metadata (e.g. document id, key, revision) and - errors or just errors if **silent** is set to `True`. + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "edge" field and two additional fields + ("new" and "old"). Raises: - DocumentUpdateError: If update fails. + DocumentRevisionError: If precondition was violated. + DocumentReplaceError: If replace fails. 
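+
+        Example:
+            A minimal usage sketch, inside an async function; `knows` and the
+            vertex IDs are illustrative assumptions:
+
+                result = await knows.replace(
+                    {
+                        "_key": "123",
+                        "_from": "students/john",
+                        "_to": "students/jane",
+                        "weight": 3,
+                    },
+                    return_old=True,
+                )
+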
         References:
-            - `replace-a-document `__
+            - `replace-an-edge `__
         """  # noqa: E501
         params: Params = {}
         if wait_for_sync is not None:
             params["waitForSync"] = wait_for_sync
+        if keep_null is not None:
+            params["keepNull"] = keep_null
         if return_new is not None:
             params["returnNew"] = return_new
         if return_old is not None:
             params["returnOld"] = return_old

         headers: RequestHeaders = {}
         if if_match is not None:
             headers["If-Match"] = if_match

         request = Request(
             method=Method.PUT,
-            endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}",
+            endpoint=f"/_api/gharial/{self._graph}/edge/"
+            f"{self._get_doc_id(cast(Json, edge))}",
             params=params,
             headers=headers,
-            data=self._doc_serializer.dumps(document),
+            data=self._doc_serializer.dumps(edge),
         )

-        def response_handler(resp: Response) -> bool | Json:
+        def response_handler(resp: Response) -> Json:
             if resp.is_success:
-                if silent is True:
-                    return True
-                return self._executor.deserialize(resp.raw_body)
+                return self._parse_result(self.deserializer.loads(resp.raw_body))
             msg: Optional[str] = None
             if resp.status_code == HTTP_PRECONDITION_FAILED:
                 raise DocumentRevisionError(resp, request)
             elif resp.status_code == HTTP_NOT_FOUND:
-                msg = "Document, collection or transaction not found."
+                msg = (
+                    "The graph cannot be found or the edge collection is not "
+                    "part of the graph. It is also possible that the vertex "
+                    "collection referenced in the _from or _to attribute is not part "
+                    "of the graph or the vertex collection is part of the graph, but "
+                    "does not exist. Finally check that _from or _to vertex do exist."
+                )
                 raise DocumentReplaceError(resp, request, msg)

         return await self._executor.execute(request, response_handler)

     async def delete(
         self,
-        document: T,
-        ignore_revs: Optional[bool] = None,
+        edge: T,
         ignore_missing: bool = False,
         wait_for_sync: Optional[bool] = None,
         return_old: Optional[bool] = None,
-        silent: Optional[bool] = None,
-        refill_index_caches: Optional[bool] = None,
         if_match: Optional[str] = None,
     ) -> Result[bool | Json]:
-        """Delete a document.
+        """Delete an edge from the graph.

         Args:
-            document (dict): Document ID, key or body. The body must contain the
+            edge (dict): Document ID, key or body. The body must contain the
+                "_key" or "_id" field.
             ignore_missing (bool): Do not raise an exception on missing document.
+            wait_for_sync (bool | None): Wait until operation has been synced to disk.
            return_old (bool | None): Additionally return the complete old document
                under the attribute `old` in the result.
-            silent (bool | None): If set to `True`, an empty object is returned as
-                response if all document operations succeed. No meta-data is returned
-                for the created documents. If any of the operations raises an error,
-                an array with the error object(s) is returned.
-            refill_index_caches (bool | None): Whether to add new entries to
-                in-memory index caches if document operations affect the edge index
-                or cache-enabled persistent indexes.
+            if_match (str | None): You can conditionally delete a document based on a
+                target revision id by using the "if-match" HTTP header.

        Returns:
-            list: Documents metadata (e.g. document id, key, revision) and
-                errors or just errors if **silent** is set to `True`.
+            bool | dict: `True` if the edge was deleted successfully, `False` if the
+                edge was not found and **ignore_missing** was set to `True` (does not
+                apply in transactions). Old document is returned if **return_old** is
+                set to `True`.

        Raises:
-            DocumentRemoveError: If removal fails.
+            DocumentRevisionError: If precondition was violated.
+            DocumentDeleteError: If deletion fails.

        References:
-            - `remove-multiple-documents `__
+            - `remove-an-edge `__
        """  # noqa: E501
        params: Params = {}
        if wait_for_sync is not None:
            params["waitForSync"] = wait_for_sync
-        if ignore_revs is not None:
-            params["ignoreRevs"] = ignore_revs
        if return_old is not None:
            params["returnOld"] = return_old
-        if silent is not None:
-            params["silent"] = silent
-        if refill_index_caches is not None:
-            params["refillIndexCaches"] = refill_index_caches
+
+        headers: RequestHeaders = {}
+        if if_match is not None:
+            headers["If-Match"] = if_match

        request = Request(
            method=Method.DELETE,
-            endpoint=f"/_api/document/{self.name}",
-            data=self._doc_serializer.dumps(documents),
+            endpoint=f"/_api/gharial/{self._graph}/edge/"
+            f"{self._get_doc_id(cast(Json, edge))}",
            params=params,
+            headers=headers,
        )

-        def response_handler(
-            resp: Response,
-        ) -> Jsons:
+        def response_handler(resp: Response) -> bool | Json:
+            if resp.is_success:
+                data: Json = self.deserializer.loads(resp.raw_body)
+                if "old" in data:
+                    return cast(Json, data["old"])
+                return True
+            msg: Optional[str] = None
+            if resp.status_code == HTTP_PRECONDITION_FAILED:
+                raise DocumentRevisionError(resp, request)
+            elif resp.status_code == HTTP_NOT_FOUND:
+                if resp.error_code == DOCUMENT_NOT_FOUND and ignore_missing:
+                    return False
+                msg = (
+                    "Either the graph cannot be found, the edge collection is not "
+                    "part of the graph, or the edge does not exist"
+                )
+            raise DocumentDeleteError(resp, request, msg)
+
+        return await self._executor.execute(request, response_handler)
+
+    async def edges(
+        self,
+        vertex: str | Json,
+        direction: Optional[Literal["in", "out"]] = None,
+        allow_dirty_read: Optional[bool] = None,
+    ) -> Result[Json]:
+        """Return the edges starting or ending at the specified vertex.
+
+        Args:
+            vertex (str | dict): Document ID, key or body.
+            direction (str | None): Direction of the edges to return. Selects `in`
+                or `out` direction for edges. If not set, any edges are returned.
+            allow_dirty_read (bool | None): Allow reads from followers in a cluster.
+
+        Returns:
+            dict: List of edges and statistics.
+
+        Raises:
+            EdgeListError: If retrieval fails.
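For review purposes, a minimal usage sketch of the `delete` and `edges` wrappers above, assuming a `teach` EdgeCollection wrapper obtained via `Graph.edge_collection` as in docs/graph.rst; collection and key names are illustrative:

.. code-block:: python

    # Delete by key; returns False instead of raising when the edge is
    # missing and ignore_missing=True.
    deleted = await teach.delete("jon-CSC101", ignore_missing=True)

    # Ask for the old document back instead of a plain boolean.
    old = await teach.delete("jon-CSC102", return_old=True)

    # List inbound edges of a vertex; the response body also carries
    # statistics alongside the "edges" list.
    result = await teach.edges("teachers/jon", direction="in")
    for edge in result["edges"]:
        print(edge["_from"], "->", edge["_to"])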
+ + References: + - `get-inbound-and-outbound-edges `__ + """ # noqa: E501 + params: Params = { + "vertex": self._get_doc_id(vertex, validate=False), + } + if direction is not None: + params["direction"] = direction + + headers: RequestHeaders = {} + if allow_dirty_read is not None: + headers["x-arango-allow-dirty-read"] = ( + "true" if allow_dirty_read else "false" + ) + + request = Request( + method=Method.GET, + endpoint=f"/_api/edges/{self._name}", + params=params, + headers=headers, + ) + + def response_handler(resp: Response) -> Json: if not resp.is_success: - raise DocumentDeleteError(resp, request) - return self.deserializer.loads_many(resp.raw_body) + raise EdgeListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + for key in ("error", "code"): + body.pop(key) + return body return await self._executor.execute(request, response_handler) + + async def link( + self, + from_vertex: str | Json, + to_vertex: str | Json, + data: Optional[Json] = None, + wait_for_sync: Optional[bool] = None, + return_new: bool = False, + ) -> Result[Json]: + """Insert a new edge document linking the given vertices. + + Args: + from_vertex (str | dict): "_from" vertex document ID or body with "_id" + field. + to_vertex (str | dict): "_to" vertex document ID or body with "_id" field. + data (dict | None): Any extra data for the new edge document. If it has + "_key" or "_id" field, its value is used as key of the new edge document + (otherwise it is auto-generated). + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_new: Optional[bool]: Additionally return the complete new document + under the attribute `new` in the result. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "edge" field and the new document in the "new" field. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. + """ + edge: Json = { + "_from": self._get_doc_id(from_vertex, validate=False), + "_to": self._get_doc_id(to_vertex, validate=False), + } + if data is not None: + edge.update(self._ensure_key_from_id(data)) + return await self.insert( + cast(T, edge), wait_for_sync=wait_for_sync, return_new=return_new + ) diff --git a/arangoasync/database.py b/arangoasync/database.py index 60f6ee9..3cac02d 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -88,6 +88,40 @@ class Database: def __init__(self, executor: ApiExecutor) -> None: self._executor = executor + def _get_doc_serializer( + self, + doc_serializer: Optional[Serializer[T]] = None, + ) -> Serializer[T]: + """Figure out the document serializer, defaulting to `Json`. + + Args: + doc_serializer (Serializer | None): Optional serializer. + + Returns: + Serializer: Either the passed serializer or the default one. + """ + if doc_serializer is None: + return cast(Serializer[T], self.serializer) + else: + return doc_serializer + + def _get_doc_deserializer( + self, + doc_deserializer: Optional[Deserializer[U, V]] = None, + ) -> Deserializer[U, V]: + """Figure out the document deserializer, defaulting to `Json`. + + Args: + doc_deserializer (Deserializer | None): Optional deserializer. + + Returns: + Deserializer: Either the passed deserializer or the default one. 
+ """ + if doc_deserializer is None: + return cast(Deserializer[U, V], self.deserializer) + else: + return doc_deserializer + @property def connection(self) -> Connection: """Return the HTTP connection.""" @@ -390,17 +424,11 @@ def collection( Returns: StandardCollection: Collection API wrapper. """ - if doc_serializer is None: - serializer = cast(Serializer[T], self.serializer) - else: - serializer = doc_serializer - if doc_deserializer is None: - deserializer = cast(Deserializer[U, V], self.deserializer) - else: - deserializer = doc_deserializer - return StandardCollection[T, U, V]( - self._executor, name, serializer, deserializer + self._executor, + name, + self._get_doc_serializer(doc_serializer), + self._get_doc_deserializer(doc_deserializer), ) async def collections( @@ -604,16 +632,11 @@ async def create_collection( def response_handler(resp: Response) -> StandardCollection[T, U, V]: if not resp.is_success: raise CollectionCreateError(resp, request) - if doc_serializer is None: - serializer = cast(Serializer[T], self.serializer) - else: - serializer = doc_serializer - if doc_deserializer is None: - deserializer = cast(Deserializer[U, V], self.deserializer) - else: - deserializer = doc_deserializer return StandardCollection[T, U, V]( - self._executor, name, serializer, deserializer + self._executor, + name, + self._get_doc_serializer(doc_serializer), + self._get_doc_deserializer(doc_deserializer), ) return await self._executor.execute(request, response_handler) @@ -661,16 +684,30 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) - def graph(self, name: str) -> Graph: + def graph( + self, + name: str, + doc_serializer: Optional[Serializer[T]] = None, + doc_deserializer: Optional[Deserializer[U, V]] = None, + ) -> Graph[T, U, V]: """Return the graph API wrapper. Args: name (str): Graph name. + doc_serializer (Serializer): Custom document serializer. + This will be used only for document operations. + doc_deserializer (Deserializer): Custom document deserializer. + This will be used only for document operations. Returns: Graph: Graph API wrapper. """ - return Graph(self._executor, name) + return Graph[T, U, V]( + self._executor, + name, + self._get_doc_serializer(doc_serializer), + self._get_doc_deserializer(doc_deserializer), + ) async def has_graph(self, name: str) -> Result[bool]: """Check if a graph exists in the database. @@ -679,7 +716,7 @@ async def has_graph(self, name: str) -> Result[bool]: name (str): Graph name. Returns: - bool: True if the graph exists, False otherwise. + bool: `True` if the graph exists, `False` otherwise. Raises: GraphListError: If the operation fails. @@ -720,17 +757,23 @@ def response_handler(resp: Response) -> List[GraphProperties]: async def create_graph( self, name: str, + doc_serializer: Optional[Serializer[T]] = None, + doc_deserializer: Optional[Deserializer[U, V]] = None, edge_definitions: Optional[Sequence[Json]] = None, is_disjoint: Optional[bool] = None, is_smart: Optional[bool] = None, options: Optional[GraphOptions | Json] = None, orphan_collections: Optional[Sequence[str]] = None, wait_for_sync: Optional[bool] = None, - ) -> Result[Graph]: + ) -> Result[Graph[T, U, V]]: """Create a new graph. Args: name (str): Graph name. + doc_serializer (Serializer): Custom document serializer. + This will be used only for document operations. + doc_deserializer (Deserializer): Custom document deserializer. + This will be used only for document operations. 
edge_definitions (list | None): List of edge definitions, where each edge definition entry is a dictionary with fields "collection" (name of the edge collection), "from" (list of vertex collection names) and "to" @@ -782,10 +825,15 @@ async def create_graph( params=params, ) - def response_handler(resp: Response) -> Graph: - if resp.is_success: - return Graph(self._executor, name) - raise GraphCreateError(resp, request) + def response_handler(resp: Response) -> Graph[T, U, V]: + if not resp.is_success: + raise GraphCreateError(resp, request) + return Graph[T, U, V]( + self._executor, + name, + self._get_doc_serializer(doc_serializer), + self._get_doc_deserializer(doc_deserializer), + ) return await self._executor.execute(request, response_handler) diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index a62e64e..c4ee40a 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -263,6 +263,30 @@ class DocumentUpdateError(ArangoServerError): """Failed to update document.""" +class EdgeCollectionListError(ArangoServerError): + """Failed to retrieve edge collections.""" + + +class EdgeDefinitionListError(ArangoServerError): + """Failed to retrieve edge definitions.""" + + +class EdgeDefinitionCreateError(ArangoServerError): + """Failed to create edge definition.""" + + +class EdgeDefinitionReplaceError(ArangoServerError): + """Failed to replace edge definition.""" + + +class EdgeDefinitionDeleteError(ArangoServerError): + """Failed to delete edge definition.""" + + +class EdgeListError(ArangoServerError): + """Failed to retrieve edges coming in and out of a vertex.""" + + class GraphCreateError(ArangoServerError): """Failed to create the graph.""" @@ -275,6 +299,10 @@ class GraphListError(ArangoServerError): """Failed to retrieve graphs.""" +class GraphPropertiesError(ArangoServerError): + """Failed to retrieve graph properties.""" + + class IndexCreateError(ArangoServerError): """Failed to create collection index.""" @@ -389,3 +417,15 @@ class UserReplaceError(ArangoServerError): class UserUpdateError(ArangoServerError): """Failed to update user.""" + + +class VertexCollectionCreateError(ArangoServerError): + """Failed to create vertex collection.""" + + +class VertexCollectionDeleteError(ArangoServerError): + """Failed to delete vertex collection.""" + + +class VertexCollectionListError(ArangoServerError): + """Failed to retrieve vertex collections.""" diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 2047d96..059a53e 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -1,16 +1,60 @@ +__all__ = ["Graph"] + + +from typing import Generic, List, Literal, Optional, Sequence, TypeVar, cast + +from arangoasync.collection import Collection, EdgeCollection, VertexCollection +from arangoasync.exceptions import ( + EdgeCollectionListError, + EdgeDefinitionCreateError, + EdgeDefinitionDeleteError, + EdgeDefinitionListError, + EdgeDefinitionReplaceError, + GraphPropertiesError, + VertexCollectionCreateError, + VertexCollectionDeleteError, + VertexCollectionListError, +) from arangoasync.executor import ApiExecutor +from arangoasync.request import Method, Request +from arangoasync.response import Response +from arangoasync.result import Result +from arangoasync.serialization import Deserializer, Serializer +from arangoasync.typings import ( + EdgeDefinitionOptions, + GraphProperties, + Json, + Jsons, + Params, + VertexCollectionOptions, +) +T = TypeVar("T") # Serializer type +U = TypeVar("U") # Deserializer loads +V = TypeVar("V") # Deserializer 
loads_many

-class Graph:
+
+class Graph(Generic[T, U, V]):
    """Graph API wrapper, representing a graph in ArangoDB.

    Args:
-        executor: API executor. Required to execute the API requests.
+        executor (ApiExecutor): Required to execute the API requests.
+        name (str): Graph name.
+        doc_serializer (Serializer): Document serializer.
+        doc_deserializer (Deserializer): Document deserializer.
    """

-    def __init__(self, executor: ApiExecutor, name: str) -> None:
+    def __init__(
+        self,
+        executor: ApiExecutor,
+        name: str,
+        doc_serializer: Serializer[T],
+        doc_deserializer: Deserializer[U, V],
+    ) -> None:
        self._executor = executor
        self._name = name
+        self._doc_serializer = doc_serializer
+        self._doc_deserializer = doc_deserializer

    def __repr__(self) -> str:
        return f"<Graph {self._name}>"
@@ -19,3 +63,988 @@ def __repr__(self) -> str:
    def name(self) -> str:
        """Name of the graph."""
        return self._name
+
+    @property
+    def db_name(self) -> str:
+        """Return the name of the current database.
+
+        Returns:
+            str: Database name.
+        """
+        return self._executor.db_name
+
+    @property
+    def serializer(self) -> Serializer[Json]:
+        """Return the serializer."""
+        return self._executor.serializer
+
+    @property
+    def deserializer(self) -> Deserializer[Json, Jsons]:
+        """Return the deserializer."""
+        return self._executor.deserializer
+
+    async def properties(self) -> Result[GraphProperties]:
+        """Get the properties of the graph.
+
+        Returns:
+            GraphProperties: Properties of the graph.
+
+        Raises:
+            GraphPropertiesError: If the operation fails.
+
+        References:
+            - `get-a-graph `__
+        """  # noqa: E501
+        request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}")
+
+        def response_handler(resp: Response) -> GraphProperties:
+            if not resp.is_success:
+                raise GraphPropertiesError(resp, request)
+            body = self.deserializer.loads(resp.raw_body)
+            return GraphProperties(body["graph"])
+
+        return await self._executor.execute(request, response_handler)
+
+    def vertex_collection(self, name: str) -> VertexCollection[T, U, V]:
+        """Returns the vertex collection API wrapper.
+
+        Args:
+            name (str): Vertex collection name.
+
+        Returns:
+            VertexCollection: Vertex collection API wrapper.
+        """
+        return VertexCollection[T, U, V](
+            executor=self._executor,
+            graph=self._name,
+            name=name,
+            doc_serializer=self._doc_serializer,
+            doc_deserializer=self._doc_deserializer,
+        )
+
+    async def vertex_collections(self) -> Result[List[str]]:
+        """Get the names of all vertex collections in the graph.
+
+        Returns:
+            list: List of vertex collection names.
+
+        Raises:
+            VertexCollectionListError: If the operation fails.
+
+        References:
+            - `list-vertex-collections `__
+        """  # noqa: E501
+        request = Request(
+            method=Method.GET,
+            endpoint=f"/_api/gharial/{self._name}/vertex",
+        )
+
+        def response_handler(resp: Response) -> List[str]:
+            if not resp.is_success:
+                raise VertexCollectionListError(resp, request)
+            body = self.deserializer.loads(resp.raw_body)
+            return list(sorted(body["collections"]))
+
+        return await self._executor.execute(request, response_handler)
+
+    async def has_vertex_collection(self, name: str) -> Result[bool]:
+        """Check if the graph has the given vertex collection.
+
+        Args:
+            name (str): Vertex collection name.
+
+        Returns:
+            bool: `True` if the graph has the vertex collection, `False` otherwise.
+
+        Raises:
+            VertexCollectionListError: If the operation fails.
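A short sketch of the wrapper surface defined above, assuming a connected `db` handle as in the examples in docs/graph.rst:

.. code-block:: python

    school = db.graph("school")

    # GraphProperties instance with typed accessors.
    props = await school.properties()
    print(props.name, props.edge_definitions, props.orphan_collections)

    # Sorted list of vertex collection names.
    names = await school.vertex_collections()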
+ """ + request = Request( + method=Method.GET, + endpoint=f"/_api/gharial/{self._name}/vertex", + ) + + def response_handler(resp: Response) -> bool: + if not resp.is_success: + raise VertexCollectionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return name in body["collections"] + + return await self._executor.execute(request, response_handler) + + async def create_vertex_collection( + self, + name: str, + options: Optional[VertexCollectionOptions | Json] = None, + ) -> Result[VertexCollection[T, U, V]]: + """Create a vertex collection in the graph. + + Args: + name (str): Vertex collection name. + options (dict | VertexCollectionOptions | None): Extra options for + creating vertex collections. + + Returns: + VertexCollection: Vertex collection API wrapper. + + Raises: + VertexCollectionCreateError: If the operation fails. + + References: + - `add-a-vertex-collection `__ + """ # noqa: E501 + data: Json = {"collection": name} + + if options is not None: + if isinstance(options, VertexCollectionOptions): + data["options"] = options.to_dict() + else: + data["options"] = options + + request = Request( + method=Method.POST, + endpoint=f"/_api/gharial/{self._name}/vertex", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> VertexCollection[T, U, V]: + if not resp.is_success: + raise VertexCollectionCreateError(resp, request) + return self.vertex_collection(name) + + return await self._executor.execute(request, response_handler) + + async def delete_vertex_collection(self, name: str, purge: bool = False) -> None: + """Remove a vertex collection from the graph. + + Args: + name (str): Vertex collection name. + purge (bool): If set to `True`, the vertex collection is not just deleted + from the graph but also from the database completely. Note that you + cannot remove vertex collections that are used in one of the edge + definitions of the graph. + + Raises: + VertexCollectionDeleteError: If the operation fails. + + References: + - `remove-a-vertex-collection `__ + """ # noqa: E501 + request = Request( + method=Method.DELETE, + endpoint=f"/_api/gharial/{self._name}/vertex/{name}", + params={"dropCollection": purge}, + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise VertexCollectionDeleteError(resp, request) + + await self._executor.execute(request, response_handler) + + async def has_vertex( + self, + vertex: str | Json, + allow_dirty_read: bool = False, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[bool]: + """Check if the vertex exists in the graph. + + Args: + vertex (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + allow_dirty_read (bool): Allow reads from followers in a cluster. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + `True` if the document exists, `False` otherwise. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. 
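A sketch of the vertex-collection management calls above; `school` is a Graph wrapper as in the docs examples, and the collection name is illustrative:

.. code-block:: python

    if await school.has_vertex_collection("teachers"):
        teachers = school.vertex_collection("teachers")
    else:
        teachers = await school.create_vertex_collection("teachers")

    # Remove it from the graph and drop it from the database entirely.
    await school.delete_vertex_collection("teachers", purge=True)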
+ """ # noqa: E501 + col = Collection.get_col_name(vertex) + return await self.vertex_collection(col).has( + vertex, + allow_dirty_read=allow_dirty_read, + if_match=if_match, + if_none_match=if_none_match, + ) + + async def vertex( + self, + vertex: str | Json, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[Json]]: + """Return a vertex document. + + Args: + vertex (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + Document or `None` if not found. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. + + References: + - `get-a-vertex `__ + """ # noqa: E501 + col = Collection.get_col_name(vertex) + return await self.vertex_collection(col).get( + vertex, + if_match=if_match, + if_none_match=if_none_match, + ) + + async def insert_vertex( + self, + collection: str, + vertex: T, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + ) -> Result[Json]: + """Insert a new vertex document. + + Args: + collection (str): Name of the vertex collection to insert the document into. + vertex (dict): Document to insert. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "vertex" field and the new document in the "new" field. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. + + References: + - `create-a-vertex `__ + """ # noqa: E501 + return await self.vertex_collection(collection).insert( + vertex, + wait_for_sync=wait_for_sync, + return_new=return_new, + ) + + async def update_vertex( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Update a vertex in the graph. + + Args: + vertex (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). 
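A sketch of the graph-level vertex helpers above; note that `insert_vertex` takes the collection name, while the other calls locate the collection from the document "_id" (names are illustrative):

.. code-block:: python

    await school.insert_vertex("lectures", {"_key": "CSC101"})
    await school.update_vertex({"_id": "lectures/CSC101", "difficulty": "easy"})
    doc = await school.vertex("lectures/CSC101")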
+ + Raises: + DocumentUpdateError: If update fails. + + References: + - `update-a-vertex `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, vertex)) + return await self.vertex_collection(col).update( + vertex, + wait_for_sync=wait_for_sync, + keep_null=keep_null, + return_new=return_new, + return_old=return_old, + if_match=if_match, + ) + + async def replace_vertex( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Replace a vertex in the graph. + + Args: + vertex (dict): New document. It must contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentReplaceError: If replace fails. + + References: + - `replace-a-vertex `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, vertex)) + return await self.vertex_collection(col).replace( + vertex, + wait_for_sync=wait_for_sync, + keep_null=keep_null, + return_new=return_new, + return_old=return_old, + if_match=if_match, + ) + + async def delete_vertex( + self, + vertex: T, + ignore_missing: bool = False, + wait_for_sync: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Delete a vertex in the graph. + + Args: + vertex (dict): Document ID, key or body. The body must contain the + "_key" or "_id" field. + ignore_missing (bool): Do not raise an exception on missing document. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: `True` if vertex was deleted successfully, `False` if vertex + was not found and **ignore_missing** was set to `True` (does not apply + in transactions). Old document is returned if **return_old** is set + to `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentDeleteError: If deletion fails. + + References: + - `remove-a-vertex `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, vertex)) + return await self.vertex_collection(col).delete( + vertex, + ignore_missing=ignore_missing, + wait_for_sync=wait_for_sync, + return_old=return_old, + if_match=if_match, + ) + + def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: + """Returns the edge collection API wrapper. + + Args: + name (str): Edge collection name. 
+ + Returns: + EdgeCollection: Edge collection API wrapper. + """ + return EdgeCollection[T, U, V]( + executor=self._executor, + graph=self._name, + name=name, + doc_serializer=self._doc_serializer, + doc_deserializer=self._doc_deserializer, + ) + + async def edge_definitions(self) -> Result[Jsons]: + """Return the edge definitions from the graph. + + Returns: + list: List of edge definitions. + + Raises: + EdgeDefinitionListError: If the operation fails. + """ + request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise EdgeDefinitionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + properties = GraphProperties(body["graph"]) + edge_definitions = properties.format( + GraphProperties.compatibility_formatter + )["edge_definitions"] + return cast(Jsons, edge_definitions) + + return await self._executor.execute(request, response_handler) + + async def has_edge_definition(self, name: str) -> Result[bool]: + """Check if the graph has the given edge definition. + + Returns: + bool: `True` if the graph has the edge definitions, `False` otherwise. + + Raises: + EdgeDefinitionListError: If the operation fails. + """ + request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") + + def response_handler(resp: Response) -> bool: + if not resp.is_success: + raise EdgeDefinitionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return any( + edge_definition["collection"] == name + for edge_definition in body["graph"]["edgeDefinitions"] + ) + + return await self._executor.execute(request, response_handler) + + async def edge_collections(self) -> Result[List[str]]: + """Get the names of all edge collections in the graph. + + Returns: + list: List of edge collection names. + + Raises: + EdgeCollectionListError: If the operation fails. + + References: + - `list-edge-collections `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint=f"/_api/gharial/{self._name}/edge", + ) + + def response_handler(resp: Response) -> List[str]: + if not resp.is_success: + raise EdgeCollectionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return list(sorted(body["collections"])) + + return await self._executor.execute(request, response_handler) + + async def create_edge_definition( + self, + edge_collection: str, + from_vertex_collections: Sequence[str], + to_vertex_collections: Sequence[str], + options: Optional[EdgeDefinitionOptions | Json] = None, + ) -> Result[EdgeCollection[T, U, V]]: + """Create an edge definition in the graph. + + This edge definition has to contain a collection and an array of each from + and to vertex collections. + + .. code-block:: python + + { + "edge_collection": "edge_collection_name", + "from_vertex_collections": ["from_vertex_collection_name"], + "to_vertex_collections": ["to_vertex_collection_name"] + } + + Args: + edge_collection (str): Edge collection name. + from_vertex_collections (list): List of vertex collections + that can be used as the "from" vertex in edges. + to_vertex_collections (list): List of vertex collections + that can be used as the "to" vertex in edges. + options (dict | EdgeDefinitionOptions | None): Extra options for + creating edge definitions. + + Returns: + EdgeCollection: Edge collection API wrapper. + + Raises: + EdgeDefinitionCreateError: If the operation fails. 
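A minimal sketch of the call shape described above (collection names are illustrative):

.. code-block:: python

    teach = await school.create_edge_definition(
        edge_collection="teach",
        from_vertex_collections=["teachers"],
        to_vertex_collections=["lectures"],
    )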
+
+        References:
+            - `add-an-edge-definition `__
+        """  # noqa: E501
+        data: Json = {
+            "collection": edge_collection,
+            "from": from_vertex_collections,
+            "to": to_vertex_collections,
+        }
+
+        if options is not None:
+            if isinstance(options, EdgeDefinitionOptions):
+                data["options"] = options.to_dict()
+            else:
+                data["options"] = options
+
+        request = Request(
+            method=Method.POST,
+            endpoint=f"/_api/gharial/{self._name}/edge",
+            data=self.serializer.dumps(data),
+        )
+
+        def response_handler(resp: Response) -> EdgeCollection[T, U, V]:
+            if not resp.is_success:
+                raise EdgeDefinitionCreateError(resp, request)
+            return self.edge_collection(edge_collection)
+
+        return await self._executor.execute(request, response_handler)
+
+    async def replace_edge_definition(
+        self,
+        edge_collection: str,
+        from_vertex_collections: Sequence[str],
+        to_vertex_collections: Sequence[str],
+        options: Optional[EdgeDefinitionOptions | Json] = None,
+        wait_for_sync: Optional[bool] = None,
+        drop_collections: Optional[bool] = None,
+    ) -> Result[EdgeCollection[T, U, V]]:
+        """Replace an edge definition.
+
+        Args:
+            edge_collection (str): Edge collection name.
+            from_vertex_collections (list): Names of "from" vertex collections.
+            to_vertex_collections (list): Names of "to" vertex collections.
+            options (dict | EdgeDefinitionOptions | None): Extra options for
+                modifying collections within this edge definition.
+            wait_for_sync (bool | None): If set to `True`, the operation waits for
+                data to be synced to disk before returning.
+            drop_collections (bool | None): Drop the edge collection in addition to
+                removing it from the graph. The collection is only dropped if it is
+                not used in other graphs.
+
+        Returns:
+            EdgeCollection: API wrapper.
+
+        Raises:
+            EdgeDefinitionReplaceError: If the operation fails.
+
+        References:
+            - `replace-an-edge-definition `__
+        """  # noqa: E501
+        data: Json = {
+            "collection": edge_collection,
+            "from": from_vertex_collections,
+            "to": to_vertex_collections,
+        }
+        if options is not None:
+            if isinstance(options, EdgeDefinitionOptions):
+                data["options"] = options.to_dict()
+            else:
+                data["options"] = options
+
+        params: Params = {}
+        if wait_for_sync is not None:
+            params["waitForSync"] = wait_for_sync
+        if drop_collections is not None:
+            params["dropCollections"] = drop_collections
+
+        request = Request(
+            method=Method.PUT,
+            endpoint=f"/_api/gharial/{self._name}/edge/{edge_collection}",
+            data=self.serializer.dumps(data),
+            params=params,
+        )
+
+        def response_handler(resp: Response) -> EdgeCollection[T, U, V]:
+            if resp.is_success:
+                return self.edge_collection(edge_collection)
+            raise EdgeDefinitionReplaceError(resp, request)
+
+        return await self._executor.execute(request, response_handler)
+
+    async def delete_edge_definition(
+        self,
+        name: str,
+        drop_collections: Optional[bool] = None,
+        wait_for_sync: Optional[bool] = None,
+    ) -> None:
+        """Delete an edge definition from the graph.
+
+        Args:
+            name (str): Edge collection name.
+            drop_collections (bool | None): If set to `True`, the edge definition is not
+                just removed from the graph but the edge collection is also deleted
+                completely from the database.
+            wait_for_sync (bool | None): If set to `True`, the operation waits for
+                changes to be synced to disk before returning.
+
+        Raises:
+            EdgeDefinitionDeleteError: If the operation fails.
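A sketch pairing the two operations above; the "seminars" collection name is purely illustrative:

.. code-block:: python

    # Point the "to" side of the definition at a different collection.
    await school.replace_edge_definition(
        edge_collection="teach",
        from_vertex_collections=["teachers"],
        to_vertex_collections=["seminars"],
    )

    # Remove the definition and drop the underlying edge collection.
    await school.delete_edge_definition("teach", drop_collections=True)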
+
+        References:
+            - `remove-an-edge-definition `__
+        """  # noqa: E501
+        params: Params = {}
+        if drop_collections is not None:
+            params["dropCollections"] = drop_collections
+        if wait_for_sync is not None:
+            params["waitForSync"] = wait_for_sync
+
+        request = Request(
+            method=Method.DELETE,
+            endpoint=f"/_api/gharial/{self._name}/edge/{name}",
+            params=params,
+        )
+
+        def response_handler(resp: Response) -> None:
+            if not resp.is_success:
+                raise EdgeDefinitionDeleteError(resp, request)
+
+        await self._executor.execute(request, response_handler)
+
+    async def has_edge(
+        self,
+        edge: str | Json,
+        allow_dirty_read: bool = False,
+        if_match: Optional[str] = None,
+        if_none_match: Optional[str] = None,
+    ) -> Result[bool]:
+        """Check if the edge exists in the graph.
+
+        Args:
+            edge (str | dict): Document ID, key or body.
+                Document body must contain the "_id" or "_key" field.
+            allow_dirty_read (bool): Allow reads from followers in a cluster.
+            if_match (str | None): The document is returned, if it has the same
+                revision as the given ETag.
+            if_none_match (str | None): The document is returned, if it has a
+                different revision than the given ETag.
+
+        Returns:
+            `True` if the document exists, `False` otherwise.
+
+        Raises:
+            DocumentRevisionError: If the revision is incorrect.
+            DocumentGetError: If retrieval fails.
+        """  # noqa: E501
+        col = Collection.get_col_name(edge)
+        return await self.edge_collection(col).has(
+            edge,
+            allow_dirty_read=allow_dirty_read,
+            if_match=if_match,
+            if_none_match=if_none_match,
+        )
+
+    async def edge(
+        self,
+        edge: str | Json,
+        rev: Optional[str] = None,
+        if_match: Optional[str] = None,
+        if_none_match: Optional[str] = None,
+    ) -> Result[Optional[Json]]:
+        """Return an edge from the graph.
+
+        Args:
+            edge (str | dict): Document ID, key or body.
+                Document body must contain the "_id" or "_key" field.
+            rev (str | None): If this is set, a document is only returned if it
+                has exactly this revision.
+            if_match (str | None): The document is returned, if it has the same
+                revision as the given ETag.
+            if_none_match (str | None): The document is returned, if it has a
+                different revision than the given ETag.
+
+        Returns:
+            dict | None: Document or `None` if not found.
+
+        Raises:
+            DocumentRevisionError: If the revision is incorrect.
+            DocumentGetError: If retrieval fails.
+            DocumentParseError: If the document is malformed.
+
+        References:
+            - `get-an-edge `__
+        """  # noqa: E501
+        col = Collection.get_col_name(edge)
+        return await self.edge_collection(col).get(
+            edge,
+            rev=rev,
+            if_match=if_match,
+            if_none_match=if_none_match,
+        )
+
+    async def insert_edge(
+        self,
+        collection: str,
+        edge: T,
+        wait_for_sync: Optional[bool] = None,
+        return_new: Optional[bool] = None,
+    ) -> Result[Json]:
+        """Insert a new edge document.
+
+        Args:
+            collection (str): Name of the edge collection to insert the document into.
+            edge (dict): Document to insert. It must contain "_from" and
+                "_to" fields. If it contains the "_key" or "_id"
+                field, the value is used as the key of the new document (otherwise
+                it is auto-generated). Any "_rev" field is ignored.
+            wait_for_sync (bool | None): Wait until document has been synced to disk.
+            return_new (bool | None): Additionally return the complete new document
+                under the attribute `new` in the result.
+
+        Returns:
+            dict: Document metadata (e.g. document id, key, revision).
+            If `return_new` is specified, the result contains the document
+            metadata in the "edge" field and the new document in the "new" field.
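A sketch of graph-level edge lookups and inserts; reads take the full document ID, and all names here are illustrative:

.. code-block:: python

    meta = await school.insert_edge(
        "teach",
        {"_key": "jon-CSC101", "_from": "teachers/jon", "_to": "lectures/CSC101"},
    )
    if await school.has_edge("teach/jon-CSC101"):
        doc = await school.edge("teach/jon-CSC101")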
+
+        Raises:
+            DocumentInsertError: If insertion fails.
+            DocumentParseError: If the document is malformed.
+
+        References:
+            - `create-an-edge `__
+        """  # noqa: E501
+        return await self.edge_collection(collection).insert(
+            edge,
+            wait_for_sync=wait_for_sync,
+            return_new=return_new,
+        )
+
+    async def update_edge(
+        self,
+        edge: T,
+        wait_for_sync: Optional[bool] = None,
+        keep_null: Optional[bool] = None,
+        return_new: Optional[bool] = None,
+        return_old: Optional[bool] = None,
+        if_match: Optional[str] = None,
+    ) -> Result[Json]:
+        """Update an edge in the graph.
+
+        Args:
+            edge (dict): Partial or full document with the updated values.
+                It must contain the "_key" or "_id" field, along with "_from" and
+                "_to" fields.
+            wait_for_sync (bool | None): Wait until document has been synced to disk.
+            keep_null (bool | None): If the intention is to delete existing attributes
+                with the patch command, set this parameter to `False`.
+            return_new (bool | None): Additionally return the complete new document
+                under the attribute `new` in the result.
+            return_old (bool | None): Additionally return the complete old document
+                under the attribute `old` in the result.
+            if_match (str | None): You can conditionally update a document based on a
+                target revision id by using the "if-match" HTTP header.
+
+        Returns:
+            dict: Document metadata (e.g. document id, key, revision).
+            If `return_new` or `return_old` are specified, the result contains
+            the document metadata in the "edge" field and two additional fields
+            ("new" and "old").
+
+        Raises:
+            DocumentUpdateError: If update fails.
+
+        References:
+            - `update-an-edge `__
+        """  # noqa: E501
+        col = Collection.get_col_name(cast(Json | str, edge))
+        return await self.edge_collection(col).update(
+            edge,
+            wait_for_sync=wait_for_sync,
+            keep_null=keep_null,
+            return_new=return_new,
+            return_old=return_old,
+            if_match=if_match,
+        )
+
+    async def replace_edge(
+        self,
+        edge: T,
+        wait_for_sync: Optional[bool] = None,
+        keep_null: Optional[bool] = None,
+        return_new: Optional[bool] = None,
+        return_old: Optional[bool] = None,
+        if_match: Optional[str] = None,
+    ) -> Result[Json]:
+        """Replace an edge in the graph.
+
+        Args:
+            edge (dict): New document. It must contain the "_key" or "_id" field,
+                along with "_from" and "_to" fields.
+            wait_for_sync (bool | None): Wait until document has been synced to disk.
+            keep_null (bool | None): If the intention is to delete existing attributes
+                with the patch command, set this parameter to `False`.
+            return_new (bool | None): Additionally return the complete new document
+                under the attribute `new` in the result.
+            return_old (bool | None): Additionally return the complete old document
+                under the attribute `old` in the result.
+            if_match (str | None): You can conditionally replace a document based on a
+                target revision id by using the "if-match" HTTP header.
+
+        Returns:
+            dict: Document metadata (e.g. document id, key, revision).
+            If `return_new` or `return_old` are specified, the result contains
+            the document metadata in the "edge" field and two additional fields
+            ("new" and "old").
+
+        Raises:
+            DocumentRevisionError: If precondition was violated.
+            DocumentReplaceError: If replace fails.
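A sketch contrasting the two mutations above: `update_edge` patches selected fields, while `replace_edge` swaps in a whole new body (IDs are illustrative):

.. code-block:: python

    await school.update_edge({"_id": "teach/jon-CSC101", "online": True})
    await school.replace_edge({
        "_id": "teach/jon-CSC101",
        "_from": "teachers/jon",
        "_to": "lectures/CSC101",
        "online": False,
    })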
+
+        References:
+            - `replace-an-edge `__
+        """  # noqa: E501
+        col = Collection.get_col_name(cast(Json | str, edge))
+        return await self.edge_collection(col).replace(
+            edge,
+            wait_for_sync=wait_for_sync,
+            keep_null=keep_null,
+            return_new=return_new,
+            return_old=return_old,
+            if_match=if_match,
+        )
+
+    async def delete_edge(
+        self,
+        edge: T,
+        ignore_missing: bool = False,
+        wait_for_sync: Optional[bool] = None,
+        return_old: Optional[bool] = None,
+        if_match: Optional[str] = None,
+    ) -> Result[bool | Json]:
+        """Delete an edge from the graph.
+
+        Args:
+            edge (str | dict): Document ID, key or body. It must contain the
+                "_key" or "_id" field.
+            ignore_missing (bool): Do not raise an exception on missing document.
+            wait_for_sync (bool | None): Wait until operation has been synced to disk.
+            return_old (bool | None): Additionally return the complete old document
+                under the attribute `old` in the result.
+            if_match (str | None): You can conditionally delete a document based on a
+                target revision id by using the "if-match" HTTP header.
+
+        Returns:
+            bool | dict: `True` if the edge was deleted successfully, `False` if the
+                edge was not found and **ignore_missing** was set to `True` (does not
+                apply in transactions). Old document is returned if **return_old** is
+                set to `True`.
+
+        Raises:
+            DocumentRevisionError: If precondition was violated.
+            DocumentDeleteError: If deletion fails.
+
+        References:
+            - `remove-an-edge `__
+        """  # noqa: E501
+        col = Collection.get_col_name(cast(Json | str, edge))
+        return await self.edge_collection(col).delete(
+            edge,
+            ignore_missing=ignore_missing,
+            wait_for_sync=wait_for_sync,
+            return_old=return_old,
+            if_match=if_match,
+        )
+
+    async def edges(
+        self,
+        collection: str,
+        vertex: str | Json,
+        direction: Optional[Literal["in", "out"]] = None,
+        allow_dirty_read: Optional[bool] = None,
+    ) -> Result[Json]:
+        """Return the edges starting or ending at the specified vertex.
+
+        Args:
+            collection (str): Name of the edge collection to return edges from.
+            vertex (str | dict): Document ID, key or body.
+            direction (str | None): Direction of the edges to return. Selects `in`
+                or `out` direction for edges. If not set, any edges are returned.
+            allow_dirty_read (bool | None): Allow reads from followers in a cluster.
+
+        Returns:
+            dict: List of edges and statistics.
+
+        Raises:
+            EdgeListError: If retrieval fails.
+
+        References:
+            - `get-inbound-and-outbound-edges `__
+        """  # noqa: E501
+        return await self.edge_collection(collection).edges(
+            vertex,
+            direction=direction,
+            allow_dirty_read=allow_dirty_read,
+        )
+
+    async def link(
+        self,
+        collection: str,
+        from_vertex: str | Json,
+        to_vertex: str | Json,
+        data: Optional[Json] = None,
+        wait_for_sync: Optional[bool] = None,
+        return_new: bool = False,
+    ) -> Result[Json]:
+        """Insert a new edge document linking the given vertices.
+
+        Args:
+            collection (str): Name of the collection to insert the edge into.
+            from_vertex (str | dict): "_from" vertex document ID or body with "_id"
+                field.
+            to_vertex (str | dict): "_to" vertex document ID or body with "_id" field.
+            data (dict | None): Any extra data for the new edge document. If it has
+                "_key" or "_id" field, its value is used as key of the new edge document
+                (otherwise it is auto-generated).
+            wait_for_sync (bool | None): Wait until operation has been synced to disk.
+            return_new (bool): Additionally return the complete new document
+                under the attribute `new` in the result.
+ + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "edge" field and the new document in the "new" field. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. + """ + return await self.edge_collection(collection).link( + from_vertex, + to_vertex, + data=data, + wait_for_sync=wait_for_sync, + return_new=return_new, + ) diff --git a/arangoasync/typings.py b/arangoasync/typings.py index 86c32fd..280e27e 100644 --- a/arangoasync/typings.py +++ b/arangoasync/typings.py @@ -1692,6 +1692,32 @@ def __init__(self, data: Json) -> None: def name(self) -> str: return cast(str, self._data["name"]) + @property + def is_smart(self) -> bool: + """Check if the graph is a smart graph.""" + return cast(bool, self._data.get("isSmart", False)) + + @property + def is_satellite(self) -> bool: + """Check if the graph is a satellite graph.""" + return cast(bool, self._data.get("isSatellite", False)) + + @property + def number_of_shards(self) -> Optional[int]: + return cast(Optional[int], self._data.get("numberOfShards")) + + @property + def replication_factor(self) -> Optional[int | str]: + return cast(Optional[int | str], self._data.get("replicationFactor")) + + @property + def min_replication_factor(self) -> Optional[int]: + return cast(Optional[int], self._data.get("minReplicationFactor")) + + @property + def write_concern(self) -> Optional[int]: + return cast(Optional[int], self._data.get("writeConcern")) + @property def edge_definitions(self) -> Jsons: return cast(Jsons, self._data.get("edgeDefinitions", list())) @@ -1700,6 +1726,47 @@ def edge_definitions(self) -> Jsons: def orphan_collections(self) -> List[str]: return cast(List[str], self._data.get("orphanCollections", list())) + @staticmethod + def compatibility_formatter(data: Json) -> Json: + result: Json = {} + + if "_id" in data: + result["id"] = data["_id"] + if "_key" in data: + result["key"] = data["_key"] + if "name" in data: + result["name"] = data["name"] + if "_rev" in data: + result["revision"] = data["_rev"] + if "orphanCollections" in data: + result["orphan_collection"] = data["orphanCollections"] + if "edgeDefinitions" in data: + result["edge_definitions"] = [ + { + "edge_collection": edge_definition["collection"], + "from_vertex_collections": edge_definition["from"], + "to_vertex_collections": edge_definition["to"], + } + for edge_definition in data["edgeDefinitions"] + ] + if "isSmart" in data: + result["smart"] = data["isSmart"] + if "isDisjoint" in data: + result["disjoint"] = data["isDisjoint"] + if "isSatellite" in data: + result["is_satellite"] = data["isSatellite"] + if "smartGraphAttribute" in data: + result["smart_field"] = data["smartGraphAttribute"] + if "numberOfShards" in data: + result["shard_count"] = data["numberOfShards"] + if "replicationFactor" in data: + result["replication_factor"] = data["replicationFactor"] + if "minReplicationFactor" in data: + result["min_replication_factor"] = data["minReplicationFactor"] + if "writeConcern" in data: + result["write_concern"] = data["writeConcern"] + return result + class GraphOptions(JsonWrapper): """Special options for graph creation. @@ -1720,15 +1787,18 @@ class GraphOptions(JsonWrapper): Enterprise Edition. write_concern (int | None): The write concern for new collections in the graph. 
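A sketch showing how these options are passed through `create_graph`; the shard counts are illustrative:

.. code-block:: python

    from arangoasync.typings import GraphOptions

    options = GraphOptions(number_of_shards=3, replication_factor=2)
    graph = await db.create_graph("school", options=options)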
+ + References: + - `create-a-graph `__ """ # noqa: E501 def __init__( self, - number_of_shards: Optional[int], - replication_factor: Optional[int | str], - satellites: Optional[List[str]], - smart_graph_attribute: Optional[str], - write_concern: Optional[int], + number_of_shards: Optional[int] = None, + replication_factor: Optional[int | str] = None, + satellites: Optional[List[str]] = None, + smart_graph_attribute: Optional[str] = None, + write_concern: Optional[int] = None, ) -> None: data: Json = dict() if number_of_shards is not None: @@ -1762,3 +1832,57 @@ def smart_graph_attribute(self) -> Optional[str]: @property def write_concern(self) -> Optional[int]: return cast(Optional[int], self._data.get("writeConcern")) + + +class VertexCollectionOptions(JsonWrapper): + """Special options for vertex collection creation. + + Args: + satellites (list): An array of collection names that is used to create + SatelliteCollections for a (Disjoint) SmartGraph using + SatelliteCollections (Enterprise Edition only). Each array element must + be a string and a valid collection name. + + References: + - `add-a-vertex-collection `__ + """ # noqa: E501 + + def __init__( + self, + satellites: Optional[List[str]] = None, + ) -> None: + data: Json = dict() + if satellites is not None: + data["satellites"] = satellites + super().__init__(data) + + @property + def satellites(self) -> Optional[List[str]]: + return cast(Optional[List[str]], self._data.get("satellites")) + + +class EdgeDefinitionOptions(JsonWrapper): + """Special options for edge definition creation. + + Args: + satellites (list): An array of collection names that is used to create + SatelliteCollections for a (Disjoint) SmartGraph using + SatelliteCollections (Enterprise Edition only). Each array element must + be a string and a valid collection name. + + References: + - `add-an-edge-definition `__ + """ # noqa: E501 + + def __init__( + self, + satellites: Optional[List[str]] = None, + ) -> None: + data: Json = dict() + if satellites is not None: + data["satellites"] = satellites + super().__init__(data) + + @property + def satellites(self) -> Optional[List[str]]: + return cast(Optional[List[str]], self._data.get("satellites")) diff --git a/docs/collection.rst b/docs/collection.rst index e6a846f..8dd3928 100644 --- a/docs/collection.rst +++ b/docs/collection.rst @@ -6,8 +6,10 @@ by its name which must consist only of hyphen, underscore and alphanumeric characters. There are three types of collections in python-arango: * **Standard Collection:** contains regular documents. -* **Vertex Collection:** contains vertex documents for graphs (not supported yet). -* **Edge Collection:** contains edge documents for graphs (not supported yet). +* **Vertex Collection:** contains vertex documents for graphs. See + :ref:`here ` for more details. +* **Edge Collection:** contains edge documents for graphs. See + :ref:`here ` for more details. Here is an example showing how you can manage standard collections: diff --git a/docs/document.rst b/docs/document.rst index ff9121e..571507e 100644 --- a/docs/document.rst +++ b/docs/document.rst @@ -42,6 +42,26 @@ collection: "friends": ["robin", "gordon"] } +.. _edge-documents: + +**Edge documents (edges)** are similar to standard documents but with two +additional required fields ``_from`` and ``_to``. Values of these fields must +be the handles of "from" and "to" vertex documents linked by the edge document +in question (see :doc:`graph` for details). Edge documents are contained in +:ref:`edge collections `. 
Here is an example of a valid edge
+document in the "friends" edge collection:
+
+.. code-block:: python
+
+    {
+        "_id": "friends/001",
+        "_key": "001",
+        "_rev": "_Wm3d4le--_",
+        "_from": "students/john",
+        "_to": "students/jane",
+        "closeness": 9.5
+    }
+
 Standard documents are managed via collection API wrapper:

 .. code-block:: python
diff --git a/docs/graph.rst b/docs/graph.rst
new file mode 100644
index 0000000..0f0bbbf
--- /dev/null
+++ b/docs/graph.rst
@@ -0,0 +1,415 @@
+Graphs
+------
+
+A **graph** consists of vertices and edges. Vertices are stored as documents in
+:ref:`vertex collections ` and edges are stored as documents in
+:ref:`edge collections `. The collections used in a graph and
+their relations are specified with :ref:`edge definitions `.
+For more information, refer to `ArangoDB Manual`_.
+
+.. _ArangoDB Manual: https://docs.arangodb.com
+
+**Example:**
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # List existing graphs in the database.
+        await db.graphs()
+
+        # Create a new graph named "school" if it does not already exist.
+        # This returns an API wrapper for "school" graph.
+        if await db.has_graph("school"):
+            school = db.graph("school")
+        else:
+            school = await db.create_graph("school")
+
+        # Retrieve various graph properties.
+        graph_name = school.name
+        db_name = school.db_name
+        vcols = await school.vertex_collections()
+        ecols = await school.edge_definitions()
+
+        # Delete the graph.
+        await db.delete_graph("school")
+
+.. _edge-definitions:
+
+Edge Definitions
+================
+
+An **edge definition** specifies a directed relation in a graph. A graph can
+have an arbitrary number of edge definitions. Each edge definition consists of
+the following components:
+
+* **From Vertex Collections:** contain "_from" vertices referencing "_to" vertices.
+* **To Vertex Collections:** contain "_to" vertices referenced by "_from" vertices.
+* **Edge Collection:** contains edges that link "_from" and "_to" vertices.
+
+Here is an example body of an edge definition:
+
+.. code-block:: python
+
+    {
+        "edge_collection": "teach",
+        "from_vertex_collections": ["teachers"],
+        "to_vertex_collections": ["lectures"]
+    }
+
+Here is an example showing how edge definitions are managed:
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # Get the API wrapper for graph "school".
+        if await db.has_graph("school"):
+            school = db.graph("school")
+        else:
+            school = await db.create_graph("school")
+
+        # Create an edge definition named "teach". This creates any missing
+        # collections and returns an API wrapper for "teach" edge collection.
+        # At first, create a wrong teachers->teachers mapping intentionally.
+        if not await school.has_edge_definition("teach"):
+            await school.create_edge_definition(
+                edge_collection="teach",
+                from_vertex_collections=["teachers"],
+                to_vertex_collections=["teachers"]
+            )
+
+        # List edge definitions.
+ edge_defs = await school.edge_definitions() + + # Replace with the correct edge definition. + await school.replace_edge_definition( + edge_collection="teach", + from_vertex_collections=["teachers"], + to_vertex_collections=["lectures"] + ) + + # Delete the edge definition (and its collections). + await school.delete_edge_definition("teach", drop_collections=True) + +.. _vertex-collections: + +Vertex Collections +================== + +A **vertex collection** contains vertex documents, and shares its namespace +with all other types of collections. Each graph can have an arbitrary number of +vertex collections. Vertex collections that are not part of any edge definition +are called **orphan collections**. You can manage vertex documents via standard +collection API wrappers, but using vertex collection API wrappers provides +additional safeguards: + +* All modifications are executed in transactions. +* If a vertex is deleted, all connected edges are also automatically deleted. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + school = db.graph("school") + + # Create a new vertex collection named "teachers" if it does not exist. + # This returns an API wrapper for "teachers" vertex collection. + if await school.has_vertex_collection("teachers"): + teachers = school.vertex_collection("teachers") + else: + teachers = await school.create_vertex_collection("teachers") + + # List vertex collections in the graph. + cols = await school.vertex_collections() + + # Vertex collections have similar interface as standard collections. + props = await teachers.properties() + await teachers.insert({"_key": "jon", "name": "Jon"}) + await teachers.update({"_key": "jon", "age": 35}) + await teachers.replace({"_key": "jon", "name": "Jon", "age": 36}) + await teachers.get("jon") + await teachers.has("jon") + await teachers.delete("jon") + +You can manage vertices via graph API wrappers also, but you must use document +IDs instead of keys where applicable. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + school = db.graph("school") + + # Create a new vertex collection named "lectures" if it does not exist. + # This returns an API wrapper for "lectures" vertex collection. + if await school.has_vertex_collection("lectures"): + school.vertex_collection("lectures") + else: + await school.create_vertex_collection("lectures") + + # The "_id" field is required instead of "_key" field (except for insert). 
+        await school.insert_vertex("lectures", {"_key": "CSC101"})
+        await school.update_vertex({"_id": "lectures/CSC101", "difficulty": "easy"})
+        await school.replace_vertex({"_id": "lectures/CSC101", "difficulty": "hard"})
+        await school.has_vertex("lectures/CSC101")
+        await school.vertex("lectures/CSC101")
+        await school.delete_vertex("lectures/CSC101")
+
+See :class:`arangoasync.graph.Graph` and :class:`arangoasync.collection.VertexCollection` for the API specification.
+
+.. _edge-collections:
+
+Edge Collections
+================
+
+An **edge collection** contains :ref:`edge documents <edge-documents>` and
+shares its namespace with all other types of collections. You can manage edge
+documents via standard collection API wrappers, but using edge collection API
+wrappers provides additional safeguards:
+
+* All modifications are executed in transactions.
+* Edge documents are checked against the edge definitions on insert.
+
+**Example:**
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # Get the API wrapper for graph "school".
+        if await db.has_graph("school"):
+            school = db.graph("school")
+        else:
+            school = await db.create_graph("school")
+
+        if not await school.has_vertex_collection("lectures"):
+            await school.create_vertex_collection("lectures")
+        await school.insert_vertex("lectures", {"_key": "CSC101"})
+
+        if not await school.has_vertex_collection("teachers"):
+            await school.create_vertex_collection("teachers")
+        await school.insert_vertex("teachers", {"_key": "jon"})
+
+        # Get the API wrapper for edge collection "teach".
+        if await school.has_edge_definition("teach"):
+            teach = school.edge_collection("teach")
+        else:
+            teach = await school.create_edge_definition(
+                edge_collection="teach",
+                from_vertex_collections=["teachers"],
+                to_vertex_collections=["lectures"]
+            )
+
+        # Edge collections have a similar interface to standard collections.
+        await teach.insert({
+            "_key": "jon-CSC101",
+            "_from": "teachers/jon",
+            "_to": "lectures/CSC101"
+        })
+        await teach.replace({
+            "_key": "jon-CSC101",
+            "_from": "teachers/jon",
+            "_to": "lectures/CSC101",
+            "online": False
+        })
+        await teach.update({
+            "_key": "jon-CSC101",
+            "online": True
+        })
+        await teach.has("jon-CSC101")
+        await teach.get("jon-CSC101")
+        await teach.delete("jon-CSC101")
+
+        # Create an edge between two vertices (essentially the same as insert).
+        await teach.link("teachers/jon", "lectures/CSC101", data={"online": False})
+
+        # List edges going in/out of a vertex.
+        inbound = await teach.edges("teachers/jon", direction="in")
+        outbound = await teach.edges("teachers/jon", direction="out")
+
+You can also manage edges via the graph API wrapper, but you must use document
+IDs instead of keys where applicable.
+
+**Example:**
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # Get the API wrapper for graph "school".
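+        # Create it first if it does not already exist.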
+        if await db.has_graph("school"):
+            school = db.graph("school")
+        else:
+            school = await db.create_graph("school")
+
+        if not await school.has_vertex_collection("lectures"):
+            await school.create_vertex_collection("lectures")
+        await school.insert_vertex("lectures", {"_key": "CSC101"})
+
+        if not await school.has_vertex_collection("teachers"):
+            await school.create_vertex_collection("teachers")
+        await school.insert_vertex("teachers", {"_key": "jon"})
+
+        # Create the edge collection "teach".
+        if not await school.has_edge_definition("teach"):
+            await school.create_edge_definition(
+                edge_collection="teach",
+                from_vertex_collections=["teachers"],
+                to_vertex_collections=["lectures"]
+            )
+
+        # The "_id" field is required instead of the "_key" field.
+        await school.insert_edge(
+            collection="teach",
+            edge={
+                "_id": "teach/jon-CSC101",
+                "_from": "teachers/jon",
+                "_to": "lectures/CSC101"
+            }
+        )
+        await school.replace_edge({
+            "_id": "teach/jon-CSC101",
+            "_from": "teachers/jon",
+            "_to": "lectures/CSC101",
+            "online": False,
+        })
+        await school.update_edge({
+            "_id": "teach/jon-CSC101",
+            "online": True
+        })
+        await school.has_edge("teach/jon-CSC101")
+        await school.edge("teach/jon-CSC101")
+        await school.delete_edge("teach/jon-CSC101")
+        await school.link("teach", "teachers/jon", "lectures/CSC101")
+        await school.edges("teach", "teachers/jon", direction="out")
+
+See :class:`arangoasync.graph.Graph` and :class:`arangoasync.graph.EdgeCollection` for the API specification.
+
+.. _graph-traversals:
+
+Graph Traversals
+================
+
+**Graph traversals** are executed via AQL. Each traversal can span multiple
+vertex collections and walk over edges and vertices using various algorithms.
+
+**Example:**
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # Get the API wrapper for graph "school".
+        if await db.has_graph("school"):
+            school = db.graph("school")
+        else:
+            school = await db.create_graph("school")
+
+        # Create vertex collections "lectures" and "teachers" if they do not exist.
+        if not await school.has_vertex_collection("lectures"):
+            await school.create_vertex_collection("lectures")
+        if not await school.has_vertex_collection("teachers"):
+            await school.create_vertex_collection("teachers")
+
+        # Create the edge collection "teach".
+        if not await school.has_edge_definition("teach"):
+            await school.create_edge_definition(
+                edge_collection="teach",
+                from_vertex_collections=["teachers"],
+                to_vertex_collections=["lectures"]
+            )
+
+        # Get API wrappers for "from" and "to" vertex collections.
+        teachers = school.vertex_collection("teachers")
+        lectures = school.vertex_collection("lectures")
+
+        # Get the API wrapper for the edge collection.
+        teach = school.edge_collection("teach")
+
+        # Insert vertices into the graph.
+        await teachers.insert({"_key": "jon", "name": "Professor Jon"})
+        await lectures.insert({"_key": "CSC101", "name": "Introduction to CS"})
+        await lectures.insert({"_key": "MAT223", "name": "Linear Algebra"})
+        await lectures.insert({"_key": "STA201", "name": "Statistics"})
+
+        # Insert edges into the graph.
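+        # Each edge references its endpoints by full document ID ("collection/key").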
+        await teach.insert({"_from": "teachers/jon", "_to": "lectures/CSC101"})
+        await teach.insert({"_from": "teachers/jon", "_to": "lectures/STA201"})
+        await teach.insert({"_from": "teachers/jon", "_to": "lectures/MAT223"})
+
+        # AQL query to perform a graph traversal.
+        # Traverse 1 to 3 hops from the vertex "teachers/jon".
+        query = """
+            FOR v, e, p IN 1..3 OUTBOUND 'teachers/jon' GRAPH 'school'
+            OPTIONS { bfs: true, uniqueVertices: 'global' }
+            RETURN {vertex: v, edge: e, path: p}
+        """
+
+        # Traverse the graph in the outbound direction, breadth-first.
+        async with await db.aql.execute(query) as cursor:
+            async for lecture in cursor:
+                print(lecture)
diff --git a/docs/index.rst b/docs/index.rst
index 3252629..180c0ed 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -35,6 +35,7 @@ Contents
     collection
     indexes
     document
+    graph
     aql
 
 **Specialized Features**
diff --git a/docs/overview.rst b/docs/overview.rst
index 6f1f76a..f723234 100644
--- a/docs/overview.rst
+++ b/docs/overview.rst
@@ -39,7 +39,7 @@ Here is an example showing how **python-arango-async** client can be used:
         async for doc in cursor:
             student_names.append(doc["name"])
 
-You may also use the client without a context manager, but you must ensure to close the client when done:
+You may also use the client without a context manager, but you must close the client when done.
 
 .. code-block:: python
 
@@ -61,3 +61,65 @@ You may also use the client without a context manager, but you must ensure to cl
         # Close the client when done.
         await client.close()
+
+Another example, using `graphs`_:
+
+.. _graphs: https://docs.arangodb.com/stable/graphs/
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # Get the API wrapper for graph "school".
+        if await db.has_graph("school"):
+            graph = db.graph("school")
+        else:
+            graph = await db.create_graph("school")
+
+        # Create vertex collections for the graph.
+        students = await graph.create_vertex_collection("students")
+        lectures = await graph.create_vertex_collection("lectures")
+
+        # Create an edge definition (relation) for the graph.
+        edges = await graph.create_edge_definition(
+            edge_collection="register",
+            from_vertex_collections=["students"],
+            to_vertex_collections=["lectures"]
+        )
+
+        # Insert vertex documents into "students" (from) vertex collection.
+        await students.insert({"_key": "01", "full_name": "Anna Smith"})
+        await students.insert({"_key": "02", "full_name": "Jake Clark"})
+        await students.insert({"_key": "03", "full_name": "Lisa Jones"})
+
+        # Insert vertex documents into "lectures" (to) vertex collection.
+        await lectures.insert({"_key": "MAT101", "title": "Calculus"})
+        await lectures.insert({"_key": "STA101", "title": "Statistics"})
+        await lectures.insert({"_key": "CSC101", "title": "Algorithms"})
+
+        # Insert edge documents into "register" edge collection.
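+        # "_from" is the registering student, "_to" the lecture being taken.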
+        await edges.insert({"_from": "students/01", "_to": "lectures/MAT101"})
+        await edges.insert({"_from": "students/01", "_to": "lectures/STA101"})
+        await edges.insert({"_from": "students/01", "_to": "lectures/CSC101"})
+        await edges.insert({"_from": "students/02", "_to": "lectures/MAT101"})
+        await edges.insert({"_from": "students/02", "_to": "lectures/STA101"})
+        await edges.insert({"_from": "students/03", "_to": "lectures/CSC101"})
+
+        # Traverse the graph in the outbound direction, breadth-first.
+        query = """
+            FOR v, e, p IN 1..3 OUTBOUND 'students/01' GRAPH 'school'
+            OPTIONS { bfs: true, uniqueVertices: 'global' }
+            RETURN {vertex: v, edge: e, path: p}
+        """
+
+        async with await db.aql.execute(query) as cursor:
+            async for doc in cursor:
+                print(doc)
diff --git a/docs/serialization.rst b/docs/serialization.rst
index 9fe520e..ed00702 100644
--- a/docs/serialization.rst
+++ b/docs/serialization.rst
@@ -80,6 +80,10 @@ that you are modeling your students data using Pydantic_. You want to be able to
 of a certain type, and also be able to read them back. More so, you would like to
 get multiple documents back using one of the formats provided by pandas_.
 
+.. note::
+    The driver assumes that the types support dictionary-like indexing, i.e. `doc["_id"]`
+    returns the ID of the document.
+
 **Example:**
 
 .. code-block:: python
@@ -179,5 +183,7 @@ You would then use the custom serializer/deserializer when working with collecti
     students = await col.get_many(keys)
     assert type(students) == pd.DataFrame
 
+See a full example in this `gist `__.
+
 .. _Pydantic: https://docs.pydantic.dev/latest/
 .. _pandas: https://pandas.pydata.org/
diff --git a/docs/specs.rst b/docs/specs.rst
index dc92bd9..9983716 100644
--- a/docs/specs.rst
+++ b/docs/specs.rst
@@ -19,6 +19,9 @@ python-arango-async.
 .. automodule:: arangoasync.aql
     :members:
 
+.. automodule:: arangoasync.graph
+    :members:
+
 .. automodule:: arangoasync.job
     :members:
diff --git a/tests/conftest.py b/tests/conftest.py
index e91a591..98d75de 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -8,7 +8,12 @@
 from arangoasync.auth import Auth, JwtToken
 from arangoasync.client import ArangoClient
 from arangoasync.typings import UserInfo
-from tests.helpers import generate_col_name, generate_db_name, generate_username
+from tests.helpers import (
+    generate_col_name,
+    generate_db_name,
+    generate_graph_name,
+    generate_username,
+)
 
 
 @dataclass
@@ -19,6 +24,7 @@ class GlobalData:
     secret: str = None
     token: JwtToken = None
     sys_db_name: str = "_system"
+    graph_name: str = "test_graph"
     username: str = generate_username()
     cluster: bool = False
     enterprise: bool = False
@@ -64,6 +70,7 @@ def pytest_configure(config):
     global_data.token = JwtToken.generate_token(global_data.secret)
     global_data.cluster = config.getoption("cluster")
     global_data.enterprise = config.getoption("enterprise")
+    global_data.graph_name = generate_graph_name()
 
     async def get_db_version():
         async with ArangoClient(hosts=global_data.url) as client:
@@ -215,6 +222,11 @@ async def bad_db(arango_client):
     )
 
 
+@pytest_asyncio.fixture
+def bad_graph(bad_db):
+    return bad_db.graph(global_data.graph_name)
+
+
 @pytest_asyncio.fixture
 async def doc_col(db):
     col_name = generate_col_name()
@@ -233,7 +245,7 @@ def db_version():
     return global_data.db_version
 
 
-@pytest_asyncio.fixture(scope="session", autouse=True)
+@pytest_asyncio.fixture(autouse=True)
 async def teardown():
     yield
     async with ArangoClient(hosts=global_data.url) as client:
diff --git a/tests/helpers.py b/tests/helpers.py
index cf8b3cb..8e91c26 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -19,6 +19,15 @@ def generate_col_name():
     return f"test_collection_{uuid4().hex}"
 
 
+def generate_graph_name():
+    """Generate and return a random graph name.
+
+    Returns:
+        str: Random graph name.
+    """
+    return f"test_graph_{uuid4().hex}"
+
+
 def generate_username():
     """Generate and return a random username.
diff --git a/tests/test_graph.py b/tests/test_graph.py
index 0967ff9..6d5fcbe 100644
--- a/tests/test_graph.py
+++ b/tests/test_graph.py
@@ -1,37 +1,404 @@
 import pytest
 
-from arangoasync.exceptions import GraphCreateError, GraphDeleteError, GraphListError
+from arangoasync.exceptions import (
+    DocumentDeleteError,
+    EdgeCollectionListError,
+    EdgeDefinitionDeleteError,
+    EdgeDefinitionListError,
+    EdgeDefinitionReplaceError,
+    EdgeListError,
+    GraphCreateError,
+    GraphDeleteError,
+    GraphListError,
+    GraphPropertiesError,
+    VertexCollectionCreateError,
+    VertexCollectionDeleteError,
+    VertexCollectionListError,
+)
+from arangoasync.typings import GraphOptions
+from tests.helpers import generate_col_name, generate_graph_name
 
 
 @pytest.mark.asyncio
 async def test_graph_basic(db, bad_db):
+    graph1_name = generate_graph_name()
     # Test the graph representation
-    graph = db.graph("test_graph")
-    assert graph.name == "test_graph"
-    assert "test_graph" in repr(graph)
+    graph = db.graph(graph1_name)
+    assert graph.name == graph1_name
+    assert graph1_name in repr(graph)
 
     # Cannot find any graph
+    graph2_name = generate_graph_name()
     assert await db.graphs() == []
-    assert await db.has_graph("fake_graph") is False
+    assert await db.has_graph(graph2_name) is False
     with pytest.raises(GraphListError):
-        await bad_db.has_graph("fake_graph")
+        await bad_db.has_graph(graph2_name)
     with pytest.raises(GraphListError):
         await bad_db.graphs()
 
     # Create a graph
-    graph = await db.create_graph("test_graph", wait_for_sync=True)
-    assert graph.name == "test_graph"
+    graph = await db.create_graph(graph1_name, wait_for_sync=True)
+    assert graph.name == graph1_name
     with pytest.raises(GraphCreateError):
-        await bad_db.create_graph("test_graph")
+        await bad_db.create_graph(graph1_name)
 
     # Check if the graph exists
-    assert await db.has_graph("test_graph") is True
+    assert await db.has_graph(graph1_name) is True
     graphs = await db.graphs()
     assert len(graphs) == 1
-    assert graphs[0].name == "test_graph"
+    assert graphs[0].name == graph1_name
 
     # Delete the graph
-    await db.delete_graph("test_graph")
-    assert await db.has_graph("test_graph") is False
+    await db.delete_graph(graph1_name)
+    assert await db.has_graph(graph1_name) is False
     with pytest.raises(GraphDeleteError):
-        await bad_db.delete_graph("test_graph")
+        await bad_db.delete_graph(graph1_name)
+
+
+@pytest.mark.asyncio
+async def test_graph_properties(db, bad_graph, cluster, enterprise):
+    # Create a graph
+    name = generate_graph_name()
+    is_smart = cluster and enterprise
+    options = GraphOptions(number_of_shards=3)
+    graph = await db.create_graph(name, is_smart=is_smart, options=options)
+
+    with pytest.raises(GraphPropertiesError):
+        await bad_graph.properties()
+
+    # Create first vertex collection
+    vcol_name = generate_col_name()
+    vcol = await graph.create_vertex_collection(vcol_name)
+    assert vcol.name == vcol_name
+
+    # Get the properties of the graph
+    properties = await graph.properties()
+    assert properties.name == name
+    assert properties.is_smart == is_smart
+    if cluster:
+        assert properties.number_of_shards == options.number_of_shards
+    assert properties.orphan_collections == [vcol_name]
+
+    # Create second vertex collection
+    vcol2_name = generate_col_name()
+    vcol2 = await graph.create_vertex_collection(vcol2_name)
+    assert vcol2.name == vcol2_name
+    properties = await graph.properties()
+    assert len(properties.orphan_collections) == 2
+
+    # Create an edge definition
+    edge_name = generate_col_name()
+    edge_col = await graph.create_edge_definition(
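+        # Edge collection name first, then the from/to vertex collections.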
+        edge_name,
+        from_vertex_collections=[vcol_name],
+        to_vertex_collections=[vcol2_name],
+    )
+    assert edge_col.name == edge_name
+
+    # There should be no more orphan collections
+    properties = await graph.properties()
+    assert len(properties.orphan_collections) == 0
+    assert len(properties.edge_definitions) == 1
+    assert properties.edge_definitions[0]["collection"] == edge_name
+    assert len(properties.edge_definitions[0]["from"]) == 1
+    assert properties.edge_definitions[0]["from"][0] == vcol_name
+    assert len(properties.edge_definitions[0]["to"]) == 1
+    assert properties.edge_definitions[0]["to"][0] == vcol2_name
+
+
+@pytest.mark.asyncio
+async def test_vertex_collections(db, docs, bad_graph):
+    # Test errors
+    with pytest.raises(VertexCollectionCreateError):
+        await bad_graph.create_vertex_collection("bad_col")
+    with pytest.raises(VertexCollectionListError):
+        await bad_graph.vertex_collections()
+    with pytest.raises(VertexCollectionListError):
+        await bad_graph.has_vertex_collection("bad_col")
+    with pytest.raises(VertexCollectionDeleteError):
+        await bad_graph.delete_vertex_collection("bad_col")
+
+    # Create graph
+    graph = await db.create_graph(generate_graph_name())
+
+    # Create vertex collections
+    names = [generate_col_name() for _ in range(3)]
+    cols = [await graph.create_vertex_collection(name) for name in names]
+
+    # List vertex collections
+    col_list = await graph.vertex_collections()
+    assert len(col_list) == 3
+    for c in cols:
+        assert c.name in col_list
+        assert await graph.has_vertex_collection(c.name)
+
+    # Delete a vertex collection
+    await graph.delete_vertex_collection(names[0])
+    assert await graph.has_vertex_collection(names[0]) is False
+
+    # Insert into the two remaining collections
+    v1_meta = await graph.insert_vertex(names[1], docs[0])
+    v2_meta = await graph.insert_vertex(names[2], docs[1], return_new=True)
+    assert "new" in v2_meta
+    v2_meta = v2_meta["vertex"]
+
+    # Get the vertex
+    v1 = await graph.vertex(v1_meta)
+    assert v1 is not None
+    assert v1["text"] == docs[0]["text"]
+    v2 = await graph.vertex(v2_meta["_id"])
+    assert v2 is not None
+    v3 = await graph.vertex(f"{names[2]}/bad_id")
+    assert v3 is None
+
+    # Update one vertex
+    v1["text"] = "updated_text"
+    v1_meta = await graph.update_vertex(v1, return_new=True)
+    assert "new" in v1_meta
+    assert "vertex" in v1_meta
+    v1 = await graph.vertex(v1_meta["vertex"])
+    assert v1["text"] == "updated_text"
+
+    # Replace the same vertex
+    v1["text"] = "replaced_text"
+    v1["additional"] = "data"
+    v1.pop("loc")
+    v1_meta = await graph.replace_vertex(v1, return_old=True, return_new=True)
+    assert "old" in v1_meta
+    assert "new" in v1_meta
+    assert "vertex" in v1_meta
+    v1 = await graph.vertex(v1_meta["vertex"])
+    assert v1["text"] == "replaced_text"
+    assert "additional" in v1
+    assert "loc" not in v1
+
+    # Delete a vertex
+    v1 = await graph.delete_vertex(v1["_id"], return_old=True)
+    assert "_id" in v1
+    assert await graph.delete_vertex(v1["_id"], ignore_missing=True) is False
+    with pytest.raises(DocumentDeleteError):
+        assert await graph.delete_vertex(v1["_id"])
+
+    # Check has method
+    assert await graph.has_vertex(v1) is False
+    assert await graph.has_vertex(v2["_id"]) is True
+
+
+@pytest.mark.asyncio
+async def test_edge_collections(db, bad_graph):
+    # Test errors
+    with pytest.raises(EdgeDefinitionListError):
+        await bad_graph.edge_definitions()
+    with pytest.raises(EdgeDefinitionListError):
+        await bad_graph.has_edge_definition("bad_col")
+    with pytest.raises(EdgeCollectionListError):
+        await bad_graph.edge_collections()
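+    # Error paths for replacing/deleting edge definitions and listing edges.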
+    with pytest.raises(EdgeDefinitionReplaceError):
+        await bad_graph.replace_edge_definition("foo", ["bar1"], ["bar2"])
+    with pytest.raises(EdgeDefinitionDeleteError):
+        await bad_graph.delete_edge_definition("foo")
+    with pytest.raises(EdgeListError):
+        await bad_graph.edges("col", "foo")
+
+    # Create a full graph
+    name = generate_graph_name()
+    graph = await db.create_graph(name)
+    teachers_col_name = generate_col_name()
+    await db.create_collection(teachers_col_name)
+    await graph.create_vertex_collection(teachers_col_name)
+    students_col_name = generate_col_name()
+    await db.create_collection(students_col_name)
+    await graph.create_vertex_collection(students_col_name)
+    edge_col_name = generate_col_name()
+    edge_col = await graph.create_edge_definition(
+        edge_col_name,
+        from_vertex_collections=[teachers_col_name],
+        to_vertex_collections=[students_col_name],
+    )
+    assert edge_col.name == edge_col_name
+
+    # List edge definitions
+    edge_definitions = await graph.edge_definitions()
+    assert len(edge_definitions) == 1
+    assert "edge_collection" in edge_definitions[0]
+    assert "from_vertex_collections" in edge_definitions[0]
+    assert "to_vertex_collections" in edge_definitions[0]
+    assert await graph.has_edge_definition(edge_col_name) is True
+    assert await graph.has_edge_definition("bad_edge") is False
+
+    edge_cols = await graph.edge_collections()
+    assert len(edge_cols) == 1
+    assert edge_col_name in edge_cols
+
+    # Define the graph data
+    teachers = [
+        {"_key": "101", "name": "Mr. Smith"},
+        {"_key": "102", "name": "Ms. Johnson"},
+        {"_key": "103", "name": "Dr. Brown"},
+    ]
+    students = [
+        {"_key": "123", "name": "Alice"},
+        {"_key": "456", "name": "Bob"},
+        {"_key": "789", "name": "Charlie"},
+    ]
+    edges = [
+        {
+            "_from": f"{teachers_col_name}/101",
+            "_to": f"{students_col_name}/123",
+            "subject": "Math",
+        },
+        {
+            "_from": f"{teachers_col_name}/102",
+            "_to": f"{students_col_name}/456",
+            "subject": "Science",
+        },
+        {
+            "_from": f"{teachers_col_name}/103",
+            "_to": f"{students_col_name}/789",
+            "subject": "History",
+        },
+    ]
+
+    # Insert the edges
+    edge_metas = []
+    for idx in range(len(edges)):
+        await graph.insert_vertex(teachers_col_name, teachers[idx])
+        await graph.insert_vertex(students_col_name, students[idx])
+        edge_meta = await graph.insert_edge(
+            edge_col_name,
+            edges[idx],
+            return_new=True,
+        )
+        assert "new" in edge_meta
+        edge_metas.append(edge_meta)
+
+    # Check for edge existence
+    edge_meta = edge_metas[0]
+    edge_id = edge_meta["new"]["_id"]
+    assert await graph.has_edge(edge_id) is True
+    assert await graph.has_edge(f"{edge_col_name}/bad_id") is False
+    edge = await graph.edge(edge_id)
+    assert edge is not None
+
+    # Update an edge
+    edge["subject"] = "Advanced Math"
+    updated_edge_meta = await graph.update_edge(edge, return_new=True, return_old=True)
+    assert "new" in updated_edge_meta
+    assert "old" in updated_edge_meta
+    assert "edge" in updated_edge_meta
+    edge = await graph.edge(edge_id)
+    assert edge["subject"] == "Advanced Math"
+
+    # Replace an edge
+    edge["subject"] = "Replaced Subject"
+    edge["extra_info"] = "Some additional data"
+    replaced_edge_meta = await graph.replace_edge(
+        edge, return_old=True, return_new=True
+    )
+    assert "old" in replaced_edge_meta
+    assert "new" in replaced_edge_meta
+    assert "edge" in replaced_edge_meta
+    edge = await graph.edge(edge_id)
+    assert edge["subject"] == "Replaced Subject"
+
+    # Delete the edge
+    deleted_edge = await graph.delete_edge(edge_id, return_old=True)
+    assert "_id" in deleted_edge
+    assert await graph.has_edge(edge_id) is False
+
+    # Replace the edge definition
+    new_from_collections = [students_col_name]
+    new_to_collections = [teachers_col_name]
+    replaced_edge_col = await graph.replace_edge_definition(
+        edge_col_name,
+        from_vertex_collections=new_from_collections,
+        to_vertex_collections=new_to_collections,
+    )
+    assert replaced_edge_col.name == edge_col_name
+
+    # Verify the updated edge definition
+    edge_definitions = await graph.edge_definitions()
+    assert len(edge_definitions) == 1
+    assert edge_definitions[0]["edge_collection"] == edge_col_name
+    assert edge_definitions[0]["from_vertex_collections"] == new_from_collections
+    assert edge_definitions[0]["to_vertex_collections"] == new_to_collections
+
+    # Delete the edge definition
+    await graph.delete_edge_definition(edge_col_name)
+    assert await graph.has_edge_definition(edge_col_name) is False
+
+
+@pytest.mark.asyncio
+async def test_edge_links(db):
+    # Create a full graph
+    name = generate_graph_name()
+    graph = await db.create_graph(name)
+
+    # Teachers collection
+    teachers_col_name = generate_col_name()
+    await db.create_collection(teachers_col_name)
+    await graph.create_vertex_collection(teachers_col_name)
+
+    # Students collection
+    students_col_name = generate_col_name()
+    await db.create_collection(students_col_name)
+    await graph.create_vertex_collection(students_col_name)
+
+    # Edges
+    teachers_to_students = generate_col_name()
+    await graph.create_edge_definition(
+        teachers_to_students,
+        from_vertex_collections=[teachers_col_name],
+        to_vertex_collections=[students_col_name],
+    )
+    students_to_students = generate_col_name()
+    await graph.create_edge_definition(
+        students_to_students,
+        from_vertex_collections=[teachers_col_name, students_col_name],
+        to_vertex_collections=[students_col_name],
+    )
+
+    # Populate the graph
+    teachers = [
+        {"_key": "101", "name": "Mr. Smith"},
+        {"_key": "102", "name": "Ms. Johnson"},
+        {"_key": "103", "name": "Dr. Brown"},
+    ]
+    students = [
+        {"_key": "123", "name": "Alice"},
+        {"_key": "456", "name": "Bob"},
+        {"_key": "789", "name": "Charlie"},
+    ]
+
+    docs = []
+    t = await graph.insert_vertex(teachers_col_name, teachers[0])
+    s = await graph.insert_vertex(students_col_name, students[0])
+    await graph.link(teachers_to_students, t, s, {"subject": "Math"})
+    docs.append(s)
+
+    t = await graph.insert_vertex(teachers_col_name, teachers[1])
+    s = await graph.insert_vertex(students_col_name, students[1])
+    await graph.link(teachers_to_students, t["_id"], s["_id"], {"subject": "Science"})
+    docs.append(s)
+
+    t = await graph.insert_vertex(teachers_col_name, teachers[2])
+    s = await graph.insert_vertex(students_col_name, students[2])
+    await graph.link(teachers_to_students, t, s, {"subject": "History"})
+    docs.append(s)
+
+    await graph.link(students_to_students, docs[0], docs[1], {"friendship": "close"})
+    await graph.link(students_to_students, docs[1], docs[0], {"friendship": "close"})
+
+    edges = await graph.edges(students_to_students, docs[0])
+    assert len(edges["edges"]) == 2
+    assert "stats" in edges
+
+    await graph.link(students_to_students, docs[2], docs[0], {"friendship": "close"})
+    edges = await graph.edges(students_to_students, docs[0], direction="in")
+    assert len(edges["edges"]) == 2
+
+    edges = await graph.edges(students_to_students, docs[0], direction="out")
+    assert len(edges["edges"]) == 1
+
+    edges = await graph.edges(students_to_students, docs[0])
+    assert len(edges["edges"]) == 3
diff --git a/tests/test_typings.py b/tests/test_typings.py
index 7a40c33..fd04fa1 100644
--- a/tests/test_typings.py
+++ b/tests/test_typings.py
@@ -4,6 +4,7 @@
     CollectionInfo,
     CollectionStatus,
     CollectionType,
+    EdgeDefinitionOptions,
     GraphOptions,
     GraphProperties,
     JsonWrapper,
@@ -17,6 +18,7 @@
     QueryProperties,
     QueryTrackingConfiguration,
     UserInfo,
+    VertexCollectionOptions,
 )
@@ -368,3 +370,19 @@ def test_GraphOptions():
     assert graph_options.satellites == ["satellite1", "satellite2"]
     assert graph_options.smart_graph_attribute == "region"
     assert graph_options.write_concern == 1
+
+
+def test_VertexCollectionOptions():
+    options = VertexCollectionOptions(
+        satellites=["col1", "col2"],
+    )
+
+    assert options.satellites == ["col1", "col2"]
+
+
+def test_EdgeDefinitionOptions():
+    options = EdgeDefinitionOptions(
+        satellites=["col1", "col2"],
+    )
+
+    assert options.satellites == ["col1", "col2"]

From ce278946e6d66af492dd7e739152c9782be9ac20 Mon Sep 17 00:00:00 2001
From: Alex Petenchea
Date: Sun, 1 Jun 2025 12:54:54 +0300
Subject: [PATCH 3/3] Bumping version number (#53)

---
 arangoasync/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/arangoasync/version.py b/arangoasync/version.py
index 27fdca4..81f0fde 100644
--- a/arangoasync/version.py
+++ b/arangoasync/version.py
@@ -1 +1 @@
-__version__ = "0.0.3"
+__version__ = "0.0.4"