diff --git a/README.md b/README.md index 507c3e9..ab24eae 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![Logo](docs/static/logo.png) +![Logo](https://raw.githubusercontent.com/arangodb/python-arango-async/refs/heads/main/docs/static/logo.png) [![CircleCI](https://dl.circleci.com/status-badge/img/gh/arangodb/python-arango-async/tree/main.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/arangodb/python-arango-async/tree/main) [![CodeQL](https://github.com/arangodb/python-arango-async/actions/workflows/codeql.yaml/badge.svg)](https://github.com/arangodb/python-arango-async/actions/workflows/codeql.yaml) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index c742714..c34c1aa 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -1653,7 +1653,7 @@ def response_handler(resp: Response) -> bool | Json: async def delete( self, - document: T, + document: str | T, ignore_revs: Optional[bool] = None, ignore_missing: bool = False, wait_for_sync: Optional[bool] = None, @@ -1665,7 +1665,7 @@ async def delete( """Delete a document. Args: - document (dict): Document ID, key or body. The body must contain the + document (str | dict): Document ID, key or body. The body must contain the "_key" or "_id" field. ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the document is ignored. If this is set to `False`, then the `_rev` @@ -1697,6 +1697,8 @@ async def delete( References: - `remove-a-document `__ """ # noqa: E501 + handle = self._get_doc_id(cast(str | Json, document)) + params: Params = {} if ignore_revs is not None: params["ignoreRevs"] = ignore_revs @@ -1715,7 +1717,7 @@ async def delete( request = Request( method=Method.DELETE, - endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", + endpoint=f"/_api/document/{handle}", params=params, headers=headers, ) diff --git a/arangoasync/database.py b/arangoasync/database.py index 3cac02d..998c6dd 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -10,7 +10,7 @@ from warnings import warn from arangoasync.aql import AQL -from arangoasync.collection import StandardCollection +from arangoasync.collection import Collection, StandardCollection from arangoasync.connection import Connection from arangoasync.errno import HTTP_FORBIDDEN, HTTP_NOT_FOUND from arangoasync.exceptions import ( @@ -46,6 +46,13 @@ UserListError, UserReplaceError, UserUpdateError, + ViewCreateError, + ViewDeleteError, + ViewGetError, + ViewListError, + ViewRenameError, + ViewReplaceError, + ViewUpdateError, ) from arangoasync.executor import ( ApiExecutor, @@ -684,6 +691,351 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) + async def has_document( + self, + document: str | Json, + allow_dirty_read: bool = False, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[bool]: + """Check if a document exists. + + Args: + document (str | dict): Document ID, key or body. + Document body must contain the "_id" field. + allow_dirty_read (bool): Allow reads from followers in a cluster. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + `True` if the document exists, `False` otherwise. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. 
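A minimal usage sketch for the check above (the database handle ``db``, the collection and the keys are illustrative); like the widened ``delete()`` signature earlier in this diff, the helper accepts either a document ID string or a body carrying ``_id``:

.. code-block:: python

    async def student_exists(db) -> bool:
        # Either form below resolves to the same document handle.
        by_id = await db.has_document("students/lola")
        by_body = await db.has_document({"_id": "students/lola"})
        return by_id and by_body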
+ + References: + - `get-a-document-header `__ + """ # noqa: E501 + col = Collection.get_col_name(document) + return await self.collection(col).has( + document, + allow_dirty_read=allow_dirty_read, + if_match=if_match, + if_none_match=if_none_match, + ) + + async def document( + self, + document: str | Json, + allow_dirty_read: bool = False, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[Json]]: + """Return a document. + + Args: + document (str | dict): Document ID, key or body. + Document body must contain the "_id" field. + allow_dirty_read (bool): Allow reads from followers in a cluster. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + Document or `None` if not found. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. + + References: + - `get-a-document `__ + """ # noqa: E501 + col: StandardCollection[Json, Json, Jsons] = self.collection( + Collection.get_col_name(document) + ) + return await col.get( + document, + allow_dirty_read=allow_dirty_read, + if_match=if_match, + if_none_match=if_none_match, + ) + + async def insert_document( + self, + collection: str, + document: Json, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + overwrite: Optional[bool] = None, + overwrite_mode: Optional[str] = None, + keep_null: Optional[bool] = None, + merge_objects: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + version_attribute: Optional[str] = None, + ) -> Result[bool | Json]: + """Insert a new document. + + Args: + collection (str): Collection name. + document (dict): Document to insert. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. Only available if the + `overwrite` option is used. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + overwrite (bool | None): If set to `True`, operation does not fail on + duplicate key and existing document is overwritten (replace-insert). + overwrite_mode (str | None): Overwrite mode. Supersedes **overwrite** + option. May be one of "ignore", "replace", "update" or "conflict". + keep_null (bool | None): If set to `True`, fields with value None are + retained in the document. Otherwise, they are removed completely. + Applies only when **overwrite_mode** is set to "update" + (update-insert). + merge_objects (bool | None): If set to `True`, sub-dictionaries are merged + instead of the new one overwriting the old one. Applies only when + **overwrite_mode** is set to "update" (update-insert). + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document insertions affect the edge index + or cache-enabled persistent indexes. 
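As a hedged sketch of the overwrite-related options listed here (collection name and payloads are illustrative, and the collection is assumed to start out empty):

.. code-block:: python

    async def upsert_student(db) -> None:
        # First insert; "_key" is chosen explicitly instead of being auto-generated.
        await db.insert_document("students", {"_key": "lola", "GPA": 3.5})

        # Update-insert: merge into the existing document instead of failing
        # with a duplicate-key error.
        result = await db.insert_document(
            "students",
            {"_key": "lola", "GPA": 3.6},
            overwrite_mode="update",
            return_new=True,
        )
        print(result["new"]["GPA"])  # 3.6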
+ version_attribute (str | None): Support for simple external versioning to + document operations. Only applicable if **overwrite** is set to `True` + or **overwrite_mode** is set to "update" or "replace". + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. + + References: + - `create-a-document `__ + """ # noqa: E501 + col: StandardCollection[Json, Json, Jsons] = self.collection(collection) + return await col.insert( + document, + wait_for_sync=wait_for_sync, + return_new=return_new, + return_old=return_old, + silent=silent, + overwrite=overwrite, + overwrite_mode=overwrite_mode, + keep_null=keep_null, + merge_objects=merge_objects, + refill_index_caches=refill_index_caches, + version_attribute=version_attribute, + ) + + async def update_document( + self, + document: Json, + ignore_revs: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + keep_null: Optional[bool] = None, + merge_objects: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + version_attribute: Optional[str] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Update a document. + + Args: + document (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only updated if the current revision is the one + specified. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + merge_objects (bool | None): Controls whether objects (not arrays) are + merged if present in both the existing and the patch document. + If set to `False`, the value in the patch document overwrites the + existing document’s value. If set to `True`, objects are merged. + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document updates affect the edge index + or cache-enabled persistent indexes. + version_attribute (str | None): Support for simple external versioning to + document operations. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentUpdateError: If update fails. 
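A short sketch of the call described above; the target collection is inferred from the ``_id`` prefix, so no collection name is passed (the document values are illustrative):

.. code-block:: python

    async def raise_gpa(db) -> None:
        patch = {"_id": "students/lola", "GPA": 3.8}
        result = await db.update_document(patch, return_new=True)
        print(result["new"]["GPA"])  # 3.8, assuming the document exists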
+ + References: + - `update-a-document `__ + """ # noqa: E501 + col: StandardCollection[Json, Json, Jsons] = self.collection( + Collection.get_col_name(document) + ) + return await col.update( + document, + ignore_revs=ignore_revs, + wait_for_sync=wait_for_sync, + return_new=return_new, + return_old=return_old, + silent=silent, + keep_null=keep_null, + merge_objects=merge_objects, + refill_index_caches=refill_index_caches, + version_attribute=version_attribute, + if_match=if_match, + ) + + async def replace_document( + self, + document: Json, + ignore_revs: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + version_attribute: Optional[str] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Replace a document. + + Args: + document (dict): New document. It must contain the "_key" or "_id" field. + Edge document must also have "_from" and "_to" fields. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only replaced if the current revision is the one + specified. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document updates affect the edge index + or cache-enabled persistent indexes. + version_attribute (str | None): Support for simple external versioning to + document operations. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentReplaceError: If replace fails. + + References: + - `replace-a-document `__ + """ # noqa: E501 + col: StandardCollection[Json, Json, Jsons] = self.collection( + Collection.get_col_name(document) + ) + return await col.replace( + document, + ignore_revs=ignore_revs, + wait_for_sync=wait_for_sync, + return_new=return_new, + return_old=return_old, + silent=silent, + refill_index_caches=refill_index_caches, + version_attribute=version_attribute, + if_match=if_match, + ) + + async def delete_document( + self, + document: str | Json, + ignore_revs: Optional[bool] = None, + ignore_missing: bool = False, + wait_for_sync: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Delete a document. + + Args: + document (str | dict): Document ID, key or body. The body must contain the + "_key" or "_id" field. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. 
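For the revision precondition described here, a hedged sketch (IDs are illustrative): fetch the current ``_rev`` first and pass it back so the delete only succeeds if the document has not changed in the meantime:

.. code-block:: python

    async def delete_if_unchanged(db) -> None:
        doc = await db.document("students/lola")
        if doc is not None:
            # With ignore_revs=False the "_rev" in the body acts as a precondition;
            # DocumentRevisionError is raised if the stored revision differs.
            await db.delete_document(
                {"_id": doc["_id"], "_rev": doc["_rev"]},
                ignore_revs=False,
            )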
+ The document is only replaced if the current revision is the one + specified. + ignore_missing (bool): Do not raise an exception on missing document. + This parameter has no effect in transactions where an exception is + always raised on failures. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document updates affect the edge index + or cache-enabled persistent indexes. + if_match (bool | None): You can conditionally remove a document based + on a target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True` and the document was found. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentDeleteError: If deletion fails. + + References: + - `remove-a-document `__ + """ # noqa: E501 + col: StandardCollection[Json, Json, Jsons] = self.collection( + Collection.get_col_name(document) + ) + return await col.delete( + document, + ignore_revs=ignore_revs, + ignore_missing=ignore_missing, + wait_for_sync=wait_for_sync, + return_old=return_old, + silent=silent, + refill_index_caches=refill_index_caches, + if_match=if_match, + ) + def graph( self, name: str, @@ -878,6 +1230,237 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) + async def view(self, name: str) -> Result[Json]: + """Return the properties of a view. + + Args: + name (str): View name. + + Returns: + dict: View properties. + + Raises: + ViewGetError: If the operation fails. + + References: + - `read-properties-of-a-view `__ + - `get-the-properties-of-a-view `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint=f"/_api/view/{name}/properties") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ViewGetError(resp, request) + return self.deserializer.loads(resp.raw_body) + + return await self._executor.execute(request, response_handler) + + async def view_info(self, name: str) -> Result[Json]: + """Return basic information about a specific view. + + Args: + name (str): View name. + + Returns: + dict: View information. + + Raises: + ViewGetError: If the operation fails. + + References: + - `get-information-about-a-view `_ + - `get-information-about-a-view `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint=f"/_api/view/{name}") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ViewGetError(resp, request) + return self.deserializer.loads(resp.raw_body) + + return await self._executor.execute(request, response_handler) + + async def views(self) -> Result[Jsons]: + """List all views in the database along with their summary information. + + Returns: + list: List of views with their properties. + + Raises: + ViewListError: If the operation fails. 
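A sketch of the three read-only view helpers (``views``, ``view_info``, ``view``); the view name is illustrative:

.. code-block:: python

    async def inspect_views(db) -> None:
        # Summary of every view in the database.
        for summary in await db.views():
            print(summary["name"], summary["type"])

        # Basic information (id, name, type) versus full properties.
        info = await db.view_info("students-view")
        props = await db.view("students-view")
        print(info["id"], props.get("consolidationIntervalMsec"))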
+ + References: + - `list-all-views `__ + - `list-all-views `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint="/_api/view") + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise ViewListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return cast(Jsons, body["result"]) + + return await self._executor.execute(request, response_handler) + + async def create_view( + self, + name: str, + view_type: str, + properties: Optional[Json] = None, + ) -> Result[Json]: + """Create a view. + + Args: + name (str): View name. + view_type (str): Type of the view (e.g., "arangosearch", "view"). + properties (dict | None): Properties of the view. + + Returns: + dict: View properties. + + Raises: + ViewCreateError: If the operation fails. + + References: + - `create-a-search-alias-view `__ + - `create-an-arangosearch-view `__ + """ # noqa: E501 + data: Json = {"name": name, "type": view_type} + if properties is not None: + data.update(properties) + + request = Request( + method=Method.POST, + endpoint="/_api/view", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ViewCreateError(resp, request) + return self.deserializer.loads(resp.raw_body) + + return await self._executor.execute(request, response_handler) + + async def replace_view(self, name: str, properties: Json) -> Result[Json]: + """Replace the properties of an existing view. + + Args: + name (str): View name. + properties (dict): New properties for the view. + + Returns: + dict: Updated view properties. + + Raises: + ViewReplaceError: If the operation fails. + + References: + - `replace-the-properties-of-a-search-alias-view `__ + - `replace-the-properties-of-an-arangosearch-view `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint=f"/_api/view/{name}/properties", + data=self.serializer.dumps(properties), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self.deserializer.loads(resp.raw_body) + raise ViewReplaceError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def update_view(self, name: str, properties: Json) -> Result[Json]: + """Update the properties of an existing view. + + Args: + name (str): View name. + properties (dict): New properties for the view. + + Returns: + dict: Updated view properties. + + Raises: + ViewUpdateError: If the operation fails. + + References: + - `update-the-properties-of-a-search-alias-view `__ + - `update-the-properties-of-an-arangosearch-view `__ + """ # noqa: E501 + request = Request( + method=Method.PATCH, + endpoint=f"/_api/view/{name}/properties", + data=self.serializer.dumps(properties), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self.deserializer.loads(resp.raw_body) + raise ViewUpdateError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def rename_view(self, name: str, new_name: str) -> None: + """Rename an existing view (not supported in cluster deployments). + + Args: + name (str): Current view name. + new_name (str): New view name. + + Raises: + ViewRenameError: If the operation fails. 
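Putting the write operations above together, a hedged sketch (view and collection names are illustrative; the rename step is skipped on cluster deployments, where it is unsupported):

.. code-block:: python

    async def manage_view(db) -> None:
        # Create an arangosearch view over an existing "students" collection.
        await db.create_view(
            "students-view",
            "arangosearch",
            properties={"links": {"students": {}}},
        )

        # PATCH semantics: only the listed properties change.
        await db.update_view("students-view", {"consolidationIntervalMsec": 200})

        # PUT semantics: unspecified properties fall back to their defaults.
        await db.replace_view("students-view", {"cleanupIntervalStep": 2000})

        # Renaming is only available on single-server deployments.
        await db.rename_view("students-view", "students-search")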
+ + References: + - `rename-a-view `__ + - `rename-a-view `__ + """ # noqa: E501 + request = Request( + method=Method.PUT, + endpoint=f"/_api/view/{name}/rename", + data=self.serializer.dumps({"name": new_name}), + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise ViewRenameError(resp, request) + + await self._executor.execute(request, response_handler) + + async def delete_view( + self, name: str, ignore_missing: bool = False + ) -> Result[bool]: + """Delete a view. + + Args: + name (str): View name. + ignore_missing (bool): If `True`, do not raise an exception if the + view does not exist. + + Returns: + bool: `True` if the view was deleted successfully, `False` if the + view was not found and **ignore_missing** was set to `True`. + + Raises: + ViewDeleteError: If the operation fails. + + References: + - `drop-a-view `__ + - `drop-a-view `__ + """ # noqa: E501 + request = Request(method=Method.DELETE, endpoint=f"/_api/view/{name}") + + def response_handler(resp: Response) -> bool: + if resp.is_success: + return True + if resp.status_code == HTTP_NOT_FOUND and ignore_missing: + return False + raise ViewDeleteError(resp, request) + + return await self._executor.execute(request, response_handler) + async def has_user(self, username: str) -> Result[bool]: """Check if a user exists. diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index c4ee40a..4e46d06 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -429,3 +429,31 @@ class VertexCollectionDeleteError(ArangoServerError): class VertexCollectionListError(ArangoServerError): """Failed to retrieve vertex collections.""" + + +class ViewCreateError(ArangoServerError): + """Failed to create view.""" + + +class ViewDeleteError(ArangoServerError): + """Failed to delete view.""" + + +class ViewGetError(ArangoServerError): + """Failed to retrieve view details.""" + + +class ViewListError(ArangoServerError): + """Failed to retrieve views.""" + + +class ViewRenameError(ArangoServerError): + """Failed to rename view.""" + + +class ViewReplaceError(ArangoServerError): + """Failed to replace view.""" + + +class ViewUpdateError(ArangoServerError): + """Failed to update view.""" diff --git a/docs/document.rst b/docs/document.rst index 571507e..c0764e8 100644 --- a/docs/document.rst +++ b/docs/document.rst @@ -150,4 +150,54 @@ Standard documents are managed via collection API wrapper: # Delete one or more matching documents. await students.delete_match({"first": "Emma"}) +You can manage documents via database API wrappers also, but only simple +operations (i.e. get, insert, update, replace, delete) are supported and you +must provide document IDs instead of keys: + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Create a new collection named "students" if it does not exist. + if not await db.has_collection("students"): + await db.create_collection("students") + + # Create some test documents to play around with. + # The documents must have the "_id" field instead. + lola = {"_id": "students/lola", "GPA": 3.5} + abby = {"_id": "students/abby", "GPA": 3.2} + john = {"_id": "students/john", "GPA": 3.6} + emma = {"_id": "students/emma", "GPA": 4.0} + + # Insert a new document. 
+ metadata = await db.insert_document("students", lola) + assert metadata["_id"] == "students/lola" + assert metadata["_key"] == "lola" + + # Check if a document exists. + assert await db.has_document(lola) is True + + # Get a document (by ID or body with "_id" field). + await db.document("students/lola") + await db.document(abby) + + # Update a document. + lola["GPA"] = 3.6 + await db.update_document(lola) + + # Replace a document. + lola["GPA"] = 3.4 + await db.replace_document(lola) + + # Delete a document (by ID or body with "_id" field). + await db.delete_document("students/lola") + See :class:`arangoasync.database.StandardDatabase` and :class:`arangoasync.collection.StandardCollection` for API specification. diff --git a/docs/index.rst b/docs/index.rst index 180c0ed..f30ed6e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -44,6 +44,7 @@ Contents :maxdepth: 1 transaction + view **API Executions** diff --git a/docs/view.rst b/docs/view.rst new file mode 100644 index 0000000..f680b54 --- /dev/null +++ b/docs/view.rst @@ -0,0 +1,69 @@ +Views +----- + +All types of views are supported. . For more information on **view** +management, refer to `ArangoDB Manual`_. + +.. _ArangoDB Manual: https://docs.arangodb.com + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Retrieve list of views. + await db.views() + + # Create a view. + await db.create_view( + name="foo", + view_type="arangosearch", + properties={ + "cleanupIntervalStep": 0, + "consolidationIntervalMsec": 0 + } + ) + + # Rename a view (not supported in cluster deployments). + await db.rename_view("foo", "bar") + + # Retrieve view properties. + await db.view("bar") + + # Retrieve view summary. + await db.view_info("bar") + + # Partially update view properties. + await db.update_view( + name="bar", + properties={ + "cleanupIntervalStep": 1000, + "consolidationIntervalMsec": 200 + } + ) + + # Replace view properties. Unspecified ones are reset to default. + await db.replace_view( + name="bar", + properties={"cleanupIntervalStep": 2000} + ) + + # Delete a view. + await db.delete_view("bar") + +For more information on the content of view **properties**, +see `Search Alias Views`_ and `Arangosearch Views`_. + +.. _Search Alias Views: https://docs.arangodb.com/stable/develop/http-api/views/search-alias-views/ +.. _Arangosearch Views: https://docs.arangodb.com/stable/develop/http-api/views/arangosearch-views/ + +Refer to :class:`arangoasync.database.StandardDatabase` class for API specification. diff --git a/starter.sh b/starter.sh old mode 100644 new mode 100755 index be1778a..3eef281 --- a/starter.sh +++ b/starter.sh @@ -6,7 +6,7 @@ # Usage: # ./starter.sh [single|cluster] [community|enterprise] [version] # Example: -# ./starter.sh cluster enterprise 3.11.4 +# ./starter.sh cluster enterprise 3.12.4 setup="${1:-single}" license="${2:-community}" diff --git a/tests/helpers.py b/tests/helpers.py index 8e91c26..b961064 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -44,3 +44,12 @@ def generate_string(): str: Random unique string. """ return uuid4().hex + + +def generate_view_name(): + """Generate and return a random view name. + + Returns: + str: Random view name. 
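The new exception types pair naturally with the error codes used in the tests below; a sketch of an idempotent "create if missing" helper (the view name and properties are illustrative):

.. code-block:: python

    from arangoasync import errno
    from arangoasync.exceptions import ViewCreateError


    async def ensure_view(db, name: str) -> None:
        try:
            await db.create_view(name, "arangosearch", {"links": {}})
        except ViewCreateError as err:
            # Another run (or another client) may have created it already.
            if err.error_code != errno.DUPLICATE_NAME:
                raise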
+ """ + return f"test_view_{uuid4().hex}" diff --git a/tests/test_document.py b/tests/test_document.py index fbfd2b3..741ec34 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -566,3 +566,51 @@ async def test_document_delete_match(doc_col, bad_col, docs): await doc_col.insert_many(docs) count = await doc_col.delete_match({"text": "no_matching"}) assert count == 0 + + +@pytest.mark.asyncio +async def test_document_db_operations(db, bad_db, doc_col, docs): + # Insert a document through the collection API + doc = await doc_col.insert(docs[0]) + + # Check if the document exists in the database + assert await db.has_document(doc) is True + assert await db.has_document({"_id": "missing_col/missing_doc"}) is False + assert await db.has_document("missing_doc") is False + with pytest.raises(DocumentGetError): + await bad_db.has_document(doc) + + # Get the document + doc2 = await db.document(doc["_id"]) + assert doc2["_id"] == doc["_id"] + with pytest.raises(DocumentGetError): + await bad_db.document(doc["_id"]) + + # Insert a new document + doc = await db.insert_document(doc_col.name, docs[1]) + assert doc["_id"] == f"{doc_col.name}/{doc['_key']}" + with pytest.raises(DocumentInsertError): + await bad_db.insert_document(doc_col.name, docs[2]) + + # Update the document + doc["val"] = 100 + updated_doc = await db.update_document(doc, return_new=True) + assert updated_doc["_id"] == doc["_id"] + assert updated_doc["new"]["val"] == 100 + with pytest.raises(DocumentUpdateError): + await bad_db.update_document(doc) + + # Replace the document + doc["val"] = 200 + replaced_doc = await db.replace_document(doc, return_new=True) + assert replaced_doc["_id"] == doc["_id"] + assert replaced_doc["new"]["val"] == 200 + with pytest.raises(DocumentReplaceError): + await bad_db.replace_document(doc) + + # Delete the document + deleted_doc = await db.delete_document(doc["_id"], return_old=True) + assert deleted_doc["_id"] == doc["_id"] + assert deleted_doc["old"]["val"] == 200 + with pytest.raises(DocumentDeleteError): + await bad_db.delete_document(doc) diff --git a/tests/test_view.py b/tests/test_view.py new file mode 100644 index 0000000..80b2388 --- /dev/null +++ b/tests/test_view.py @@ -0,0 +1,137 @@ +import pytest + +from arangoasync import errno +from arangoasync.exceptions import ( + ViewCreateError, + ViewDeleteError, + ViewGetError, + ViewListError, + ViewRenameError, + ViewReplaceError, + ViewUpdateError, +) +from tests.helpers import generate_view_name + + +@pytest.mark.asyncio +async def test_view_management(db, bad_db, doc_col, cluster): + # Create a view + view_name = generate_view_name() + bad_view_name = generate_view_name() + view_type = "arangosearch" + + result = await db.create_view( + view_name, + view_type, + {"consolidationIntervalMsec": 50000, "links": {doc_col.name: {}}}, + ) + assert "id" in result + assert result["name"] == view_name + assert result["type"] == view_type + assert result["consolidationIntervalMsec"] == 50000 + assert doc_col.name in result["links"] + + # Create view with bad database + with pytest.raises(ViewCreateError): + await bad_db.create_view( + view_name, + view_type, + {"consolidationIntervalMsec": 50000, "links": {doc_col.name: {}}}, + ) + + view_id = result["id"] + + # Test create duplicate view + with pytest.raises(ViewCreateError) as err: + await db.create_view(view_name, view_type, {"consolidationIntervalMsec": 50000}) + assert err.value.error_code == errno.DUPLICATE_NAME + + # Test get view (properties) + view = await db.view(view_name) + 
assert view["id"] == view_id + assert view["name"] == view_name + assert view["type"] == view_type + assert view["consolidationIntervalMsec"] == 50000 + + # Test get missing view + with pytest.raises(ViewGetError) as err: + await db.view(bad_view_name) + assert err.value.error_code == errno.DATA_SOURCE_NOT_FOUND + + # Test get view info + view_info = await db.view_info(view_name) + assert view_info["id"] == view_id + assert view_info["name"] == view_name + assert view_info["type"] == view_type + assert "consolidationIntervalMsec" not in view_info + with pytest.raises(ViewGetError) as err: + await db.view_info(bad_view_name) + assert err.value.error_code == errno.DATA_SOURCE_NOT_FOUND + + # Test list views + result = await db.views() + assert len(result) == 1 + view = result[0] + assert view["id"] == view_id + assert view["name"] == view_name + assert view["type"] == view_type + + # Test list views with bad database + with pytest.raises(ViewListError) as err: + await bad_db.views() + assert err.value.error_code == errno.FORBIDDEN + + # Test replace view + view = await db.replace_view(view_name, {"consolidationIntervalMsec": 40000}) + assert view["id"] == view_id + assert view["name"] == view_name + assert view["type"] == view_type + assert view["consolidationIntervalMsec"] == 40000 + + # Test replace view with bad database + with pytest.raises(ViewReplaceError) as err: + await bad_db.replace_view(view_name, {"consolidationIntervalMsec": 7000}) + assert err.value.error_code == errno.FORBIDDEN + + # Test update view + view = await db.update_view(view_name, {"consolidationIntervalMsec": 70000}) + assert view["id"] == view_id + assert view["name"] == view_name + assert view["type"] == view_type + assert view["consolidationIntervalMsec"] == 70000 + + # Test update view with bad database + with pytest.raises(ViewUpdateError) as err: + await bad_db.update_view(view_name, {"consolidationIntervalMsec": 80000}) + assert err.value.error_code == errno.FORBIDDEN + + # Test rename view + new_view_name = generate_view_name() + if cluster: + with pytest.raises(ViewRenameError): + await db.rename_view(view_name, new_view_name) + new_view_name = view_name + else: + await db.rename_view(view_name, new_view_name) + result = await db.views() + assert len(result) == 1 + view = result[0] + assert view["id"] == view_id + assert view["name"] == new_view_name + + # Test rename missing view + with pytest.raises(ViewRenameError) as err: + await db.rename_view(bad_view_name, view_name) + assert err.value.error_code == errno.DATA_SOURCE_NOT_FOUND + + # Test delete view + assert await db.delete_view(new_view_name) is True + assert len(await db.views()) == 0 + + # Test delete missing view + with pytest.raises(ViewDeleteError) as err: + await db.delete_view(new_view_name) + assert err.value.error_code == errno.DATA_SOURCE_NOT_FOUND + + # Test delete missing view with ignore_missing set to True + assert await db.delete_view(view_name, ignore_missing=True) is False
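The ``ignore_missing`` flags exercised at the end of these tests also keep teardown code idempotent; a minimal sketch (names are illustrative):

.. code-block:: python

    async def teardown(db) -> None:
        # Neither call raises if the target is already gone; the view helper
        # returns False in that case instead of raising ViewDeleteError.
        await db.delete_view("students-view", ignore_missing=True)
        await db.delete_document("students/lola", ignore_missing=True)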